hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses: 1 value) |
---|---|---|---|---|
42aa6e2de34fb2fd808dc831a36aab1b648f4f77 | diff --git a/code/extensions/WorkflowApplicable.php b/code/extensions/WorkflowApplicable.php
index <HASH>..<HASH> 100755
--- a/code/extensions/WorkflowApplicable.php
+++ b/code/extensions/WorkflowApplicable.php
@@ -27,22 +27,24 @@ class WorkflowApplicable extends DataObjectDecorator {
}
public function updateCMSFields(FieldSet $fields) {
- $svc = singleton('WorkflowService');
- $effective = $svc->getDefinitionFor($this->owner);
- $effectiveTitle = 'None';
- if ($effective) {
+ $service = singleton('WorkflowService');
+
+ if($effective = $service->getDefinitionFor($this->owner)) {
$effectiveTitle = $effective->Title;
+ } else {
+ $effectiveTitle = _t('WorkflowApplicable.NONE', '(none)');
}
- $definitions[] = 'Inherit';
- if($defs = $svc->getDefinitions())foreach ($defs->map() as $id => $title) {
- $definitions[$id] = $title;
+ $allDefinitions = array(_t('WorkflowApplicable.INHERIT', 'Inherit from parent'));
+
+ if($definitions = $service->getDefinitions()) {
+ $allDefinitions += $definitions->map();
}
$fields->addFieldsToTab('Root.Workflow', array(
new HeaderField('AppliedWorkflowHeader', _t('WorkflowApplicable.APPLIEDWORKFLOW', 'Applied Workflow')),
new DropdownField('WorkflowDefinitionID',
- _t('WorkflowApplicable.DEFINITION', 'Applied Workflow'), $definitions),
+ _t('WorkflowApplicable.DEFINITION', 'Applied Workflow'), $allDefinitions),
new ReadonlyField('EffectiveWorkflow',
_t('WorkflowApplicable.EFFECTIVE_WORKFLOW', 'Effective Workflow'), $effectiveTitle),
new HeaderField('WorkflowLogHeader', _t('WorkflowApplicable.WORKFLOWLOG', 'Workflow Log')), | MINOR: Tidied up the applied and effective workflow code in WorkflowApplicable->updateCMSFields(). | symbiote_silverstripe-advancedworkflow | train |
abbf1c01fcf0363eceb73e9e72fd48f8e3a09017 | diff --git a/clinacl.py b/clinacl.py
index <HASH>..<HASH> 100755
--- a/clinacl.py
+++ b/clinacl.py
@@ -103,13 +103,17 @@ def verify(keyhex):
def keybase():
+ # A Keybase NaCl signature is a Base64-encoded MessagePack blob containing
+    # the payload, the signing KID, and the detached signature bytes. We
+ # decode, unpack, and then verify the signature. If it's valid, we print
+ # the payload (which is usually a JSON blob).
sig_base64 = sys.stdin.read()
sig_msgpack_bytes = base64.b64decode(sig_base64)
sig_obj = umsgpack.unpackb(sig_msgpack_bytes)
keybytes_tagged = sig_obj['body']['key']
- # Keybase KIDs are type-tagged with two bytes at the front and
- # one byte in the back.
- keybytes = keybytes_tagged[2:34]
+ # Keybase KIDs are just NaCl public keys type-tagged with two bytes at the
+ # front and one byte in the back. Stripping these gives the key.
+ keybytes = keybytes_tagged[2:-1]
verifykey = nacl.signing.VerifyKey(keybytes)
detatched_sig_bytes = sig_obj['body']['sig']
sig_payload = sig_obj['body']['payload'] | make KID tag-stripping more robust
As per the comment, we want to strip off two leading bytes and one
trailing byte from Keybase KIDs. We shouldn't assume the length of the
KID. If the length changes on us, we would rather fail with bad length
errors than mysteriously reject valid signatures. | oconnor663_clinacl | train |
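A minimal standalone sketch of the slicing change above, using a hypothetical 35-byte KID value, showing why the negative end index is the more robust choice:

```python
# Hypothetical Keybase KID layout: 2 leading tag bytes + 32 NaCl key bytes + 1 trailing byte.
keybytes_tagged = b"\x01\x20" + bytes(32) + b"\x0a"

fixed = keybytes_tagged[2:34]   # assumes the KID is exactly 35 bytes long
robust = keybytes_tagged[2:-1]  # strips the tag bytes regardless of total length

assert fixed == robust          # equal only while the 35-byte assumption holds
assert len(robust) == 32        # a NaCl public key is 32 bytes
```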
94021872ad715bc741f13670259232241e53c473 | diff --git a/go/dhcp/main.go b/go/dhcp/main.go
index <HASH>..<HASH> 100644
--- a/go/dhcp/main.go
+++ b/go/dhcp/main.go
@@ -297,6 +297,7 @@ func (h *Interface) ServeDHCP(ctx context.Context, p dhcp.Packet, msgType dhcp.M
switch msgType {
case dhcp.Discover:
+ firstTry := true
log.LoggerWContext(ctx).Info("DHCPDISCOVER from " + clientMac + " (" + clientHostname + ")")
var free int
i := handler.available.Iterator()
@@ -317,7 +318,7 @@ func (h *Interface) ServeDHCP(ctx context.Context, p dhcp.Packet, msgType dhcp.M
if i.HasNext() {
var element uint32
// Check if the device request a specific ip
- if p.ParseOptions()[50] != nil {
+ if p.ParseOptions()[50] != nil && firstTry {
element := uint32(binary.BigEndian.Uint32(p.ParseOptions()[50])) - uint32(binary.BigEndian.Uint32(handler.start.To4()))
if handler.available.Contains(element) {
// Ip is available, return OFFER with this ip address
@@ -341,6 +342,8 @@ func (h *Interface) ServeDHCP(ctx context.Context, p dhcp.Packet, msgType dhcp.M
log.LoggerWContext(ctx).Info(p.CHAddr().String() + " Ip " + dhcp.IPAdd(handler.start, free).String() + " already in use, trying next")
// Added back in the pool since it's not the dhcp server who gave it
handler.hwcache.Delete(p.CHAddr().String())
+ firstTry = false
+ handler.available.Remove(uint32(free))
goto retry
}
			handler.available.Remove(element) | don't obsess over giving the requested IP if it's already used | inverse-inc_packetfence | train |
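A simplified sketch of the allocation rule in the commit above (illustrative Python with stand-in names, not the PacketFence implementation): honor the client's requested IP only on the first attempt, then fall back to the next free address:

```python
def pick_offer(available, requested=None, first_try=True):
    """Return an address to offer; ignore the client's request after the first try."""
    if first_try and requested in available:
        return requested
    return min(available)  # stand-in for iterating the free-address pool

pool = {10, 11, 12}
assert pick_offer(pool, requested=11) == 11   # first DISCOVER honors the request
pool.discard(11)                              # ...but it turned out to be in use
assert pick_offer(pool, requested=11, first_try=False) == 10
```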
1928ae222149789046a07cceec21b256c9c84a8c | diff --git a/docs/HowToUsePyparsing.rst b/docs/HowToUsePyparsing.rst
index <HASH>..<HASH> 100644
--- a/docs/HowToUsePyparsing.rst
+++ b/docs/HowToUsePyparsing.rst
@@ -226,13 +226,6 @@ Usage notes
or expressions that may occur within an ``And`` expression; an early element
of an ``And`` may match, but the overall expression may fail.
-- Performance of pyparsing may be slow for complex grammars and/or large
- input strings. The psyco_ package can be used to improve the speed of the
- pyparsing module with no changes to grammar or program logic - observed
- improvments have been in the 20-50% range.
-
-.. _psyco: http://psyco.sourceforge.net/
-
Classes
=======
@@ -363,11 +356,8 @@ methods for code to use are:
performance enhancement, known as "packrat parsing". packrat parsing is
disabled by default, since it may conflict with some user programs that use
parse actions. To activate the packrat feature, your
- program must call the class method ParserElement.enablePackrat(). If
- your program uses psyco to "compile as you go", you must call
- enablePackrat before calling psyco.full(). If you do not do this,
- Python will crash. For best results, call enablePackrat() immediately
- after importing pyparsing.
+ program must call the class method ParserElement.enablePackrat(). For best
+ results, call enablePackrat() immediately after importing pyparsing.
Basic ParserElement subclasses
diff --git a/examples/verilogParse.py b/examples/verilogParse.py
index <HASH>..<HASH> 100644
--- a/examples/verilogParse.py
+++ b/examples/verilogParse.py
@@ -73,10 +73,8 @@ from pyparsing import Literal, Keyword, Word, OneOrMore, ZeroOrMore, \
StringEnd, FollowedBy, ParserElement, Regex, cppStyleComment
import pyparsing
usePackrat = False
-usePsyco = False
packratOn = False
-psycoOn = False
if usePackrat:
try:
@@ -86,16 +84,6 @@ if usePackrat:
else:
packratOn = True
-# comment out this section to disable psyco function compilation
-if usePsyco:
- try:
- import psyco
- psyco.full()
- except:
- print("failed to import psyco Python optimizer")
- else:
- psycoOn = True
-
def dumpTokens(s,l,t):
import pprint
@@ -637,7 +625,6 @@ else:
print(" - using pyparsing version", pyparsing.__version__)
print(" - using Python version", sys.version)
if packratOn: print(" - using packrat parsing")
- if psycoOn: print(" - using psyco runtime optimization")
print()
import os
diff --git a/pyparsing.py b/pyparsing.py
index <HASH>..<HASH> 100644
--- a/pyparsing.py
+++ b/pyparsing.py
@@ -1677,11 +1677,9 @@ class ParserElement(object):
This speedup may break existing programs that use parse actions that
have side-effects. For this reason, packrat parsing is disabled when
you first import pyparsing. To activate the packrat feature, your
- program must call the class method :class:`ParserElement.enablePackrat`. If
- your program uses ``psyco`` to "compile as you go", you must call
- ``enablePackrat`` before calling ``psyco.full()``. If you do not do this,
- Python will crash. For best results, call ``enablePackrat()`` immediately
- after importing pyparsing.
+ program must call the class method :class:`ParserElement.enablePackrat`.
+ For best results, call ``enablePackrat()`` immediately after
+ importing pyparsing.
Example:: | Remove mentions of 'psyco' from docs and examples
The psyco package has been declared unmaintained and dead. It is no
longer receiving bug fixes, including for security issues. From
<URL>.
Users can continue to use PyPy for the latest and greatest in Python
JIT. | pyparsing_pyparsing | train |
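For reference, the usage pattern the updated documentation recommends — enabling packrat parsing right after import, with no psyco ordering concerns anymore:

```python
import pyparsing

# Packrat parsing is off by default because it may conflict with
# parse actions that have side effects; the docs advise enabling it
# immediately after import for best results.
pyparsing.ParserElement.enablePackrat()
```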
821c2e87eb3f096b5c4baecefebfc476edc27cd9 | diff --git a/lib/puppet/configurer/plugin_handler.rb b/lib/puppet/configurer/plugin_handler.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/configurer/plugin_handler.rb
+++ b/lib/puppet/configurer/plugin_handler.rb
@@ -16,22 +16,7 @@ module Puppet::Configurer::PluginHandler
Puppet[:pluginsignore]
)
- plugin_downloader.evaluate.each { |file| load_plugin(file) }
- end
-
- def load_plugin(file)
- return unless FileTest.exist?(file)
- return if FileTest.directory?(file)
-
- begin
- if file =~ /.rb$/
- Puppet.info "Loading downloaded plugin #{file}"
- load file
- else
- Puppet.debug "Skipping downloaded plugin #{file}"
- end
- rescue Exception => detail
- Puppet.err "Could not load downloaded file #{file}: #{detail}"
- end
+ plugin_downloader.evaluate
+ Puppet::Util::Autoload.reload_changed
end
end
diff --git a/spec/unit/configurer/plugin_handler_spec.rb b/spec/unit/configurer/plugin_handler_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/unit/configurer/plugin_handler_spec.rb
+++ b/spec/unit/configurer/plugin_handler_spec.rb
@@ -54,69 +54,4 @@ describe Puppet::Configurer::PluginHandler do
@pluginhandler.expects(:download_plugins?).returns true
@pluginhandler.download_plugins
end
-
- it "should be able to load plugins" do
- @pluginhandler.should respond_to(:load_plugin)
- end
-
- it "should load each downloaded file" do
- FileTest.stubs(:exist?).returns true
- downloader = mock 'downloader'
-
- Puppet::Configurer::Downloader.expects(:new).returns downloader
-
- downloader.expects(:evaluate).returns %w{one two}
-
- @pluginhandler.expects(:download_plugins?).returns true
-
- @pluginhandler.expects(:load_plugin).with("one")
- @pluginhandler.expects(:load_plugin).with("two")
-
- @pluginhandler.download_plugins
- end
-
- it "should load ruby plugins when asked to do so" do
- FileTest.stubs(:exist?).returns true
- @pluginhandler.expects(:load).with("foo.rb")
-
- @pluginhandler.load_plugin("foo.rb")
- end
-
- it "should skip non-ruby plugins when asked to do so" do
- FileTest.stubs(:exist?).returns true
- @pluginhandler.expects(:load).never
-
- @pluginhandler.load_plugin("foo")
- end
-
- it "should not try to load files that don't exist" do
- FileTest.expects(:exist?).with("foo.rb").returns false
- @pluginhandler.expects(:load).never
-
- @pluginhandler.load_plugin("foo.rb")
- end
-
- it "should not try to load directories" do
- FileTest.stubs(:exist?).returns true
- FileTest.expects(:directory?).with("foo").returns true
- @pluginhandler.expects(:load).never
-
- @pluginhandler.load_plugin("foo")
- end
-
- it "should warn but not fail if loading a file raises an exception" do
- FileTest.stubs(:exist?).returns true
- @pluginhandler.expects(:load).with("foo.rb").raises "eh"
-
- Puppet.expects(:err)
- @pluginhandler.load_plugin("foo.rb")
- end
-
- it "should warn but not fail if loading a file raises a LoadError" do
- FileTest.stubs(:exist?).returns true
- @pluginhandler.expects(:load).with("foo.rb").raises LoadError.new("eh")
-
- Puppet.expects(:err)
- @pluginhandler.load_plugin("foo.rb")
- end
end | (#<I>) When syncing plugins, use autoloader to reload changes
Before this, all files synced would be loaded. This resulted in inconsistent
behavior, since not all files would necessarily be loaded during runs when
they did not need to be synced. This makes it so that any already loaded files
that are changed by pluginsync will get reloaded, and any other files are left
to be autoloaded as they normally would be. | puppetlabs_puppet | train |
818b463f63e6d13039188ca88a1cddaa9afcc655 | diff --git a/cubicle.gemspec b/cubicle.gemspec
index <HASH>..<HASH> 100644
--- a/cubicle.gemspec
+++ b/cubicle.gemspec
@@ -5,7 +5,7 @@
Gem::Specification.new do |s|
s.name = %q{cubicle}
- s.version = "0.1.3"
+ s.version = "0.1.4"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Nathan Stults"]
diff --git a/lib/cubicle/aggregation/aggregation_manager.rb b/lib/cubicle/aggregation/aggregation_manager.rb
index <HASH>..<HASH> 100644
--- a/lib/cubicle/aggregation/aggregation_manager.rb
+++ b/lib/cubicle/aggregation/aggregation_manager.rb
@@ -42,7 +42,7 @@ module Cubicle
if ((aggregation.name.split("_")[-1].split(".")) - query.member_names - [:all_measures]).blank?
filter = prepare_filter(query,options[:where] || {})
else
- aggregation = aggregate(query,:source_collection=>collection.name)
+ aggregation = aggregate(query,:source_collection=>aggregation.name)
end
end
@@ -146,7 +146,7 @@ module Cubicle
options[:finalize] = MapReduceHelper.generate_finalize_function(query)
options["query"] = prepare_filter(query,options[:where] || {})
- query.source_collection_name ||= aggregation.source_collection_name
+ query.source_collection_name = options.delete(:source_collection) || query.source_collection_name || aggregation.source_collection_name
target_collection = options.delete(:target_collection)
target_collection ||= query.target_collection_name if query.respond_to?(:target_collection_name)
diff --git a/lib/cubicle/version.rb b/lib/cubicle/version.rb
index <HASH>..<HASH> 100644
--- a/lib/cubicle/version.rb
+++ b/lib/cubicle/version.rb
@@ -1,3 +1,3 @@
module Cubicle
- VERSION = '0.1.3'
+ VERSION = '0.1.4'
end
\ No newline at end of file | Fixed a serious performance bug preventing aggregation caches from being utilized | PlasticLizard_Cubicle | train |
fd771b5620dfb8b7ba5d60f6b6ac751571ac8fb3 | diff --git a/lib/Layout/Resolver/Form/ConditionType/Mapper/Channel.php b/lib/Layout/Resolver/Form/ConditionType/Mapper/Channel.php
index <HASH>..<HASH> 100644
--- a/lib/Layout/Resolver/Form/ConditionType/Mapper/Channel.php
+++ b/lib/Layout/Resolver/Form/ConditionType/Mapper/Channel.php
@@ -32,6 +32,9 @@ final class Channel extends Mapper
];
}
+ /**
+ * @return array<string, int>
+ */
private function getChannelList(): array
{
$channels = $this->channelRepository->findAll();
diff --git a/lib/Layout/Resolver/Form/ConditionType/Mapper/Locale.php b/lib/Layout/Resolver/Form/ConditionType/Mapper/Locale.php
index <HASH>..<HASH> 100644
--- a/lib/Layout/Resolver/Form/ConditionType/Mapper/Locale.php
+++ b/lib/Layout/Resolver/Form/ConditionType/Mapper/Locale.php
@@ -7,6 +7,7 @@ namespace Netgen\Layouts\Sylius\Layout\Resolver\Form\ConditionType\Mapper;
use Netgen\Layouts\Layout\Resolver\Form\ConditionType\Mapper;
use Sylius\Component\Resource\Repository\RepositoryInterface;
use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
+use function is_string;
final class Locale extends Mapper
{
@@ -32,6 +33,9 @@ final class Locale extends Mapper
];
}
+ /**
+ * @return array<string, string>
+ */
private function getLocaleList(): array
{
$locales = $this->localeRepository->findAll();
@@ -39,6 +43,10 @@ final class Locale extends Mapper
/** @var \Sylius\Component\Locale\Model\Locale $locale */
foreach ($locales as $locale) {
+ if (!is_string($locale->getCode())) {
+ continue;
+ }
+
$localeList[$locale->getName()] = $locale->getCode();
}
diff --git a/lib/Validator/LocaleValidator.php b/lib/Validator/LocaleValidator.php
index <HASH>..<HASH> 100644
--- a/lib/Validator/LocaleValidator.php
+++ b/lib/Validator/LocaleValidator.php
@@ -38,7 +38,7 @@ final class LocaleValidator extends ConstraintValidator
$locale = $this->localeRepository->findOneBy(['code' => $value]);
if (!$locale instanceof LocaleInterface) {
$this->context->buildViolation($constraint->message)
- ->setParameter('%locale%', (string) $value)
+ ->setParameter('%locale%', $value)
->addViolation();
}
} | LAYOUTS-<I> Fix issues reported by phpstan | netgen-layouts_layouts-sylius | train |
6ff389c8e5ea50a1fc8a718850d12a19bd7f98ac | diff --git a/tests/Conditions/LowerTest.php b/tests/Conditions/LowerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Conditions/LowerTest.php
+++ b/tests/Conditions/LowerTest.php
@@ -47,4 +47,26 @@ class LowerTest extends PHPUnit_Framework_TestCase
'empty column name' => array('', new Types\String(''), 'poney'),
);
}
+
+ /**
+ * @dataProvider providerTestFieldGreaterThanField
+ */
+ public function testFieldGreaterThanField($expected, $columnLeft, $columnRight)
+ {
+ $condition = new Conditions\Lower($columnLeft, $columnRight);
+
+ $this->assertSame($expected, $condition->toString($this->escaper));
+ }
+
+ public function providerTestFieldGreaterThanField()
+ {
+ return array(
+ array('pony < unicorn', new Types\String('pony'), new Types\String('unicorn'),),
+ array('pony < id', new Types\String('pony'), new Types\Integer('id'),),
+ array('id < pony', new Types\Integer('id'), new Types\String('pony'),),
+ array('id < ponyId', new Types\Integer('id'), new Types\Integer('ponyId'),),
+ array('creationDate < updateDate', new Types\Datetime('creationDate'), new Types\Datetime('updateDate'),),
+ array('good < evil', new Types\Boolean('good'), new Types\Boolean('evil'),),
+ );
+ }
} | fix #5 Add tests for Lower field condition | lebris_muffin | train |
c1c123888f44457970bbd41f63ee50ea514f9bf3 | diff --git a/lib/actions/set-test.js b/lib/actions/set-test.js
index <HASH>..<HASH> 100644
--- a/lib/actions/set-test.js
+++ b/lib/actions/set-test.js
@@ -334,3 +334,26 @@ test('should return response data when no request data', async (t) => {
t.is(ret.status, 'ok', ret.error)
t.deepEqual(ret.data, expectedData)
})
+
+test('should allow null as request data', async (t) => {
+ const scope = nock('http://api1.test')
+ .post('/database/_bulk_docs', {
+ docs: []
+ })
+ .reply(201, [{ ok: true }, { ok: true }])
+ const action = {
+ type: 'SET',
+ payload: {
+ service: 'entries',
+ data: null
+ }
+ }
+ const src = setupService('http://api1.test/database/_bulk_docs')
+ const getService = (type, service) => src
+
+ const ret = await set(action, { getService, schemas })
+
+ t.truthy(ret)
+ t.is(ret.status, 'ok', ret.error)
+ t.true(scope.isDone())
+})
diff --git a/lib/actions/set.js b/lib/actions/set.js
index <HASH>..<HASH> 100644
--- a/lib/actions/set.js
+++ b/lib/actions/set.js
@@ -30,8 +30,8 @@ async function set (action, { getService, schemas }) {
debug('Action: SET')
const { service: serviceId, data, endpoint, onlyMappedValues = true } = action.payload
- const type = action.payload.type || data.type
- const id = data.id
+ const type = action.payload.type || (data && data.type) || undefined
+ const id = (data && data.id) || undefined
const service = getService(type, serviceId)
if (!service) { | Fix bug where SET action with null as data, would throw an error | integreat-io_integreat | train |
1728e863cac18193b8d8b7675ba2f1c95f91b130 | diff --git a/plenum/test/pool_transactions/test_suspend_node.py b/plenum/test/pool_transactions/test_suspend_node.py
index <HASH>..<HASH> 100644
--- a/plenum/test/pool_transactions/test_suspend_node.py
+++ b/plenum/test/pool_transactions/test_suspend_node.py
@@ -90,7 +90,7 @@ def testStewardSuspendsNode(looper, txnPoolNodeSet,
ha=newNode.nodestack.ha, cliha=newNode.clientstack.ha)
looper.add(nodeTheta)
txnPoolNodeSet.append(nodeTheta)
- looper.run(checkNodesConnected(txnPoolNodeSet, overrideTimeout=10))
+ looper.run(checkNodesConnected(txnPoolNodeSet, overrideTimeout=30))
ensureClientConnectedToNodesAndPoolLedgerSame(looper, steward1,
*txnPoolNodeSet)
ensureClientConnectedToNodesAndPoolLedgerSame(looper, newSteward, | fixed failing test case testStewardSuspendsNode by increasing timeout | hyperledger_indy-plenum | train |
baceb85956c2c13dfba788666fa58b66591ab54a | diff --git a/src/Message/PurchaseRequest.php b/src/Message/PurchaseRequest.php
index <HASH>..<HASH> 100644
--- a/src/Message/PurchaseRequest.php
+++ b/src/Message/PurchaseRequest.php
@@ -37,9 +37,19 @@ class PurchaseRequest extends AbstractRequest
return $this->getParameter('sharedSecret');
}
+ public function setHostedDataId($value)
+ {
+ return $this->setParameter('hostedDataId', $value);
+ }
+
+ public function getHostedDataId()
+ {
+ return $this->getParameter('hostedDataId');
+ }
+
public function getData()
{
- $this->validate('amount', 'card');
+ $this->validate('amount');
$data = array();
$data['storename'] = $this->getStoreId();
@@ -53,16 +63,27 @@ class PurchaseRequest extends AbstractRequest
$data['full_bypass'] = 'true';
$data['oid'] = $this->getParameter('transactionId');
- $this->getCard()->validate();
+ // Card is only required if no hosteddataid (saved 'data vault' card)
+ if (is_null($this->getHostedDataId())) {
+ $this->validate('card');
+ }
+
+ // If a card is passed, validate it
+ if (!is_null($this->getCard())) {
- $data['cardnumber'] = $this->getCard()->getNumber();
- $data['cvm'] = $this->getCard()->getCvv();
- $data['expmonth'] = $this->getCard()->getExpiryDate('m');
- $data['expyear'] = $this->getCard()->getExpiryDate('y');
+ $this->getCard()->validate();
+
+ $data['cardnumber'] = $this->getCard()->getNumber();
+ $data['cvm'] = $this->getCard()->getCvv();
+ $data['expmonth'] = $this->getCard()->getExpiryDate('m');
+ $data['expyear'] = $this->getCard()->getExpiryDate('y');
+ }
$data['responseSuccessURL'] = $this->getParameter('returnUrl');
$data['responseFailURL'] = $this->getParameter('returnUrl');
+ $data['hosteddataid'] = $this->getHostedDataId();
+
return $data;
}
diff --git a/tests/GatewayTest.php b/tests/GatewayTest.php
index <HASH>..<HASH> 100644
--- a/tests/GatewayTest.php
+++ b/tests/GatewayTest.php
@@ -52,6 +52,7 @@ class GatewayTest extends GatewayTestCase
$this->assertFalse($response->isRedirect());
$this->assertEquals('abc123456', $response->getTransactionId());
$this->assertSame('APPROVED', $response->getMessage());
+ $this->assertNull($response->getTransactionReference());
}
/**
@@ -93,4 +94,58 @@ class GatewayTest extends GatewayTestCase
$this->assertEquals('abc1234', $response->getTransactionId());
$this->assertSame('DECLINED', $response->getMessage());
}
+
+ /**
+ * testPurchaseWithHostedDataId.
+ *
+ * Simulates a purchase with "save this card" selected
+ */
+ public function testPurchaseWithHostedDataId()
+ {
+ $dataId = rand();
+ $this->options['hostedDataId'] = $dataId;
+
+ $response = $this->gateway->purchase($this->options)->send();
+
+ $this->assertFalse($response->isSuccessful());
+ $this->assertTrue($response->isRedirect());
+ $requestData = $response->getRedirectData();
+ $this->assertEquals($dataId, $requestData['hosteddataid']);
+ }
+
+ /**
+ * testPurchaseWithHostedDataIdAndWithoutCard.
+ *
+ * Simulates paying using a saved card, rather than passing card data
+ */
+ public function testPurchaseWithHostedDataIdAndWithoutCard()
+ {
+ $dataId = rand();
+ $this->options['hostedDataId'] = $dataId;
+ unset($this->options['card']);
+
+ $response = $this->gateway->purchase($this->options)->send();
+
+ $this->assertFalse($response->isSuccessful());
+ $this->assertTrue($response->isRedirect());
+ $requestData = $response->getRedirectData();
+ $this->assertEquals($dataId, $requestData['hosteddataid']);
+ }
+
+ /**
+ * testPurchaseErrorWhenMissingHostedDataIdAndWithoutCard.
+ *
+ * Simulates neither hosteddataid or card data being passed, should be caught in app.
+ *
+ * @expectedException \Omnipay\Common\Exception\InvalidRequestException
+ */
+ public function testPurchaseErrorWhenMissingHostedDataIdAndWithoutCard()
+ {
+ unset($this->options['card']);
+
+ $response = $this->gateway->purchase($this->options)->send();
+
+ $this->assertFalse($response->isSuccessful());
+ $this->assertTrue($response->isRedirect());
+ }
} | Added hosteddataid to the request data to allow saved cards, effectively making the card parameter optional. Added unit tests to check this functionality. | thephpleague_omnipay-firstdata | train |
0b36bfbb0b684d01f941f9c716e6ee9f327d1353 | diff --git a/ipyrad/assemble/write_outfiles.py b/ipyrad/assemble/write_outfiles.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/write_outfiles.py
+++ b/ipyrad/assemble/write_outfiles.py
@@ -1062,11 +1062,13 @@ def filter_maxhet(data, superseqs, edges):
to every loc based on coverage...
"""
## the filter max
+ ## The type of max_shared_Hs_locus is determined and the cast to either
+ ## int or float is made at assembly load time
maxhet = data.paramsdict["max_shared_Hs_locus"]
if isinstance(maxhet, float):
- maxhet = int(superseqs.shape[1]*float(maxhet))
+ maxhet = int(superseqs.shape[1]*maxhet)
else:
- maxhet = int(maxhet)
+ maxhet = maxhet
## an empty array to fill with failed loci
hetfilt = np.zeros(superseqs.shape[0], dtype=np.bool)
diff --git a/ipyrad/core/assembly.py b/ipyrad/core/assembly.py
index <HASH>..<HASH> 100644
--- a/ipyrad/core/assembly.py
+++ b/ipyrad/core/assembly.py
@@ -1809,7 +1809,14 @@ def paramschecker(self, param, newvalue):
self.paramsdict['min_samples_locus'] = int(newvalue)
elif param == 'max_shared_Hs_locus':
- self.paramsdict['max_shared_Hs_locus'] = newvalue
+ if newvalue.isdigit():
+ self.paramsdict['max_shared_Hs_locus'] = int(newvalue)
+ else:
+ try:
+ self.paramsdict['max_shared_Hs_locus'] = float(newvalue)
+ except Exception as inst:
+                    sys.exit("max_shared_Hs_locus must be int or float, you put: "\
+ + newvalue)
elif param == 'max_SNPs_locus':
             newvalue = tuplecheck(newvalue, int) | Fixed bug where max_shared_Hs_locus was not cast correctly, causing step 7 to error out | dereneaton_ipyrad | train |
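The cast-at-load-time rule this commit adds to paramschecker(), as an isolated sketch with a hypothetical helper name:

```python
def cast_max_shared_hs(newvalue):
    """Cast the raw parameter string to int (a count) or float (a proportion)."""
    if newvalue.isdigit():
        return int(newvalue)
    try:
        return float(newvalue)
    except ValueError:
        raise SystemExit("max_shared_Hs_locus must be int or float, you put: " + newvalue)

assert cast_max_shared_hs("4") == 4        # later treated as an absolute sample count
assert cast_max_shared_hs("0.25") == 0.25  # later treated as a fraction of samples
```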
6017b8c4bdf0ea3d94253634a7ae22dfdeb73982 | diff --git a/src/test/java/com/arangodb/ArangoDriverGraphVertexTest.java b/src/test/java/com/arangodb/ArangoDriverGraphVertexTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/arangodb/ArangoDriverGraphVertexTest.java
+++ b/src/test/java/com/arangodb/ArangoDriverGraphVertexTest.java
@@ -20,7 +20,9 @@ import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.isA;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
import org.junit.Test;
@@ -47,7 +49,7 @@ public class ArangoDriverGraphVertexTest extends BaseGraphTest {
DocumentEntity<TestComplexEntity01> vertex = driver.graphCreateVertex(
this.graphName,
"from1-1",
- new TestComplexEntity01("xxx", "yyy", 10),
+ new TestComplexEntity01("Homer", "Simpson", 38),
true);
assertThat(vertex.getDocumentHandle(), is(notNullValue()));
assertThat(vertex.getDocumentRevision(), is(not(0L)));
@@ -56,11 +58,42 @@ public class ArangoDriverGraphVertexTest extends BaseGraphTest {
DocumentEntity<TestComplexEntity01> document = driver.getDocument(
vertex.getDocumentHandle(),
TestComplexEntity01.class);
- assertThat(document.getEntity().getUser(), is("xxx"));
- assertThat(document.getEntity().getDesc(), is("yyy"));
- assertThat(document.getEntity().getAge(), is(10));
+ assertThat(document.getEntity().getUser(), is("Homer"));
+ assertThat(document.getEntity().getDesc(), is("Simpson"));
+ assertThat(document.getEntity().getAge(), is(38));
}
+
+ @Test
+ public void test_create_vertex_error_graph() throws ArangoException {
+
+ try {
+ driver.graphCreateVertex("foo", "bar", new TestComplexEntity01("Homer", "Simpson", 38), true);
+ fail();
+ } catch (ArangoException e) {
+ assertThat(e.getCode(), greaterThan(300));
+ }
+
+ }
+
+ @Test
+ public void test_create_vertex_error_collection() throws ArangoException {
+
+ driver.createGraph(this.graphName, this.createEdgeDefinitions(2, 0), this.createOrphanCollections(2), true);
+
+ try {
+ DocumentEntity<TestComplexEntity01> vertex = driver.graphCreateVertex(
+ this.graphName,
+ "foo",
+ new TestComplexEntity01("Homer", "Simpson", 38),
+ true);
+ fail();
+ } catch (ArangoException e) {
+ assertThat(e.getCode(), greaterThan(300));
+ }
+
+ }
+
/*
* // TODO: create with _key // TODO: create with _key and duplication error
* | graph: added some tests for create_vertex | arangodb_arangodb-java-driver | train |
2258199c22c9369db629b312b1ece51a77adfc42 | diff --git a/src/scales/scale.time.js b/src/scales/scale.time.js
index <HASH>..<HASH> 100644
--- a/src/scales/scale.time.js
+++ b/src/scales/scale.time.js
@@ -52,8 +52,9 @@ module.exports = function(Chart) {
var timeOpts = me.options.time;
// We store the data range as unix millisecond timestamps so dataMin and dataMax will always be integers.
- var dataMin = Number.MAX_SAFE_INTEGER;
- var dataMax = Number.MIN_SAFE_INTEGER;
+ // Integer constants are from the ES6 spec.
+ var dataMin = Number.MAX_SAFE_INTEGER || 9007199254740991;
+ var dataMax = Number.MIN_SAFE_INTEGER || -9007199254740991;
var chartData = me.chart.data;
		var parsedData = { | Add hard-coded integer constants for *_SAFE_INTEGER, which are not available on IE | chartjs_Chart.js | train |
152376aa9417736f34ddad940e54c6670e197158 | diff --git a/symphony/RESTful/__init__.py b/symphony/RESTful/__init__.py
index <HASH>..<HASH> 100644
--- a/symphony/RESTful/__init__.py
+++ b/symphony/RESTful/__init__.py
@@ -10,18 +10,21 @@ __author__ = 'Matt Joyce'
__email__ = '[email protected]'
__copyright__ = 'Copyright 2016, Symphony Communication Services LLC'
+import logging
+
from .nopkcs import NOPKCS
from .pkcs import PKCS
class RESTful(NOPKCS, PKCS):
- def __init__(self, url, session, keymngr, crt=None, key=None):
+ def __init__(self, url, session, keymngr, crt=None, key=None, logger=None):
self.__url__ = url
self.__session__ = session
self.__keymngr__ = keymngr
self.__crt__ = crt
self.__key__ = key
+ self.logger = logger or logging.getLogger(__name__)
def bool2str(self, boolval):
if boolval:
diff --git a/symphony/RESTful/nopkcs.py b/symphony/RESTful/nopkcs.py
index <HASH>..<HASH> 100644
--- a/symphony/RESTful/nopkcs.py
+++ b/symphony/RESTful/nopkcs.py
@@ -10,7 +10,6 @@ __author__ = 'Matt Joyce'
__email__ = '[email protected]'
__copyright__ = 'Copyright 2016, Symphony Communication Services LLC'
-import logging
import requests
@@ -35,8 +34,8 @@ class NOPKCS(object):
response = requests.get(self.__url__ + req_hook + str(req_args),
headers=headers,
verify=True)
- except requests.exceptions.RequestException as e:
- logging.error(e)
+ except requests.exceptions.RequestException as err:
+ self.logger.error(err)
return '500', 'Internal Error in RESTful.GET_query()'
# return the token
return response.status_code, response.text
@@ -59,8 +58,8 @@ class NOPKCS(object):
headers=headers,
data=req_args,
verify=True)
- except requests.exceptions.RequestException as e:
- logging.error(e)
+ except requests.exceptions.RequestException as err:
+ self.logger.error(err)
return '500', 'Internal Error in RESTful.POST_query()'
# return the token
return response.status_code, response.text
diff --git a/symphony/RESTful/pkcs.py b/symphony/RESTful/pkcs.py
index <HASH>..<HASH> 100644
--- a/symphony/RESTful/pkcs.py
+++ b/symphony/RESTful/pkcs.py
@@ -10,7 +10,6 @@ __author__ = 'Matt Joyce'
__email__ = '[email protected]'
__copyright__ = 'Copyright 2016, Symphony Communication Services LLC'
-import logging
import requests
@@ -37,8 +36,8 @@ class PKCS(object):
headers=headers,
cert=(self.__crt__, self.__key__),
verify=True)
- except requests.exceptions.RequestException as e:
- logging.error(e)
+ except requests.exceptions.RequestException as err:
+ self.logger.error(err)
return '500', 'Internal Error in PKCS_RESTful.GET_query()'
# return the token
return response.status_code, response.text
@@ -63,8 +62,8 @@ class PKCS(object):
data=req_args,
cert=(self.__crt__, self.__key__),
verify=True)
- except requests.exceptions.RequestException as e:
- logging.error(e)
+ except requests.exceptions.RequestException as err:
+ self.logger.error(err)
return '500', 'Internal Error in PKCS_RESTful.POST_query()'
# return the token
return response.status_code, response.text | prototype logger for RESTful class | symphonyoss_python-symphony | train |
f993ac44b528374515b41c6ca5a6c7c35b96438c | diff --git a/awsshell/resource/index.py b/awsshell/resource/index.py
index <HASH>..<HASH> 100644
--- a/awsshell/resource/index.py
+++ b/awsshell/resource/index.py
@@ -20,6 +20,7 @@ from collections import namedtuple
import jmespath
from botocore import xform_name
+from botocore.exceptions import BotoCoreError
LOG = logging.getLogger(__name__)
@@ -221,11 +222,21 @@ class ServerSideCompleter(object):
# param='InstanceIds'.
if service not in self._describer_creator.services_with_completions():
return []
- client = self._client_creator.create_client(service)
+ try:
+ client = self._client_creator.create_client(service)
+ except BotoCoreError as e:
+ # create_client() could raise an exception if the session
+ # isn't fully configured (say it's missing a region).
+ # However, we don't want to turn off all server side
+ # completions because it's still possible to create
+ # clients for some services without a region, e.g. IAM.
+ LOG.debug("Error when trying to create a client for %s",
+ service, exc_info=True)
+ return []
api_operation_name = client.meta.method_to_api_mapping.get(
operation.replace('-', '_'))
if api_operation_name is None:
- return
+ return []
# Now we need to convert the param name to the
# casing used by the API.
completer = self._describer_creator.create_completer_query(service)
@@ -235,7 +246,9 @@ class ServerSideCompleter(object):
return
try:
response = getattr(client, xform_name(result.operation, '_'))()
- except Exception:
+ except Exception as e:
+ LOG.debug("Error when calling %s.%s: %s", service,
+ result.operation, e, exc_info=True)
return
results = jmespath.search(result.path, response)
return results
diff --git a/tests/unit/test_resources.py b/tests/unit/test_resources.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_resources.py
+++ b/tests/unit/test_resources.py
@@ -1,8 +1,23 @@
"""Index and retrive information from the resource JSON."""
import pytest
+import mock
+
+from botocore.exceptions import NoRegionError
+
from awsshell.resource import index
[email protected]
+def describer_creator():
+ class FakeDescriberCreator(object):
+ SERVICES = ['ec2']
+
+ def services_with_completions(self):
+ return self.SERVICES
+
+ return FakeDescriberCreator()
+
+
def test_build_from_has_many():
resource = {
'service': {
@@ -211,14 +226,42 @@ def test_can_create_service_completers_from_cache():
assert factory.create_completer_query('ec2') == result
-def test_empty_results_returned_when_no_completion_data_exists():
- class FakeDescriberCreator(object):
- def services_with_completions(self):
- return []
+def test_empty_results_returned_when_no_completion_data_exists(describer_creator):
+ describer_creator.SERVICES = []
completer = index.ServerSideCompleter(
client_creator=None,
- describer_creator=FakeDescriberCreator()
+ describer_creator=describer_creator,
)
assert completer.retrieve_candidate_values(
'ec2', 'run-instances', 'ImageId') == []
+
+
+def test_no_completions_when_cant_create_client(describer_creator):
+ client_creator = mock.Mock(spec=index.CachedClientCreator)
+ # This is raised when you don't have a region configured via config file
+ # env var or manually via a session.
+ client_creator.create_client.side_effect = NoRegionError()
+ completer = index.ServerSideCompleter(
+ client_creator=client_creator,
+ describer_creator=describer_creator)
+
+ assert completer.retrieve_candidate_values(
+ 'ec2', 'foo', 'Bar') == []
+
+
+def test_no_completions_returned_on_unknown_operation(describer_creator):
+ client = mock.Mock()
+ client_creator = mock.Mock(spec=index.CachedClientCreator)
+ client_creator.create_client.return_value = client
+
+ client.meta.method_to_api_mapping = {
+ 'describe_foo': 'DescribeFoo'
+ }
+
+ completer = index.ServerSideCompleter(
+ client_creator=client_creator,
+ describer_creator=describer_creator)
+
+ assert completer.retrieve_candidate_values(
+ 'ec2', 'not_describe_foo', 'Bar') == [] | Fail gracefully when no region is configured
If we can't create a client for server side completion,
we should not propagate an exception. Instead we should
return no server side completion values.
In the future, it would be nice to have some sort of
notification area in the shell where we could let the
user know that server side completion won't work because
they don't have a region configured.
Fixes #<I>. | awslabs_aws-shell | train |
07f4c755caac74d292536723b1dcdd1402f58ddb | diff --git a/contrib/parseq-restli-client/src/test/java/com/linkedin/restli/client/ParSeqRestClientIntegrationTest.java b/contrib/parseq-restli-client/src/test/java/com/linkedin/restli/client/ParSeqRestClientIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/contrib/parseq-restli-client/src/test/java/com/linkedin/restli/client/ParSeqRestClientIntegrationTest.java
+++ b/contrib/parseq-restli-client/src/test/java/com/linkedin/restli/client/ParSeqRestClientIntegrationTest.java
@@ -83,8 +83,10 @@ public abstract class ParSeqRestClientIntegrationTest extends BaseEngineTest {
@BeforeClass
public void init() throws Exception {
_serverScheduler = Executors.newScheduledThreadPool(Runtime.getRuntime().availableProcessors() + 1);
- _serverEngine = new EngineBuilder().setTaskExecutor(_serverScheduler).setTimerScheduler(_serverScheduler)
- .setPlanDeactivationListener(_batchingSupport).build();
+ EngineBuilder serverEngineBuilder = new EngineBuilder();
+ serverEngineBuilder.setTaskExecutor(_serverScheduler).setTimerScheduler(_serverScheduler)
+ .setPlanDeactivationListener(_batchingSupport);
+ _serverEngine = serverEngineBuilder.build();
_server = RestLiIntTestServer.createServer(_serverEngine, _port,
RestLiIntTestServer.supportedCompression, true, 5000);
_server.start(); | Fixed backwards incompatibility issue due to <I> patch. | linkedin_parseq | train |
f432a7c980bab11b2d5a43958e6b445418345aa0 | diff --git a/src/Storage/Field/Collection/LazyFieldCollection.php b/src/Storage/Field/Collection/LazyFieldCollection.php
index <HASH>..<HASH> 100644
--- a/src/Storage/Field/Collection/LazyFieldCollection.php
+++ b/src/Storage/Field/Collection/LazyFieldCollection.php
@@ -76,10 +76,14 @@ class LazyFieldCollection extends AbstractLazyCollection implements FieldCollect
if ($this->references) {
$repo = $this->em->getRepository(FieldValue::class);
$instances = $repo->findBy(['id' => $this->references]);
+ if ($instances === false) {
+ return;
+ }
- foreach ((array) $instances as $val) {
- $fieldtype = $val->getFieldtype();
- $field = $this->em->getFieldManager()->getFieldFor($fieldtype);
+ /** @var FieldValue $val */
+ foreach ($instances as $val) {
+ $fieldType = $val->getFieldType();
+ $field = $this->em->getFieldManager()->getFieldFor($fieldType);
$type = $field->getStorageType();
$typeCol = 'value_' . $type->getName();
@@ -89,7 +93,7 @@ class LazyFieldCollection extends AbstractLazyCollection implements FieldCollect
function ($errNo, $errStr, $errFile) {},
E_WARNING
);
- $hydratedVal = $this->em->getEntityBuilder($val->getContenttype())->getHydratedValue($val->$typeCol, $val->getName(), $val->getFieldname());
+ $hydratedVal = $this->em->getEntityBuilder($val->getContenttype())->getHydratedValue($val->$typeCol, $val->getName(), $val->getFieldName());
restore_error_handler();
// If we do not have a hydrated value returned then we fall back to the one passed in | Don't cast boolean to array.
$repo->findBy() can return false, which when cast to an array will result in [0 => false] | bolt_bolt | train |
1bdb310fc6d684c680a8748b64d977ec450ff71e | diff --git a/spec/lib/twingly/url_spec.rb b/spec/lib/twingly/url_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/twingly/url_spec.rb
+++ b/spec/lib/twingly/url_spec.rb
@@ -155,14 +155,14 @@ describe Twingly::URL do
end
end
- context "with url containing starting and trailing new lines" do
+ context "with url containing leading and trailing new lines" do
let(:test_url) { "\nhttp://www.twingly.com/blog-data/\r\n" }
let(:expected) { "http://www.twingly.com/blog-data/" }
it { is_expected.to eq(expected) }
end
- context "with url containing starting and trailing whitespaces" do
+ context "with url containing leading and trailing whitespaces" do
let(:test_url) { " http://www.twingly.com/blog-data/ " }
let(:expected) { "http://www.twingly.com/blog-data/" }
@@ -177,7 +177,7 @@ describe Twingly::URL do
end
leading_and_trailing_whitespace.each do |whitespace_name, whitespace|
- context "with url containing starting and trailing: #{whitespace_name}" do
+ context "with url containing leading and trailing: #{whitespace_name}" do
let(:test_url) { "#{whitespace}https://www.example.com/#{whitespace}" }
let(:expected) { "https://www.example.com/" } | Terminology: starting -> leading
Seems more common to use "leading" than "starting" when talking about
whitespace. | twingly_twingly-url | train |
a969a62a7f1b7927f9a1a55f4492f5fc4129631a | diff --git a/components/lib/dropdown/DropdownItem.js b/components/lib/dropdown/DropdownItem.js
index <HASH>..<HASH> 100644
--- a/components/lib/dropdown/DropdownItem.js
+++ b/components/lib/dropdown/DropdownItem.js
@@ -17,7 +17,7 @@ export const DropdownItem = React.memo((props) => {
'p-highlight': props.selected,
'p-disabled': props.disabled,
'p-dropdown-item-empty': (!props.label || props.label.length === 0)
- }, props.option.className);
+ }, props.option && props.option.className);
const content = props.template ? ObjectUtils.getJSXElement(props.template, props.option) : props.label;
return ( | Fixed #<I> - If the item's className option is null, Dropdown will throw a JS exception. | primefaces_primereact | train |
112b28e420dbd1dcebfbd297de928c02b0a1c8b3 | diff --git a/accentuation.py b/accentuation.py
index <HASH>..<HASH> 100644
--- a/accentuation.py
+++ b/accentuation.py
@@ -1,5 +1,5 @@
from characters import add_diacritic, ACUTE, CIRCUMFLEX
-from syllabify import onset_nucleus_coda
+from syllabify import onset_nucleus_coda, syllabify, SHORT, LONG, UNKNOWN, syllable_length
def syllable_add_accent(s, a):
@@ -7,8 +7,58 @@ def syllable_add_accent(s, a):
return o + add_diacritic(n, a) + c
+def possible_accentuations(w):
+ s = ["".join(x) for x in syllabify(w)]
+ ultima_length = syllable_length(s[-1], True)
+ penult_length = syllable_length(s[-2], False)
+ if ultima_length == SHORT:
+ # proparoxytone
+ yield "".join(s[:-3]) + syllable_add_accent(s[-3], ACUTE) + "".join(s[-2:])
+ if penult_length == SHORT:
+ # paroxytone
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], ACUTE) + s[-1]
+ elif penult_length == LONG:
+ # properispomenon
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], CIRCUMFLEX) + s[-1]
+ elif penult_length == UNKNOWN:
+ # paroxytone (conditional on short penult)
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], ACUTE) + s[-1]
+ # properispomenon (conditional on long penult)
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], CIRCUMFLEX) + s[-1]
+ # oxytone
+ yield "".join(s[:-1]) + syllable_add_accent(s[-1], ACUTE)
+ elif ultima_length == LONG:
+ # paroxytone
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], ACUTE) + s[-1]
+ # oxytone
+ yield "".join(s[:-1]) + syllable_add_accent(s[-1], ACUTE)
+ # perispomenon
+ yield "".join(s[:-1]) + syllable_add_accent(s[-1], CIRCUMFLEX)
+ elif ultima_length == UNKNOWN:
+ # proparoxytone (conditional on short ultima)
+ yield "".join(s[:-3]) + syllable_add_accent(s[-3], ACUTE) + "".join(s[-2:])
+ if penult_length == SHORT:
+ # paroxytone
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], ACUTE) + s[-1]
+ elif penult_length == LONG:
+ # properispomenon (conditional on short ultima)
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], CIRCUMFLEX) + s[-1]
+ elif penult_length == UNKNOWN:
+ # paroxytone (conditional on short penult)
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], ACUTE) + s[-1]
+ # properispomenon (conditional on long penult)
+ yield "".join(s[:-2]) + syllable_add_accent(s[-2], CIRCUMFLEX) + s[-1]
+        # perispomenon (conditional on long ultima)
+ yield "".join(s[:-1]) + syllable_add_accent(s[-1], CIRCUMFLEX)
+ # oxytone
+ yield "".join(s[:-1]) + syllable_add_accent(s[-1], ACUTE)
+
+
if __name__ == "__main__":
assert syllable_add_accent("κος", ACUTE) == "κός"
assert syllable_add_accent("ος", ACUTE) == "ός"
assert syllable_add_accent("ου", CIRCUMFLEX) == "οῦ"
assert syllable_add_accent("φως", CIRCUMFLEX) == "φῶς"
+
+ for w in possible_accentuations("γυναικος"):
+ print(w) | first pass at possible_accentuations generator | jtauber_greek-accentuation | train |
1ae136973efe54da8e2103c2ddd760540c18a0d4 | diff --git a/posttroll/subscriber.py b/posttroll/subscriber.py
index <HASH>..<HASH> 100644
--- a/posttroll/subscriber.py
+++ b/posttroll/subscriber.py
@@ -298,9 +298,10 @@ class Subscribe(object):
for service in self._services:
addr = _get_addr_loop(service, self._timeout)
if not addr:
- raise TimeoutError("Can't get address for " + service)
-
- logger.debug("GOT address " + str(service) + " " + str(addr))
+ logger.warning("Can't get any address for " + service)
+ else:
+ logger.debug("Got address for " + str(service)
+ + ": " + str(addr))
self._addresses.extend(addr)
# Subscribe to those services and topics. | A subscribe context doesn't need a publisher to start anymore. | pytroll_posttroll | train |
c55ff23a3c4f4c0d972addba74a916f898d13b94 | diff --git a/lib/rules/indent.js b/lib/rules/indent.js
index <HASH>..<HASH> 100644
--- a/lib/rules/indent.js
+++ b/lib/rules/indent.js
@@ -46,7 +46,7 @@ module.exports = function (context) {
if (context.options[0] === "tab") {
indentChar = "\t";
indentSize = 1;
- } else if (typeof context.options[0] === "number") {
+ } else /* istanbul ignore else : this will be caught by options validation */ if (typeof context.options[0] === "number") {
indentSize = context.options[0];
} | Build: Unblock build by increasing code coverage | eslint_eslint | train |
55fd8002c736e53ef07d4050224cfb9c10c7c080 | diff --git a/template/helper/Form.php b/template/helper/Form.php
index <HASH>..<HASH> 100644
--- a/template/helper/Form.php
+++ b/template/helper/Form.php
@@ -418,7 +418,7 @@ class Form extends \lithium\template\Helper {
* parameters. By default, the label text is a human-friendly version of `$name`.
* However, you can specify the label manually as a string, or both the label
* text and options as an array, i.e.:
- * `array('label text' => array('class' => 'foo', 'any' => 'other options'))`.
+ * `array('Your Label Title' => array('class' => 'foo', 'other' => 'options'))`.
* - `'type'` _string_: The type of form field to render. Available default options
* are: `'text'`, `'textarea'`, `'select'`, `'checkbox'`, `'password'` or
* `'hidden'`, as well as any arbitrary type (i.e. HTML5 form fields). | Making docblock for `Form::field()` less ambiguous. | UnionOfRAD_lithium | train |
4d5a979995f1ae1dfd56fa0e45b07604c06bbd1e | diff --git a/core/src/main/java/jenkins/model/Jenkins.java b/core/src/main/java/jenkins/model/Jenkins.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/jenkins/model/Jenkins.java
+++ b/core/src/main/java/jenkins/model/Jenkins.java
@@ -2239,14 +2239,38 @@ public class Jenkins extends AbstractCIBase implements DirectlyModifiableTopLeve
* but we also call this periodically to self-heal any data out-of-sync issue.
*/
/*package*/ void trimLabels() {
+ trimLabels((Set) null);
+ }
+
+ /**
+ * Reset labels and remove invalid ones for the given nodes.
+ * @param nodes the nodes taken as reference to update labels
+ */
+ void trimLabels(Node... nodes) {
+ Set<LabelAtom> includedLabels = new HashSet<>();
+ Arrays.asList(nodes).stream().filter(Objects::nonNull).forEach(n -> includedLabels.addAll(n.getAssignedLabels()));
+ trimLabels(includedLabels);
+ }
+
+ /**
+ * Reset labels and remove invalid ones for the given nodes.
+ * @param includedLabels the labels taken as reference to update labels. If {@code null}, all labels are considered.
+ */
+ private void trimLabels(@CheckForNull Set<LabelAtom> includedLabels) {
Set<Label> nodeLabels = new HashSet<>(this.getAssignedLabels());
this.getNodes().forEach(n -> nodeLabels.addAll(n.getAssignedLabels()));
for (Iterator<Label> itr = labels.values().iterator(); itr.hasNext();) {
Label l = itr.next();
- if (nodeLabels.contains(l) || this.clouds.stream().anyMatch(c -> c.canProvision(l))) {
- resetLabel(l);
- } else {
- itr.remove();
+ if (includedLabels == null || includedLabels.contains(l)) {
+ if (nodeLabels.contains(l) || !l.getClouds().isEmpty()) {
+ // there is at least one static agent or one cloud that currently claims it can handle the label.
+                // if the cloud has been removed, or its labels updated such that it can not handle this, this is handled in later calls
+ // resetLabel will remove the agents, and clouds from the label, and they will be repopulated later.
+ // not checking `cloud.canProvision()` here prevents a potential call that will only be repeated later
+ resetLabel(l);
+ } else {
+ itr.remove();
+ }
}
}
}
diff --git a/core/src/main/java/jenkins/model/Nodes.java b/core/src/main/java/jenkins/model/Nodes.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/jenkins/model/Nodes.java
+++ b/core/src/main/java/jenkins/model/Nodes.java
@@ -32,6 +32,7 @@ import hudson.model.Computer;
import hudson.model.Node;
import hudson.model.Queue;
import hudson.model.Saveable;
+import hudson.model.labels.LabelAtom;
import hudson.model.listeners.SaveableListener;
import hudson.slaves.EphemeralNode;
import hudson.slaves.OfflineCause;
@@ -141,7 +142,7 @@ public class Nodes implements Saveable {
AtomicReference<Node> old = new AtomicReference<>();
old.set(nodes.put(node.getNodeName(), node));
jenkins.updateNewComputer(node);
- jenkins.trimLabels();
+ jenkins.trimLabels(node, oldNode);
// TODO there is a theoretical race whereby the node instance is updated/removed after lock release
try {
persistNode(node);
@@ -153,7 +154,7 @@ public class Nodes implements Saveable {
public void run() {
nodes.compute(node.getNodeName(), (ignoredNodeName, ignoredNode) -> oldNode);
jenkins.updateComputerList();
- jenkins.trimLabels();
+ jenkins.trimLabels(node, oldNode);
}
});
throw e;
@@ -201,7 +202,7 @@ public class Nodes implements Saveable {
@Override
public Boolean call() throws Exception {
if (node == nodes.get(node.getNodeName())) {
- jenkins.trimLabels();
+ jenkins.trimLabels(node);
return true;
}
return false;
@@ -242,7 +243,7 @@ public class Nodes implements Saveable {
Nodes.this.nodes.remove(oldOne.getNodeName());
Nodes.this.nodes.put(newOne.getNodeName(), newOne);
jenkins.updateComputerList();
- jenkins.trimLabels();
+ jenkins.trimLabels(oldOne, newOne);
}
});
updateNode(newOne);
@@ -276,7 +277,7 @@ public class Nodes implements Saveable {
}
if (node == nodes.remove(node.getNodeName())) {
jenkins.updateComputerList();
- jenkins.trimLabels();
+ jenkins.trimLabels(node);
}
}
}); | [JENKINS-<I>] Make trim labels more selective when we're operating on selected nodes (#<I>) | jenkinsci_jenkins | train |
e3fcf791239c0e9ba5c04686025f578b561ecd6f | diff --git a/py/conftest.py b/py/conftest.py
index <HASH>..<HASH> 100644
--- a/py/conftest.py
+++ b/py/conftest.py
@@ -19,21 +19,16 @@ import os
import platform
import socket
import subprocess
-import sys
import time
import pytest
-# from _pytest.skipping import MarkEvaluator
from selenium import webdriver
from selenium.webdriver import DesiredCapabilities
from test.selenium.webdriver.common.webserver import SimpleWebServer
from test.selenium.webdriver.common.network import get_lan_ip
-if sys.version_info[0] == 3:
- from urllib.request import urlopen
-else:
- from urllib import urlopen
+from urllib.request import urlopen
drivers = (
'Chrome', | [py] Clean up imports in conftest | SeleniumHQ_selenium | train |
42664558050fe40279521e82c7b43b681e3e4507 | diff --git a/django_extensions/management/commands/create_app.py b/django_extensions/management/commands/create_app.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/create_app.py
+++ b/django_extensions/management/commands/create_app.py
@@ -2,6 +2,7 @@ import os
import re
import sys
import django_extensions
+from django import VERSION
from django.conf import settings
from django.db import connection
from django.core.management.base import CommandError, LabelCommand
@@ -73,6 +74,7 @@ def copy_template(app_template, copy_to, project_name, app_name):
"""copies the specified template directory to the copy_to location"""
import shutil
+ copy_migrations = True if VERSION[:2] >= (1, 7) else False
app_template = os.path.normpath(app_template)
# walks the template structure and copies it
for d, subdirs, files in os.walk(app_template):
@@ -83,6 +85,8 @@ def copy_template(app_template, copy_to, project_name, app_name):
for i, subdir in enumerate(subdirs):
if subdir.startswith('.'):
del subdirs[i]
+ elif subdir.startswith('migrations') and not copy_migrations:
+ del subdirs[i]
replacements = {'app_name': app_name, 'project_name': project_name}
replacements.update(REPLACEMENTS)
for f in files: | Implement copying the migrations directory in Django>=<I>
If the installed Django version >=<I>, the create_app
command will copy the migrations folder to match the default
create app folder structure
In older versions, this folder is not copied | django-extensions_django-extensions | train |
43891309ada546674c30923a461ae18c94d4307e | diff --git a/reader.go b/reader.go
index <HASH>..<HASH> 100644
--- a/reader.go
+++ b/reader.go
@@ -199,6 +199,10 @@ func (r *reader) waitAvailable(pos, wanted int64, ctxErr *error, wait bool) (ava
err = *ctxErr
return
}
+ if r.t.dataDownloadDisallowed || !r.t.networkingEnabled {
+ err = errors.New("downloading disabled and data not already available")
+ return
+ }
if !wait {
return
}
diff --git a/test/issue377_test.go b/test/issue377_test.go
index <HASH>..<HASH> 100644
--- a/test/issue377_test.go
+++ b/test/issue377_test.go
@@ -2,6 +2,8 @@ package test
import (
"errors"
+ "io"
+ "io/ioutil"
"os"
"sync"
"testing"
@@ -73,7 +75,27 @@ func testReceiveChunkStorageFailure(t *testing.T, seederFast bool) {
// Tell the seeder to find the leecher. Is it guaranteed seeders will always try to do this?
seederTorrent.AddClientPeer(leecherClient)
<-leecherTorrent.GotInfo()
- assertReadAllGreeting(t, leecherTorrent.NewReader())
+ r := leecherTorrent.Files()[0].NewReader()
+ defer r.Close()
+ // We can't use assertReadAllGreeting here, because the default storage write error handler
+ // disables data downloads, which now causes Readers to error when they're blocked.
+ if false {
+ assertReadAllGreeting(t, leecherTorrent.NewReader())
+ } else {
+ for func() bool {
+ // We don't seem to need to seek, but that's probably just because the storage failure is
+ // happening on the first read.
+ r.Seek(0, io.SeekStart)
+ output, err := ioutil.ReadAll(r)
+ if err != nil {
+ t.Logf("got error while reading: %v", err)
+ return true
+ }
+ assert.EqualValues(t, testutil.GreetingFileContents, output)
+ return false
+ }() {
+ }
+ }
// TODO: Check that PeerConns fastEnabled matches seederFast?
//select {}
}
diff --git a/torrent.go b/torrent.go
index <HASH>..<HASH> 100644
--- a/torrent.go
+++ b/torrent.go
@@ -2024,6 +2024,7 @@ func (t *Torrent) onWriteChunkErr(err error) {
go t.userOnWriteChunkErr(err)
return
}
+ t.logger.WithDefaultLevel(log.Critical).Printf("default chunk write error handler: disabling data download")
t.disallowDataDownloadLocked()
}
@@ -2038,12 +2039,14 @@ func (t *Torrent) disallowDataDownloadLocked() {
t.iterPeers(func(c *peer) {
c.updateRequests()
})
+ t.tickleReaders()
}
func (t *Torrent) AllowDataDownload() {
t.cl.lock()
defer t.cl.unlock()
t.dataDownloadDisallowed = false
+ t.tickleReaders()
t.iterPeers(func(c *peer) {
c.updateRequests()
}) | Return errors from Reader if data downloading won't occur
Chunk write errors to storage can disable data download. Previously Readers would wait indefinitely for the data to become available. This change returns an error instead of stalling. | anacrolix_torrent | train |
477c60306d35cd2208b0b58d490b1b3976605ddb | diff --git a/apiserver/service/service.go b/apiserver/service/service.go
index <HASH>..<HASH> 100644
--- a/apiserver/service/service.go
+++ b/apiserver/service/service.go
@@ -104,7 +104,8 @@ func (api *API) ServicesDeployWithPlacement(args params.ServicesDeploy) (params.
}
// ServicesDeployWithBindings fetches the charms from the charm store and deploys them
-// using the specified placement directives and saving the specified space bindings.
+// using the specified placement directives and saving the specified endpoint bindings.
+// It is identical to ServicesDeployWithPlacement, but only exists when the API supports bindings.
func (api *API) ServicesDeployWithBindings(args params.ServicesDeploy) (params.ErrorResults, error) {
return api.ServicesDeployWithPlacement(args)
}
diff --git a/apiserver/service/service_test.go b/apiserver/service/service_test.go
index <HASH>..<HASH> 100644
--- a/apiserver/service/service_test.go
+++ b/apiserver/service/service_test.go
@@ -375,7 +375,7 @@ func (s *serviceSuite) TestClientServiceDeployWithInvalidPlacement(c *gc.C) {
c.Assert(results.Results[0].Error.Error(), gc.Matches, ".* invalid placement is invalid")
}
-func (s *serviceSuite) TestClientServicesDeployWithBindings(c *gc.C) {
+func (s *serviceSuite) testClientServicesDeployWithBindings(c *gc.C, endpointBindings, expected map[string]string) {
curl, _ := s.UploadCharm(c, "utopic/riak-42", "riak")
var cons constraints.Value
@@ -384,10 +384,9 @@ func (s *serviceSuite) TestClientServicesDeployWithBindings(c *gc.C) {
CharmUrl: curl.String(),
NumUnits: 1,
Constraints: cons,
- EndpointBindings: map[string]string{"endpoint": "a-space"},
+ EndpointBindings: endpointBindings,
}
- s.State.AddSpace("a-space", "", nil, true)
results, err := s.serviceApi.ServicesDeployWithBindings(params.ServicesDeploy{
Services: []params.ServiceDeploy{args}},
)
@@ -400,10 +399,29 @@ func (s *serviceSuite) TestClientServicesDeployWithBindings(c *gc.C) {
retrievedBindings, err := service.EndpointBindings()
c.Assert(err, jc.ErrorIsNil)
- expected := map[string]string{"endpoint": "a-space", "ring": "default", "admin": "default"}
c.Assert(retrievedBindings, jc.DeepEquals, expected)
}
+func (s *serviceSuite) TestClientServicesDeployWithBindings(c *gc.C) {
+ s.State.AddSpace("a-space", "", nil, true)
+ expected := map[string]string{
+ "endpoint": "a-space",
+ "ring": "default",
+ "admin": "default",
+ }
+ endpointBindings := map[string]string{"endpoint": "a-space"}
+ s.testClientServicesDeployWithBindings(c, endpointBindings, expected)
+}
+
+func (s *serviceSuite) TestClientServicesDeployWithDefaultBindings(c *gc.C) {
+ expected := map[string]string{
+ "endpoint": "default",
+ "ring": "default",
+ "admin": "default",
+ }
+ s.testClientServicesDeployWithBindings(c, nil, expected)
+}
+
// TODO(wallyworld) - the following charm tests have been moved from the apiserver/client
// package in order to use the fake charm store testing infrastructure. They are legacy tests
// written to use the api client instead of the apiserver logic. They need to be rewritten and | Added test for default bindings vs specified ones.
Some cleanups. | juju_juju | train |
c9201c7a588ef101bd4b25c6a038a22e51cf6ebc | diff --git a/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/ProcessUtils.java b/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/ProcessUtils.java
index <HASH>..<HASH> 100644
--- a/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/ProcessUtils.java
+++ b/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/ProcessUtils.java
@@ -97,7 +97,8 @@ public class ProcessUtils {
}
public int getUnixPID(Process process) {
- Preconditions.checkArgument(process.getClass().getName().equals("java.lang.UNIXProcess"));
+ // older java versions have UNIXProcess, newer have ProcessImpl. Both have a pid field we can access
+ Preconditions.checkArgument(process.getClass().getName().equals("java.lang.UNIXProcess") || process.getClass().getName().equals("java.lang.ProcessImpl"));
Class<?> clazz = process.getClass(); | Update ProcessUtils to work with java<I> | HubSpot_Singularity | train |
04a2143326dd3eb2a9ebecd45e87edd35230db6c | diff --git a/src/history/html5.js b/src/history/html5.js
index <HASH>..<HASH> 100644
--- a/src/history/html5.js
+++ b/src/history/html5.js
@@ -73,7 +73,7 @@ export class HTML5History extends History {
export function getLocation (base: string): string {
let path = decodeURI(window.location.pathname)
- if (base && path.indexOf(base) === 0) {
+ if (base && path.toLowerCase().indexOf(base.toLowerCase()) === 0) {
path = path.slice(base.length)
}
return (path || '/') + window.location.search + window.location.hash | fix(html5): make base case insensitive
Fix #<I>
This is mostly a convenience, so the base is always matched case-insensitively.
If there are any case-sensitive requirements, the check should be made server-side. | vuejs_vue-router | train
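For illustration, the matching logic transliterated to Python (a sketch, not project code; names are illustrative):

```python
def get_location(pathname, base):
    """Strip `base` from `pathname` case-insensitively, as in the patch above."""
    if base and pathname.lower().startswith(base.lower()):
        pathname = pathname[len(base):]
    return pathname or '/'

assert get_location('/App/user', '/app') == '/user'   # case-insensitive match
assert get_location('/other', '/app') == '/other'     # non-matching base kept
```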
cefda14e16b903ff6f37da2a85dfb758d8ca27aa | diff --git a/example/index.php b/example/index.php
index <HASH>..<HASH> 100644
--- a/example/index.php
+++ b/example/index.php
@@ -1,7 +1,7 @@
<?php
require 'vendor/autoload.php';
-use AfricasTalking\AfricasTalking;
+use AfricasTalking\SDK\AfricasTalking;
$username = "sandbox";
$apiKey = "YOUR_SANDBOX_API_KEY"; | [FIX] correction of namespace in example | AfricasTalkingLtd_africastalking-php | train |
f309669632f1c17cff097af2d9e87fd30d594081 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -316,6 +316,10 @@ module.exports = (command, args, options) => {
spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected);
spawned.catch = onrejected => handlePromise().catch(onrejected);
+ // eslint-disable-next-line no-use-extend-native/no-use-extend-native
+ if (Promise.prototype.finally) {
+ spawned.finally = onfinally => handlePromise().finally(onfinally);
+ }
return spawned;
};
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -519,3 +519,39 @@ test('removes exit handler on exit', async t => {
const included = ee.listeners('exit').includes(listener);
t.false(included);
});
+
+// eslint-disable-next-line no-use-extend-native/no-use-extend-native
+if (Promise.prototype.finally) {
+ test('finally function is executed on success', async t => {
+ let called = false;
+ const {stdout} = await m('noop', ['foo']).finally(() => {
+ called = true;
+ });
+ t.is(called, true);
+ t.is(stdout, 'foo');
+ });
+
+ test('finally function is executed on failure', async t => {
+ let called = false;
+ const err = await t.throws(m('exit', ['2']).finally(() => {
+ called = true;
+ }));
+ t.is(called, true);
+ t.is(typeof err.stdout, 'string');
+ t.is(typeof err.stderr, 'string');
+ });
+
+ test('throw in finally function bubbles up on success', async t => {
+ const result = await t.throws(m('noop', ['foo']).finally(() => {
+ throw new Error('called');
+ }));
+ t.is(result.message, 'called');
+ });
+
+ test('throw in finally bubbles up on error', async t => {
+ const result = await t.throws(m('exit', ['2']).finally(() => {
+ throw new Error('called');
+ }));
+ t.is(result.message, 'called');
+ });
+} | Make the Promise interface complete by adding a `.finally()` method (#<I>)
Fixes #<I> | sindresorhus_execa | train |
4e780e30b9a2511b93b3c42c88afc93536421787 | diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -29,7 +29,7 @@ return array(
'label' => 'QTI test model',
'description' => 'TAO QTI test implementation',
'license' => 'GPL-2.0',
- 'version' => '14.1.6',
+ 'version' => '15.0.0',
'author' => 'Open Assessment Technologies',
'requires' => array(
'taoTests' => '>=6.4.0',
diff --git a/scripts/update/Updater.php b/scripts/update/Updater.php
index <HASH>..<HASH> 100644
--- a/scripts/update/Updater.php
+++ b/scripts/update/Updater.php
@@ -1589,6 +1589,6 @@ class Updater extends \common_ext_ExtensionUpdater {
$this->setVersion('14.1.5');
}
- $this->skip('14.1.5', '14.1.6');
+ $this->skip('14.1.5', '15.0.0');
}
} | Bump to version <I> | oat-sa_extension-tao-testqti | train |
df17de8dc0092778bf2f593ecffdbee70c406c88 | diff --git a/etrago/appl.py b/etrago/appl.py
index <HASH>..<HASH> 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -54,7 +54,7 @@ args = {# Setup and Configuration:
'solver': 'gurobi', # glpk, cplex or gurobi
# Export options:
'lpfile': False, # state if and where you want to save pyomo's lp file: False or /path/tofolder
- 'results': '/home/lukas_wienholt/results/NEP_full_EHVk250_t3',#'/home/openego/pf_results/110paper/noEHVcluster/NEP2035_k500_t5', # state if and where you want to save results as csv: False or /path/tofolder
+ 'results': '/home/lukas_wienholt/results/NEP+_full_EHVk250_t3',#'/home/openego/pf_results/110paper/noEHVcluster/NEP2035_k500_t5', # state if and where you want to save results as csv: False or /path/tofolder
'export': False, # state if you want to export the results back to the database
# Settings:
'storage_extendable': True, # state if you want storages to be installed at each node if necessary.
@@ -297,7 +297,7 @@ def etrago(args):
# Siedenbrünzow/Sanitz
#network.generators.p_nom.loc[(network.generators.bus == '27541') & (network.generators.carrier == 'wind')] = 1800 #0
# Wilhemshaven2
-# network.generators.p_nom.loc[(network.generators.bus == '26892') & (network.generators.carrier == 'wind')] = 2400 #0
+ network.generators.p_nom.loc[(network.generators.bus == '26892') & (network.generators.carrier == 'wind')] = 2400 #0
# Segeberg
# network.generators.p_nom.loc[(network.generators.bus == '24876') & (network.generators.carrier == 'wind')] = 1800 #0
@@ -368,8 +368,8 @@ if __name__ == '__main__':
network = etrago(args)
# plots
# make a line loading plot
- plot_line_loading(network)
+ # plot_line_loading(network)
# plot stacked sum of nominal power for each generator type and timestep
- plot_stacked_gen(network, resolution="MW")
+ # plot_stacked_gen(network, resolution="MW")
# plot to show extendable storages
- storage_expansion(network)
+ # storage_expansion(network) | set params for nep+ | openego_eTraGo | train |
aa83cb076a673ace2069d71a14c77296448ce589 | diff --git a/nifty-client/src/main/java/com/facebook/nifty/client/NiftyClient.java b/nifty-client/src/main/java/com/facebook/nifty/client/NiftyClient.java
index <HASH>..<HASH> 100644
--- a/nifty-client/src/main/java/com/facebook/nifty/client/NiftyClient.java
+++ b/nifty-client/src/main/java/com/facebook/nifty/client/NiftyClient.java
@@ -194,7 +194,7 @@ public class NiftyClient implements Closeable
throw new TTransportException(message, f.getCause());
}
- if (f.isSuccess() && (channel != null)) {
+ if (f.isSuccess() && channel != null) {
if (channel.isOpen()) {
allChannels.add(channel);
}
diff --git a/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java b/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java
index <HASH>..<HASH> 100644
--- a/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java
+++ b/nifty-core/src/main/java/com/facebook/nifty/core/NiftyDispatcher.java
@@ -110,7 +110,7 @@ public class NiftyDispatcher extends SimpleChannelUpstreamHandler
// of an estimate than a hard limit. Netty may continue to decode and process several
// more requests that were in the latest read, even while further reads on the channel
// have been blocked.
- if ((requestSequenceId > lastResponseWrittenId.get() + queuedResponseLimit) &&
+ if (requestSequenceId > lastResponseWrittenId.get() + queuedResponseLimit &&
!DispatcherContext.isChannelReadBlocked(ctx))
{
DispatcherContext.blockChannelReads(ctx);
diff --git a/pom.xml b/pom.xml
index <HASH>..<HASH> 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@ limitations under the License.
<parent>
<groupId>com.facebook</groupId>
<artifactId>facebook-oss-pom</artifactId>
- <version>4</version>
+ <version>5</version>
</parent>
<groupId>com.facebook.nifty</groupId> | Update facebook-oss-pom to v5 from v4 | facebookarchive_nifty | train |
dbdf7dc85177bb9c042bbe0cc330a7118f16aaaa | diff --git a/lib/svtplay_dl/service/__init__.py b/lib/svtplay_dl/service/__init__.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/__init__.py
+++ b/lib/svtplay_dl/service/__init__.py
@@ -22,7 +22,10 @@ class Service(object):
self.cookies = {}
self.auto_name = None
self.output = {"title": None, "season": None, "episode": None, "episodename": None,
- "id": None, "service": self.__class__.__name__.lower()}
+ "id": None, "service": self.__class__.__name__.lower(),
+ "tvshow": None, "title_nice": None, "showdescription": None,
+ "episodedescription": None, "showthumbnailurl": None,
+ "episodethumbnailurl": None}
if not http:
self.http = HTTP(config)
else:
diff --git a/lib/svtplay_dl/service/barnkanalen.py b/lib/svtplay_dl/service/barnkanalen.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/barnkanalen.py
+++ b/lib/svtplay_dl/service/barnkanalen.py
@@ -53,6 +53,7 @@ class Barnkanalen(Svtplay):
self.config.set("live", janson["video"]["live"])
self.outputfilename(janson["video"])
+ self.extrametadata(janson)
if "programVersionId" in janson["video"]:
vid = janson["video"]["programVersionId"]
diff --git a/lib/svtplay_dl/service/svtplay.py b/lib/svtplay_dl/service/svtplay.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/svtplay.py
+++ b/lib/svtplay_dl/service/svtplay.py
@@ -70,6 +70,7 @@ class Svtplay(Service, OpenGraphThumbMixin):
janson = json.loads(match.group(1))["videoPage"]
self.outputfilename(janson["video"])
+ self.extrametadata(janson)
if "programVersionId" in janson["video"]:
vid = janson["video"]["programVersionId"]
@@ -266,3 +267,34 @@ class Svtplay(Service, OpenGraphThumbMixin):
return season, episode
else:
return None, None
+
+ def extrametadata(self, data):
+ self.output["tvshow"] = (self.output["season"] is not None and
+ self.output["episode"] is not None)
+ try:
+ title = data["video"]["programTitle"]
+ self.output["title_nice"] = title
+ except:
+ title = data["video"]["titleSlug"]
+ self.output["title_nice"] = title
+ try:
+ # Get the image if size/format is not specified in the URL set it to large
+ url = data['state']["titleModel"]["thumbnail"].format(format="large")
+ self.output["showthumbnailurl"] = url
+ except:
+ pass
+ try:
+ url = data["video"]["thumbnailXL"].format(format="large")
+ self.output["episodethumbnailurl"] = url
+ except:
+ # Get the image if size/format is not specified in the URL set it to large
+ url = data["video"]["thumbnail"].format(format="large")
+ self.output["episodethumbnailurl"] = url
+ try:
+ self.output["showdescription"] = data['state']["titleModel"]["description"]
+ except:
+ pass
+ try:
+ self.output["episodedescription"] = data["video"]["description"]
+ except:
+ pass
\ No newline at end of file | Get extra metadata, like thumbnails and descriptions. Currently implemented for svtplay and barnkanalen. | spaam_svtplay-dl | train |
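The try/except cascade above favours never failing over precision; the same fallback logic can be written once as a nested-key helper. A sketch — `first_of` is a hypothetical name, not part of svtplay-dl:

```python
def first_of(data, paths, default=None):
    """Return the value at the first resolvable key path, else `default`."""
    for path in paths:
        node = data
        try:
            for key in path:
                node = node[key]
            return node
        except (KeyError, IndexError, TypeError):
            continue
    return default

# e.g. title_nice falls back from programTitle to titleSlug:
# first_of(janson, [("video", "programTitle"), ("video", "titleSlug")])
```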
669f21ca29bdbc379ef21d4aa51e1053fbfdb97c | diff --git a/src/ol/geom/multilinestring.js b/src/ol/geom/multilinestring.js
index <HASH>..<HASH> 100644
--- a/src/ol/geom/multilinestring.js
+++ b/src/ol/geom/multilinestring.js
@@ -136,12 +136,19 @@ ol.geom.MultiLineString.prototype.getEnds = function() {
* @todo stability experimental
*/
ol.geom.MultiLineString.prototype.getLineStrings = function() {
- // FIXME we should construct the line strings from the flat coordinates
- var coordinates = this.getCoordinates();
+ var flatCoordinates = this.flatCoordinates;
+ var ends = this.ends_;
+ var layout = this.layout;
+ /** @type {Array.<ol.geom.LineString>} */
var lineStrings = [];
+ var offset = 0;
var i, ii;
- for (i = 0, ii = coordinates.length; i < ii; ++i) {
- lineStrings.push(new ol.geom.LineString(coordinates[i]));
+ for (i = 0, ii = ends.length; i < ii; ++i) {
+ var end = ends[i];
+ var lineString = new ol.geom.LineString(null);
+ lineString.setFlatCoordinates(layout, flatCoordinates.slice(offset, end));
+ lineStrings.push(lineString);
+ offset = end;
}
return lineStrings;
}; | Construct individual line strings directly from flat coordinates | openlayers_openlayers | train |
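The flat-coordinate layout is the key idea here: one array holds every vertex, and `ends` stores the exclusive end offset of each line string, so each geometry is a plain slice. A Python sketch of the same slicing loop (illustrative data):

```python
flat_coordinates = [0, 0, 1, 1, 2, 2, 3, 3]  # x, y pairs for two line strings
ends = [4, 8]                                 # exclusive end offset of each

line_strings, offset = [], 0
for end in ends:
    line_strings.append(flat_coordinates[offset:end])  # one slice per geometry
    offset = end

assert line_strings == [[0, 0, 1, 1], [2, 2, 3, 3]]
```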
d714cba74d4ab1d8c75d429aa67997481e39f5f8 | diff --git a/src/components/display.js b/src/components/display.js
index <HASH>..<HASH> 100644
--- a/src/components/display.js
+++ b/src/components/display.js
@@ -1,3 +1,4 @@
+import React, {PropTypes} from 'react';
const Display = ({value}) => <div data-focus='display'>{value}</div>;
Display.displayName = 'Display';
diff --git a/src/example/index.js b/src/example/index.js
index <HASH>..<HASH> 100644
--- a/src/example/index.js
+++ b/src/example/index.js
@@ -36,53 +36,7 @@ import store from './store';
import {definitions, domains, masterDataConfig} from './config';
moment.locale('fr');
-<<<<<<< 306b0d63d0b8875aa2816bd9f0d6a5bd039a7a22
const App = ({children}) => {
-=======
- },
- address: {
- uuid: { domain: 'DO_RODRIGO', isRequired: false},
- city: { domain: 'DO_DON_DIEGO', isRequired: true}
- }
-}
-
-const domains = {
- DO_RODRIGO: {
- type: 'text',
- validator: [{
- type: 'string',
- options: {
- maxLength: 2
- }
- }],
- formatter: value => value + ' - formaté'
- },
- DO_DON_DIEGO: {
- type: 'text',
- validator: [{
- type: 'string',
- options: {
- maxLength: 200
- }
- }],
- formatter: value => value + ' - formaté'
- },
- DO_DATE : {
- formatter: date => date ? moment(date, format).format('DD/MM/YYYY') : ''
- },
- DO_CIVILITE: {
- type: 'text',
- validator: [{
- type: 'string',
- options: {
- maxLength: 200
- }
- }]
- }
-}
-
-const App = () => {
->>>>>>> Formatted value for the list
return (
<StoreProvider store={store}>
<MetadataProvider definitions={definitions} domains={domains}> | Fix errors from the rebase | get-focus_deprecated-focus-graph | train
673c8795b94528e9ec8086ad790b9d958e894e8f | diff --git a/test/12-integration-tests.js b/test/12-integration-tests.js
index <HASH>..<HASH> 100644
--- a/test/12-integration-tests.js
+++ b/test/12-integration-tests.js
@@ -549,7 +549,7 @@ describe('Integration tests', () => {
});
describe('Node security project audit', () => {
- it('Should fail if there are vulnerable dependencies', () =>
+ it.only('Should fail if there are vulnerable dependencies', () =>
exec('git checkout master')
.then(() => pkgd())
.then((pkgInfo) => {
@@ -572,8 +572,12 @@ describe('Integration tests', () => {
.then(() => {
throw new Error('Promise rejection expected');
})
- .catch((err) =>
- assert(err.message.indexOf('Vulnerability found') > -1)
+ .catch(
+ (err) =>
+ /* prettier-ignore */
+ nodeInfos.isAtLeastNpm6
+ ? assert(err.message.indexOf('Vulnerability found') > -1)
+ : assert(err.message.indexOf('Skipped vulnerable dependencies check') > -1)
));
['[email protected]', '[email protected]'].forEach(function(
dependency
@@ -1604,7 +1608,7 @@ describe('Integration tests', () => {
nodeInfos.isAtLeastNpm6
? assert(publishLog.includes('Checking for the vulnerable dependencies'))
: assert(publishLog.includes('Skipped vulnerable dependencies'));
-
+
/* prettier-ignore */
assert(publishLog.includes('Checking for the sensitive data in the working tree'));
/* prettier-ignore */ | test(validation): try to fix test on npm versions < 6 | inikulin_publish-please | train
f303fff912e163ee0dc7cf5dfc28b2b41641e90f | diff --git a/gutenberg/api.py b/gutenberg/api.py
index <HASH>..<HASH> 100644
--- a/gutenberg/api.py
+++ b/gutenberg/api.py
@@ -6,8 +6,10 @@ from .common import serialization
from .common import typesafe
from .common import wget
import abc
+import gzip
import itertools
import logging
+import os
class TextSource(serialization.SerializableObject):
@@ -201,6 +203,21 @@ class Corpus(serialization.SerializableObject):
def __init__(self, text_source, basedir):
self.text_source = text_source
self.basedir = basedir
+ self._textdir = os.path.join(basedir, 'texts')
+
+ def _location(self, text_info):
+ """This function is a one-to-one mapping between the TextInfo space and
+ paths on the file-system that belong to the Corpus object (i.e.
+ subpaths of Corpus.basedir).
+
+ Arguments:
+ TextInfo: The descriptor of the text to read/write
+
+ Returns:
+ str: A unique location at which the text can be read/written
+
+ """
+ return os.path.join(self._textdir, '%s.gz' % text_info.uid)
@classmethod
def from_config(cls, config):
@@ -213,6 +230,28 @@ class Corpus(serialization.SerializableObject):
return cls(text_source=config.text_source,
basedir=config.basedir)
+ def _fulltext(self, text_info):
+ """Wrapper around TextSource.fulltext that caches texts on disk.
+
+ Arguments:
+ text_info (TextInfo): Meta-data about the text to be materialized.
+
+ Returns:
+ unicode: The full body of the text.
+
+ """
+ location = self._location(text_info)
+
+ try:
+ with gzip.open(location, 'rb') as gzipped:
+ fulltext = gzipped.read()
+ except IOError:
+ fulltext = self.text_source.fulltext(text_info)
+ if fulltext:
+ with gzip.open(location, 'wb') as gzipped:
+ gzipped.write(fulltext)
+ return fulltext
+
@abc.abstractmethod
def texts_for_author(self, author):
"""Retrieves all the texts from a given author from the corpus.
diff --git a/gutenberg/corpus.py b/gutenberg/corpus.py
index <HASH>..<HASH> 100644
--- a/gutenberg/corpus.py
+++ b/gutenberg/corpus.py
@@ -5,7 +5,6 @@ from . import api
from .common import db
import collections
import functools
-import gzip
import jellyfish
import logging
import os
@@ -36,30 +35,9 @@ class SqliteCorpus(api.Corpus):
INSERT INTO TextInfo(uid, title, author, location)
VALUES(?, ?, ?, ?)
''', ((text_info.uid, text_info.title, text_info.author,
- os.path.join(self.basedir, '%s.gz' % text_info.uid))
+ self._location(text_info))
for text_info in iter(self.text_source)))
- def _fulltext(self, text_info, location=None):
- if location is None:
- with db.connect(self._index) as dbcon:
- result = dbcon.execute('''
- SELECT location
- FROM TextInfo
- WHERE uid = ?
- LIMIT 1
- ''', (text_info.uid, )).fetchone()
- location = result['location']
-
- try:
- with gzip.open(location, 'rb') as gzipped:
- fulltext = gzipped.read()
- except IOError:
- fulltext = self.text_source.fulltext(text_info)
- if fulltext:
- with gzip.open(location, 'wb') as gzipped:
- gzipped.write(fulltext)
- return fulltext
-
def texts_for_author(self, author):
matches = collections.defaultdict(list)
with db.connect(self._index) as dbcon:
@@ -82,4 +60,4 @@ class SqliteCorpus(api.Corpus):
uid=text['uid'],
author=matched_author,
title=text['title'])
- yield text_info, self._fulltext(text_info, text['location'])
+ yield text_info, self._fulltext(text_info) | Promote Corpus._fulltext into the base-class | c-w_gutenberg | train |
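The caching wrapper promoted above reduces to a small read-through pattern: try the gzip file on disk first, fall back to the source, and persist whatever came back. A standalone sketch (function and parameter names here are illustrative, not from the project):

```python
import gzip

def read_through(location, fetch):
    """Return the gzipped text at `location`, fetching and caching on a miss."""
    try:
        with gzip.open(location, 'rb') as gzipped:
            return gzipped.read()
    except IOError:
        # Cache miss: materialize from the source, then persist for next time.
        text = fetch()
        if text:
            with gzip.open(location, 'wb') as gzipped:
                gzipped.write(text)
        return text
```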
ca9199776f7a9e25fc0cd93b5875145c6496a063 | diff --git a/pqhelper/base.py b/pqhelper/base.py
index <HASH>..<HASH> 100644
--- a/pqhelper/base.py
+++ b/pqhelper/base.py
@@ -580,9 +580,9 @@ class Board(object):
def _random_fill(self):
"""Fill the board with random tiles based on the Tile class."""
a = self._array
- for position in self.positions():
- if a[position].is_blank():
- a[position] = Tile.random_tile()
+ for p, tile in self.positions_with_tile():
+ if tile.is_blank():
+ a[p] = Tile.random_tile()
# Special Methods
def __str__(self):
@@ -608,14 +608,14 @@ class Board(object):
"""
a = self._array
rows, cols = a.shape
- for this_position in self.positions():
+ for this_position, tile in self.positions_with_tile():
#produce horizontal swap for this position
r, c = this_position
if c < cols - 1:
other_position = (r, c + 1)
if self._swap_optimizer_allows(this_position, other_position):
yield (this_position, other_position)
- #produce vertical swap for this position. not DRY but meh.
+ #produce vertical swap for this position. not DRY but maybe ok
if r < rows - 1:
other_position = (r + 1, c)
if self._swap_optimizer_allows(this_position, other_position):
@@ -669,16 +669,17 @@ class Board(object):
"""Generate an independent copy of self."""
return self.__class__(str(self))
- def positions(self):
- """Generate all positions as a tuple of (row,col)."""
- # if desired, use it[0].item() to reference the content of the cell
- it = numpy.nditer(self._array, flags=['multi_index', 'refs_ok'])
- while not it.finished:
- yield (it.multi_index[0], it.multi_index[1])
- it.iternext()
+ def positions_with_tile(self):
+ """Generate all positions and tiles as tuples of (row,col), tile.
+
+ docstring to make my IDE stop assuming tile is a standard dtype. sorry!
+ :rtype : tuple
+ """
+ for p, tile in numpy.ndenumerate(self._array):
+ yield p, tile
def is_empty(self):
- return all(self._array[p].is_blank() for p in self.positions())
+ return all(tile.is_blank() for p, tile in self.positions_with_tile())
# Delegated behavior to numpy.ndarray
def __getitem__(self, item):
diff --git a/tests/unit/test_base.py b/tests/unit/test_base.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_base.py
+++ b/tests/unit/test_base.py
@@ -197,7 +197,7 @@ class Test_Game(unittest.TestCase):
ends_of_turn = list(game.ends_of_turn(root=root))
last_state_board = ends_of_turn[0].parent.board
# confirm that the end state is full
- for p in last_state_board.positions():
+ for p, tile in last_state_board.positions_with_tile():
tile = last_state_board[p]
self.assertFalse(tile.is_blank(),
'Unexpectedly found a blank when the board'
@@ -351,8 +351,7 @@ class Test_Board(unittest.TestCase):
def test_random_start_board_produces_stable_full_board(self):
board = Board.random_start_board()
# Confirm it's full
- for p in board.positions():
- tile = board[p]
+ for p, tile in board.positions_with_tile():
self.assertFalse(tile.is_blank(),
'Unexpectedly found a blank when the start board'
' should be full:\n{}'.format(board))
@@ -782,8 +781,8 @@ class Test_Board(unittest.TestCase):
self.fail('Expected the starting board to be empty for testing but'
' got this board:\n{}'.format(board))
board._random_fill()
- for p in board.positions():
- self.assertFalse(board[p].is_blank())
+ for p, tile in board.positions_with_tile():
+ self.assertFalse(tile.is_blank())
def test_existing_tiles_remain_after_random_fill(self):
#prepare some positioned tiles
@@ -859,7 +858,7 @@ class Test_Board(unittest.TestCase):
for row in range(8):
for col in range(8):
positions_spec.append((row, col))
- positions = list(board.positions())
+ positions = [p for p, tile in board.positions_with_tile()]
self.assertItemsEqual(positions, positions_spec,
'Expected to get all possible coordinates in an'
'8x8 grid:\n{}\nbut got'
@@ -869,8 +868,8 @@ class Test_Board(unittest.TestCase):
blank = Tile('.')
board = Board()
# make sure all positions are blank
- for position in board.positions():
- board[position] = blank
+ for p, tile in board.positions_with_tile():
+ board[p] = blank
self.assertTrue(board.is_empty())
def test_str_returns_8x8_lines_with_EOL_showing_type_for_each_tile(self): | Reimplemented board position iterator (which is used tens of millions
of times) with something hopefully more efficient: numpy.ndenumerate.
At least it's easier to read. | kobejohn_PQHelper | train
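For reference, numpy.ndenumerate yields an (index, value) pair per cell and works on object arrays, which is what lets positions_with_tile() drop the explicit nditer bookkeeping. A minimal sketch with a stand-in board:

```python
import numpy

board = numpy.full((2, 2), '.', dtype=object)  # stand-in for Board._array

# One call replaces nditer(..., flags=['multi_index', 'refs_ok']) plus the
# explicit iternext() loop: each step yields ((row, col), value).
for position, tile in numpy.ndenumerate(board):
    print(position, tile)
```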
1f0485faac63ce6eefb70f62bc5c7797d6f9b035 | diff --git a/mamba/example_group.py b/mamba/example_group.py
index <HASH>..<HASH> 100644
--- a/mamba/example_group.py
+++ b/mamba/example_group.py
@@ -33,12 +33,13 @@ class ExampleGroup(runnable.Runnable):
self._start(reporter)
try:
- self._bind_helpers_to(execution_context)
self.execute_hook('before_all', execution_context)
for example in self:
+ example_execution_context = copy.copy(execution_context)
+ self._bind_helpers_to(example_execution_context)
example.execute(reporter,
- copy.copy(execution_context),
+ example_execution_context,
tags=tags)
self.execute_hook('after_all', execution_context) | Bind helper methods after copying context | nestorsalceda_mamba | train |
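The ordering matters because copy.copy is shallow: a method bound to the original context survives the copy and keeps pointing at the original object. A minimal sketch of the failure mode, assuming _bind_helpers_to attaches bound methods to the context:

```python
import copy

class Context(object):
    pass

def helper(self):
    return self

ctx = Context()
ctx.helper = helper.__get__(ctx)      # bind before copying (old behaviour)
clone = copy.copy(ctx)
assert clone.helper() is ctx          # still operates on the original!

clone.helper = helper.__get__(clone)  # bind after copying (new behaviour)
assert clone.helper() is clone
```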
406763b039e98b28928f38dcf651a0431d873fcf | diff --git a/core/common/src/main/java/alluxio/RuntimeConstants.java b/core/common/src/main/java/alluxio/RuntimeConstants.java
index <HASH>..<HASH> 100644
--- a/core/common/src/main/java/alluxio/RuntimeConstants.java
+++ b/core/common/src/main/java/alluxio/RuntimeConstants.java
@@ -27,4 +27,6 @@ public final class RuntimeConstants {
public static final String ALLUXIO_JAR =
"target/alluxio-" + VERSION + "-jar-with-dependencies.jar";
+ private RuntimeConstants() {
+ } // prevent instantiation
} | [ALLUXIO-<I>] Fix comments | Alluxio_alluxio | train |
f6af9ec3515ce7b1b0e6e8e945330d85faa7cd7b | diff --git a/private/model/api/docstring.go b/private/model/api/docstring.go
index <HASH>..<HASH> 100644
--- a/private/model/api/docstring.go
+++ b/private/model/api/docstring.go
@@ -50,8 +50,7 @@ func (d *apiDocumentation) setup(a *API) error {
for opName, doc := range d.Operations {
if _, ok := a.Operations[opName]; !ok {
- return fmt.Errorf("%s, doc op %q not found in API op set",
- a.name, opName)
+ continue
}
a.Operations[opName].Documentation = docstring(doc)
}
diff --git a/private/model/api/docstring_test.go b/private/model/api/docstring_test.go
index <HASH>..<HASH> 100644
--- a/private/model/api/docstring_test.go
+++ b/private/model/api/docstring_test.go
@@ -80,3 +80,52 @@ func TestDocstring(t *testing.T) {
})
}
}
+
+func TestApiDocumentation_missingShapes(t *testing.T) {
+ docs := apiDocumentation{
+ Service: "some service documentation",
+ Operations: map[string]string{
+ "OperationOne": "some operation documentation",
+ "OperationTwo": "some more operation documentation",
+ },
+ Shapes: map[string]shapeDocumentation{
+ "ShapeOne": {
+ Base: "some shape documentation",
+ },
+ "ShapeTwo": {
+ Base: "some more shape documentation",
+ Refs: map[string]string{
+ "ShapeOne$shapeTwo": "shape ref document",
+ },
+ },
+ },
+ }
+
+ api := API{
+ Operations: map[string]*Operation{
+ "OperationOne": {},
+ },
+ Shapes: map[string]*Shape{
+ "ShapeOne": {
+ Type: "structure",
+ MemberRefs: map[string]*ShapeRef{},
+ },
+ },
+ }
+
+ if err := docs.setup(&api); err != nil {
+ t.Fatalf("expect no error, got %v", err)
+ }
+
+ if _, ok := api.Operations["OperationTwo"]; ok {
+ t.Errorf("expect operation shape to not be added from document model")
+ }
+
+ if _, ok := api.Shapes["ShapeTwo"]; ok {
+ t.Errorf("expect shape to not be added from document model")
+ }
+
+ if _, ok := api.Shapes["ShapeOne"].MemberRefs["shapeTwo"]; ok {
+ t.Errorf("expect shape to not be added from document model")
+ }
+} | Allow docs-2.json to reference shapes not present in API without failure. (#<I>) | aws_aws-sdk-go | train |
4a79cc2cb9ee99f5f96b2348926174b8f967a65a | diff --git a/synapse/lib/cmdr.py b/synapse/lib/cmdr.py
index <HASH>..<HASH> 100644
--- a/synapse/lib/cmdr.py
+++ b/synapse/lib/cmdr.py
@@ -11,7 +11,14 @@ s_mixins.addSynMixin('cmdr','synapse.cores.common.Cortex','synapse.cmds.cortex.A
def getItemCmdr(item, outp=None, **opts):
+ '''
+ Construct and return a cmdr for the given item.
+ Example:
+
+ cmdr = getItemCmdr(foo)
+
+ '''
cmdr = s_cli.Cli(item,outp=outp)
refl = s_reflect.getItemInfo(item)
@@ -25,5 +32,13 @@ def getItemCmdr(item, outp=None, **opts):
return cmdr
def runItemCmdr(item, outp=None, **opts):
+ '''
+ Create a cmdr for the given item and run the cmd loop.
+
+ Example:
+
+ runItemCmdr(foo)
+
+ '''
cmdr = getItemCmdr(item, outp=outp, **opts)
cmdr.runCmdLoop() | added doc strings. :D | vertexproject_synapse | train |
6f8f9c3241929032c1c62fc9c279879f49a185ab | diff --git a/lib/chef/resource/windows_service.rb b/lib/chef/resource/windows_service.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/resource/windows_service.rb
+++ b/lib/chef/resource/windows_service.rb
@@ -41,10 +41,6 @@ class Chef
allowed_actions :configure_startup, :create, :delete, :configure
- property :service_name, String,
- description: "The name of the service.",
- name_property: true, identity: true
-
# The display name to be used by user interface programs to identify the
# service. This string has a maximum length of 256 characters.
property :display_name, String, regex: /^.{1,256}$/, | Remove service_name property that's already part of service
No need to define this twice | chef_chef | train |
39eede500fb6384be5ebb801d8327ff25e2797ec | diff --git a/phantomflow.js b/phantomflow.js
index <HASH>..<HASH> 100644
--- a/phantomflow.js
+++ b/phantomflow.js
@@ -19,6 +19,7 @@ var glob = require( "glob" );
var cp = require( 'child_process' );
var wrench = require( 'wrench' );
var async = require( 'async' );
+var log = console.log;
var optionDebug;
var grunt_fatal_original;
@@ -54,7 +55,6 @@ module.exports.init = function ( options ) {
var earlyExit = typeof options.earlyexit === 'undefined' ? false : options.earlyexit;
// Dashboard mode?
- var log = console.log;
var errorLog = console.error;
var updateTableAndStats = _.noop;
var dashboardDone = _.noop;
@@ -175,7 +175,7 @@ module.exports.init = function ( options ) {
}
/*
- Enable https://github.com/ariya/phantomjs/wiki/Troubleshooting#remote-debugging
+ Enable https://github.com/ariya/phantomjs/wiki/Troubleshooting#remote-debugging
*/
if ( remoteDebug ) {
args.push( | move log reference to top (#<I>) | HuddleEng_PhantomFlow | train |
4bebaa670dde14edc6793c4576bfcd4ef71e0d47 | diff --git a/getFileFlysystem.php b/getFileFlysystem.php
index <HASH>..<HASH> 100644
--- a/getFileFlysystem.php
+++ b/getFileFlysystem.php
@@ -23,14 +23,25 @@
require_once __DIR__ . '/../vendor/autoload.php';
use oat\tao\model\websource\FlyTokenWebSource;
-use oat\tao\model\websource\TokenWebSource;
+use oat\oatbox\service\ServiceManager;
+use oat\oatbox\filesystem\FileSystemService;
+use oat\oatbox\service\SimpleConfigDriver;
$url = $_SERVER['REQUEST_URI'];
$rel = substr($url, strpos($url, FlyTokenWebSource::ENTRY_POINT) + strlen(FlyTokenWebSource::ENTRY_POINT));
$parts = explode('/', $rel, 2);
list ($webSourceId) = $parts;
$webSourceId = preg_replace('/[^a-zA-Z0-9]*/', '', $webSourceId);
-$configPath = $_SERVER['DOCUMENT_ROOT'] . DIRECTORY_SEPARATOR . 'config' . DIRECTORY_SEPARATOR . 'tao' . DIRECTORY_SEPARATOR . 'websource_' . $webSourceId . '.conf.php';
+
+$root = $_SERVER['DOCUMENT_ROOT'];
+$driver = new SimpleConfigDriver();
+$configService = $driver->connect('config', array(
+ 'dir' => $root .DIRECTORY_SEPARATOR. 'config' .DIRECTORY_SEPARATOR,
+ 'humanReadable' => true
+));
+$serviceManager = new ServiceManager($configService);
+
+$configPath = $root . DIRECTORY_SEPARATOR . 'config' . DIRECTORY_SEPARATOR . 'tao' . DIRECTORY_SEPARATOR . 'websource_' . $webSourceId . '.conf.php';
if (!file_exists($configPath)) {
header('HTTP/1.0 403 Forbidden');
@@ -45,13 +56,12 @@ if (!is_array($config) || !isset($config['className'])) {
$className = $config['className'];
$options = isset($config['options']) ? $config['options'] : array();
$source = new $className($options);
-if (!$source instanceof TokenWebSource) {
+if (!$source instanceof FlyTokenWebSource) {
header('HTTP/1.0 403 Forbidden');
die();
}
-$root = $_SERVER['DOCUMENT_ROOT'];
-$fsService = include $root . DIRECTORY_SEPARATOR . 'config' . DIRECTORY_SEPARATOR . 'generis' . DIRECTORY_SEPARATOR . 'filesystem.conf.php';
+$fsService = $serviceManager->get(FileSystemService::SERVICE_ID);
$fileSystem = $fsService->getFileSystem($source->getOption($source::OPTION_FILESYSTEM_ID));
$source->setFileSystem($fileSystem);
diff --git a/models/classes/websource/FlyTokenWebSource.php b/models/classes/websource/FlyTokenWebSource.php
index <HASH>..<HASH> 100644
--- a/models/classes/websource/FlyTokenWebSource.php
+++ b/models/classes/websource/FlyTokenWebSource.php
@@ -30,10 +30,6 @@ class FlyTokenWebSource extends TokenWebSource
{
const ENTRY_POINT = '/getFileFlysystem.php/';
- static $instances = [];
-
- protected $fsService;
-
/**
* @param $fileSystem
*/ | Ensure Filesystem has access to servicemanager | oat-sa_tao-core | train |
aabf3b7949bba202ce5639bc05941a366a58b4e2 | diff --git a/ui/Surface.js b/ui/Surface.js
index <HASH>..<HASH> 100644
--- a/ui/Surface.js
+++ b/ui/Surface.js
@@ -6,6 +6,7 @@ import { getDOMRangeFromEvent } from '../util/windowUtils'
import DefaultDOMElement from '../dom/DefaultDOMElement'
import Component from './Component'
import Clipboard from './Clipboard'
+import DOMSelection from './DOMSelection'
import UnsupportedNode from './UnsupportedNodeComponent'
const BROWSER_DELAY = platform.isFF ? 1 : 0
@@ -42,8 +43,7 @@ export default class Surface extends Component {
this.clipboard = new Clipboard(this.editorSession)
- this.domSelection = this.context.domSelection
- if (!this.domSelection) throw new Error('DOMSelection instance must be provided via context.')
+ this.domSelection = this.context.domSelection || new DOMSelection(this)
this._state = {
// true if the document session's selection is addressing this surface | Make Surface more robust w.r.t missing DOMSelection. | substance_substance | train |
8fc923f5219db7d4c9622476b1e97735c771c746 | diff --git a/restcomm/restcomm.rvd/src/main/webapp/controllers.js b/restcomm/restcomm.rvd/src/main/webapp/controllers.js
index <HASH>..<HASH> 100644
--- a/restcomm/restcomm.rvd/src/main/webapp/controllers.js
+++ b/restcomm/restcomm.rvd/src/main/webapp/controllers.js
@@ -36,7 +36,6 @@ App.controller('projectManagerCtrl', function ($scope, $http, $location, $routeP
console.log("project already exists");
$scope.notifications.unshift({message:"A Voice or USSD project with that name already exists" });
$timeout(function () {
- console.log("removing notification");
$scope.notifications.pop();
}, 5000);
} | RESTCOMM-<I> #comment removed forgotten debugging messages | RestComm_Restcomm-Connect | train |
fb8c0c814d1ca51664230ccf2e1866ceaaa375ee | diff --git a/dalesbred/src/main/java/org/dalesbred/dialect/Dialect.java b/dalesbred/src/main/java/org/dalesbred/dialect/Dialect.java
index <HASH>..<HASH> 100644
--- a/dalesbred/src/main/java/org/dalesbred/dialect/Dialect.java
+++ b/dalesbred/src/main/java/org/dalesbred/dialect/Dialect.java
@@ -39,6 +39,7 @@ import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
+import java.util.function.Function;
import java.util.logging.Logger;
/**
@@ -56,13 +57,13 @@ public abstract class Dialect {
}
@NotNull
- public Object createNativeDatabaseEnum(@NotNull Enum<?> value, @NotNull String typeName) {
- return value.name();
+ public Function<Enum<?>, ?> createNativeEnumToDatabaseConversion(@NotNull String typeName) {
+ throw new UnsupportedOperationException("native enums are not supported by " + getClass().getName());
}
@NotNull
- public <T extends Enum<T>> T parseNativeDatabaseEnum(@NotNull Class<T> enumType, @NotNull Object value) {
- return Enum.valueOf(enumType, value.toString());
+ public <T extends Enum<T>> Function<Object, T> createNativeEnumFromDatabaseConversion(@NotNull Class<T> enumType) {
+ throw new UnsupportedOperationException("native enums are not supported by " + getClass().getName());
}
@Override
diff --git a/dalesbred/src/main/java/org/dalesbred/dialect/PostgreSQLDialect.java b/dalesbred/src/main/java/org/dalesbred/dialect/PostgreSQLDialect.java
index <HASH>..<HASH> 100644
--- a/dalesbred/src/main/java/org/dalesbred/dialect/PostgreSQLDialect.java
+++ b/dalesbred/src/main/java/org/dalesbred/dialect/PostgreSQLDialect.java
@@ -29,6 +29,7 @@ import org.postgresql.util.PGobject;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;
+import java.util.function.Function;
/**
* Support for PostgreSQL.
@@ -37,11 +38,22 @@ public class PostgreSQLDialect extends Dialect {
@NotNull
@Override
- public Object createNativeDatabaseEnum(@NotNull Enum<?> value, @NotNull String typeName) {
+ public Function<Enum<?>, ?> createNativeEnumToDatabaseConversion(@NotNull String typeName) {
+ return value -> createPgObject(value.name(), typeName);
+ }
+
+ @NotNull
+ @Override
+ public <T extends Enum<T>> Function<Object, T> createNativeEnumFromDatabaseConversion(@NotNull Class<T> enumType) {
+ return value -> Enum.valueOf(enumType, value.toString());
+ }
+
+ @NotNull
+ private Object createPgObject(@NotNull String value, @NotNull String typeName) {
try {
PGobject object = new PGobject();
object.setType(typeName);
- object.setValue(value.name());
+ object.setValue(value);
return object;
} catch (SQLException e) {
throw convertException(e);
diff --git a/dalesbred/src/main/java/org/dalesbred/internal/instantiation/DefaultTypeConversionRegistry.java b/dalesbred/src/main/java/org/dalesbred/internal/instantiation/DefaultTypeConversionRegistry.java
index <HASH>..<HASH> 100644
--- a/dalesbred/src/main/java/org/dalesbred/internal/instantiation/DefaultTypeConversionRegistry.java
+++ b/dalesbred/src/main/java/org/dalesbred/internal/instantiation/DefaultTypeConversionRegistry.java
@@ -65,8 +65,8 @@ final class DefaultTypeConversionRegistry implements TypeConversionRegistry {
@Override
public <T extends Enum<T>> void registerNativeEnumConversion(@NotNull Class<T> enumType, @NotNull String typeName) {
registerConversions(Object.class, enumType,
- value -> dialect.parseNativeDatabaseEnum(enumType, value),
- value -> dialect.createNativeDatabaseEnum(value, typeName));
+ dialect.createNativeEnumFromDatabaseConversion(enumType),
+ dialect.createNativeEnumToDatabaseConversion(typeName)::apply);
}
@NotNull | Fail earlier if Dialect does not support native enums
Throw an exception upon registration when registering native enum
conversions with a dialect that does not support them. | EvidentSolutions_dalesbred | train |
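The fail-early trick is that the dialect now returns (or refuses to return) a conversion function up front, so an unsupported dialect blows up at registration time rather than on the first converted row. A Python analogue of the pattern (names illustrative, not the project's API):

```python
class Dialect(object):
    def native_enum_from_db(self, enum_type):
        # Base dialect: refuse at registration time.
        raise NotImplementedError(
            'native enums are not supported by %s' % type(self).__name__)

class PostgreSQLDialect(Dialect):
    def native_enum_from_db(self, enum_type):
        # Supported dialect: hand back the conversion function itself.
        return lambda value: enum_type[str(value)]

# Registration calls the factory immediately, so the base Dialect fails
# here, not later during row conversion.
```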
733582995a0cb13c8005ccdb24f77ff0591d9bf4 | diff --git a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/validation/IssueCodes.java b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/validation/IssueCodes.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/validation/IssueCodes.java
+++ b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/validation/IssueCodes.java
@@ -62,8 +62,8 @@ public class IssueCodes {
public static final String INVALID_NUMBER_FORMAT = ISSUE_CODE_PREFIX + "invalidNumberFormat";
public static final String FIELD_ALREADY_INITIALIZED = ISSUE_CODE_PREFIX + "field_already_initialized";
public static final String INVALID_TYPE = ISSUE_CODE_PREFIX + "invalid_type";
- public static final String FORBIDDEN_REFERENCE = "forbidden_reference";
- public static final String DISCOURAGED_REFERENCE = "discouraged_reference";
+ public static final String FORBIDDEN_REFERENCE = ISSUE_CODE_PREFIX + "forbidden_reference";
+ public static final String DISCOURAGED_REFERENCE = ISSUE_CODE_PREFIX + "discouraged_reference";
// list is not necessarily complete | [Validation] use common prefix for issueCodes | eclipse_xtext-extras | train |
7ad0c0b9acd4aee7eb0b4787730b3e2058579d26 | diff --git a/livestyle-client.js b/livestyle-client.js
index <HASH>..<HASH> 100644
--- a/livestyle-client.js
+++ b/livestyle-client.js
@@ -191,7 +191,7 @@
toWatch = [],
url,
i;
- cssIncludes.unshift({ href: location.pathname });
+ //cssIncludes.unshift({ href: location.pathname }); // See https://github.com/One-com/livestyle/issues/11
for (i = 0; i < cssIncludes.length; i += 1) {
url = removeCacheBuster(cssIncludes[i].href);
@@ -277,7 +277,7 @@
setTimeout(startPolling, pollTimeout);
}
};
- cssIncludes.unshift({ href: location.pathname });
+ //cssIncludes.unshift({ href: location.pathname }); // See https://github.com/One-com/livestyle/issues/11
proceed();
}; | Disabled the client subscribing to the current page url, which introduced a lot of errors. See Issue #<I> to reimplement it. | One-com_livestyle | train |
64e240a4f553187d3e1eda1f0755dc11333fc937 | diff --git a/src/Friday/Http/Dispatcher.php b/src/Friday/Http/Dispatcher.php
index <HASH>..<HASH> 100644
--- a/src/Friday/Http/Dispatcher.php
+++ b/src/Friday/Http/Dispatcher.php
@@ -41,11 +41,11 @@ class Dispatcher
if(isset($route[3]) && is_string($route[3])) {
$view = $route[3];
$data = $route[4];
- return ['view', $view, $data];
+ return ['view' => [$view, $data]];
}
elseif(isset($route[2]) && is_string($route[2])) {
list($controller, $method) = explode('@', $route[2]);
- return ['controller_method', $controller, $method];
+ return ['controller' => [$controller, $method]];
}
elseif(isset($route[2]) && ($route[2] instanceof Closure || get_class($route[2]) === "Closure")) {
$function = $route[2];
@@ -56,14 +56,14 @@ class Dispatcher
}
ob_start();
if($request->getParam('Closure') !== false && is_array($request->getParam('Closure')) ) {
- call_user_func_array($route[2], $request->getParam('Closure'));
+ $return = call_user_func_array($route[2], $request->getParam('Closure'));
}
else {
//$function();
throw new \Exception('Invaliding parameter passed in route\'s callable function: '.$route[1]);
}
$output = ob_get_clean();
- return ['output', $output];
+ return ['output' => [$output, $return]];
}
else {
throw new \Exception('Invaliding route is registered for: '.$route[1]); | dispatch() improved
dispatch() now returns the output together with the closure's return value,
and controller/view results as associative arrays | ironphp_ironphp | train
c8a4e9327d1e06926ea24575dc3c9cd932004775 | diff --git a/sinchsms.py b/sinchsms.py
index <HASH>..<HASH> 100644
--- a/sinchsms.py
+++ b/sinchsms.py
@@ -3,7 +3,13 @@
sinchsms - a module to send sms using the Sinch REST apis, www.sinch.com
"""
-import requests
+try:
+ import urllib.request as urllib2
+except ImportError:
+ import urllib2
+
+import json
+import base64
class SinchSMS(object):
@@ -18,7 +24,8 @@ class SinchSMS(object):
Visit your dashboard at sinch.com to locate your application key and secret.
These can be found under apps/credentials section.
"""
- self._auth = ('application:' + app_key, app_secret)
+ b64bytes = base64.b64encode(('application:%s:%s' % (app_key, app_secret)).encode())
+ self._auth = 'basic %s' % b64bytes.decode('ascii')
def _request(self, url, values=None):
""" Send a request and read response.
@@ -26,12 +33,22 @@ class SinchSMS(object):
Sends a get request if values are None, post request otherwise.
"""
if values:
- response = requests.post(url, json=values, auth=self._auth)
+ jsonData = json.dumps(values)
+ request = urllib2.Request(url, jsonData.encode())
+ request.add_header('content-type', 'application/json')
+ request.add_header('authorization', self._auth)
+ connection = urllib2.urlopen(request)
+ response = connection.read()
+ connection.close()
else:
- response = requests.get(url, auth=self._auth)
+ request = urllib2.Request(url)
+ request.add_header('authorization', self._auth)
+ connection = urllib2.urlopen(request)
+ response = connection.read()
+ connection.close()
try:
- result = response.json()
+ result = json.loads(response.decode())
except ValueError as exception:
return {'errorCode': 1, 'message': str(exception)} | Replace requests dependency with urllib2. | sinch_python-sinch-sms | train |
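The header built in __init__ above is plain HTTP Basic auth over a colon-joined key pair. Reproduced standalone (the credentials are placeholders, not real values):

```python
import base64

app_key, app_secret = 'my-key', 'my-secret'  # placeholder credentials

# Join the application key and secret, base64-encode, and prefix for Basic auth.
b64bytes = base64.b64encode(
    ('application:%s:%s' % (app_key, app_secret)).encode())
auth_header = 'basic %s' % b64bytes.decode('ascii')
# -> 'basic ' + base64 of 'application:my-key:my-secret'
```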
f30d365a54b90bb74e19850153f67a4b325a87db | diff --git a/info.py b/info.py
index <HASH>..<HASH> 100644
--- a/info.py
+++ b/info.py
@@ -24,7 +24,7 @@ def release():
:returns: Current release string
:current: |release|
'''
- return version() + 'a2'
+ return version() + 'a3'
def url():
'''
diff --git a/photon/util/locations.py b/photon/util/locations.py
index <HASH>..<HASH> 100644
--- a/photon/util/locations.py
+++ b/photon/util/locations.py
@@ -122,11 +122,11 @@ def change_location(src, tgt, move=False, verbose=True):
from .system import shell_notify
if _path.exists(src):
- if _path.isfile(src):
- _copy2(src, search_location(tgt, create_in=_path.dirname(tgt), verbose=verbose))
- else:
- for l in _listdir(src): change_location(_path.abspath(_path.join(src, l)), _path.abspath(_path.join(tgt, l)))
-
+ if tgt:
+ if _path.isfile(src):
+ _copy2(src, search_location(tgt, create_in=_path.dirname(tgt), verbose=verbose))
+ else:
+ for l in _listdir(src): change_location(_path.abspath(_path.join(src, l)), _path.abspath(_path.join(tgt, l)))
if move: _rmtree(src) if _path.isdir(src) else _remove(src)
if verbose: shell_notify(
'%s location' %('deleted' if not tgt and move else 'moved' if move else 'copied'), | Enable the delete functionality as advertised in the documentation | spookey_photon | train |
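With the `if tgt:` guard above, a falsy target no longer reaches the copy branch, so delete-by-move works as documented. A usage sketch, assuming the package is installed and the (hypothetical) source paths exist:

```python
from photon.util.locations import change_location

# A real target still copies/moves the tree as before:
change_location('/tmp/src_dir', '/tmp/dst_dir')

# A falsy target plus move=True now simply deletes the source instead of
# crashing inside the copy branch:
change_location('/tmp/src_dir', None, move=True)
```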
d3411306d970d449d70ae55980d904ae1cddc47f | diff --git a/addons/fabric8-camel-maven-plugin/src/main/java/io/fabric8/forge/camel/maven/EndpointMojo.java b/addons/fabric8-camel-maven-plugin/src/main/java/io/fabric8/forge/camel/maven/EndpointMojo.java
index <HASH>..<HASH> 100644
--- a/addons/fabric8-camel-maven-plugin/src/main/java/io/fabric8/forge/camel/maven/EndpointMojo.java
+++ b/addons/fabric8-camel-maven-plugin/src/main/java/io/fabric8/forge/camel/maven/EndpointMojo.java
@@ -143,22 +143,31 @@ public class EndpointMojo extends AbstractMojo {
catalog.setSuggestionStrategy(new LuceneSuggestionStrategy());
// enable loading other catalog versions dynamically
catalog.setVersionManager(new MavenVersionManager());
+ // enable caching
+ catalog.enableCache();
if (downloadVersion) {
- String camelVersion = findCamelVersion(project);
- if (camelVersion != null && !camelVersion.equals(catalog.getCatalogVersion())) {
+ String catalogVersion = catalog.getCatalogVersion();
+ String version = findCamelVersion(project);
+ if (version != null && !version.equals(catalogVersion)) {
// the project uses a different Camel version so attempt to load it
- getLog().info("Downloading Camel version: " + camelVersion);
- boolean loaded = catalog.loadVersion(camelVersion);
+ getLog().info("Downloading Camel version: " + version);
+ boolean loaded = catalog.loadVersion(version);
if (!loaded) {
- getLog().warn("Error downloading Camel version: " + camelVersion);
+ getLog().warn("Error downloading Camel version: " + version);
}
}
}
+ // if using the same version as the fabric8-camel-maven-plugin we must still load it
+ if (catalog.getLoadedVersion() == null) {
+ catalog.loadVersion(catalog.getCatalogVersion());
+ }
+
if (catalog.getLoadedVersion() != null) {
getLog().info("Using Camel version: " + catalog.getLoadedVersion());
} else {
+ // force load version from the fabric8-camel-maven-plugin
getLog().info("Using Camel version: " + catalog.getCatalogVersion());
} | Fixes #<I> to make the camel maven plugin work again to support different Camel versions. | fabric8io_fabric8-forge | train |
5d0d3dc9d6d434bc3c278e71f887b2a89e66a9af | diff --git a/netpyne/simFuncs.py b/netpyne/simFuncs.py
index <HASH>..<HASH> 100644
--- a/netpyne/simFuncs.py
+++ b/netpyne/simFuncs.py
@@ -1935,11 +1935,11 @@ if neuromlExists:
to_start = 0.0 if ind==0 else lens[ind-1]
to_end = lens[ind]
tot = lens[-1]
- print to_start, to_end, tot, ind, seg, seg_id
+ #print to_start, to_end, tot, ind, seg, seg_id
fract_sec = (to_start + fract_along *(to_end-to_start))/(tot)
ind+=1
- print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec))
+ #print("============= Converted %s:%s on pop %s to %s on %s"%(seg_id, fract_along, population_id, nrn_sec, fract_sec))
return nrn_sec, fract_sec
#
@@ -2070,7 +2070,7 @@ if neuromlExists:
nmlHandler.finalise()
print('Finished import: %s'%nmlHandler.gids)
- print('Connections: %s'%nmlHandler.connections)
+ #print('Connections: %s'%nmlHandler.connections)
sim.initialize(netParams, simConfig) # create network object and set cfg and net params | Slightly less verbose | Neurosim-lab_netpyne | train |
fe4e3df434d85cb9189f07fd39c8e3cb29b89713 | diff --git a/simpleauth2/adapters/gaewebapp2/__init__.py b/simpleauth2/adapters/gaewebapp2/__init__.py
index <HASH>..<HASH> 100644
--- a/simpleauth2/adapters/gaewebapp2/__init__.py
+++ b/simpleauth2/adapters/gaewebapp2/__init__.py
@@ -5,7 +5,6 @@ from urllib import urlencode
from webapp2_extras import sessions
import datetime
import logging
-import openid
import urlparse
@@ -108,11 +107,10 @@ class GAEWebapp2Adapter(adapters.WebObBaseAdapter):
request = None
response = None
session = None
- openid_store = None
config = None
def __init__(self, handler, config=None, session=None, session_secret=None,
- session_key='simpleauth2', openid_store=openid.NDBOpenIDStore):
+ session_key='simpleauth2', openid_store=None):
self.request = handler.request
self.response = handler.response
@@ -120,7 +118,7 @@ class GAEWebapp2Adapter(adapters.WebObBaseAdapter):
self._handler = handler
self._session_secret = session_secret
self._session_key = session_key
- self.openid_store = openid_store
+ self._openid_store = openid_store
if config:
self.config = config
@@ -152,6 +150,16 @@ class GAEWebapp2Adapter(adapters.WebObBaseAdapter):
return adapters.RPC(rpc, response_parser or self.response_parser, content_parser)
+ @property
+ def openid_store(self):
+ if self._openid_store:
+ return self._openid_store
+ else:
+ # import only if needed to avoid python-openid dependency if not neccessary
+ from openid import NDBOpenIDStore
+ return NDBOpenIDStore
+
+
@staticmethod
def response_parser(response, content_parser):
return Response(status_code=response.status_code, | OpenIDStore now gets imported on demand. | authomatic_authomatic | train |
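The property defers the python-openid import until the store is actually needed, keeping the dependency optional for code paths that never touch it. The pattern in isolation — a sketch, with the stdlib json module standing in for the optional dependency:

```python
class Lazy(object):
    @property
    def store(self):
        # Imported only when the property is first read, so the dependency
        # stays optional for callers that never access it.
        import json  # stand-in for the optional python-openid module
        return json

Lazy().store  # triggers the import on demand
```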
199d70ca95815c1f1913198b6119876324f8106c | diff --git a/presto-main/src/main/java/com/facebook/presto/sql/analyzer/MaterializedViewQueryOptimizer.java b/presto-main/src/main/java/com/facebook/presto/sql/analyzer/MaterializedViewQueryOptimizer.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/main/java/com/facebook/presto/sql/analyzer/MaterializedViewQueryOptimizer.java
+++ b/presto-main/src/main/java/com/facebook/presto/sql/analyzer/MaterializedViewQueryOptimizer.java
@@ -133,7 +133,7 @@ public class MaterializedViewQueryOptimizer
return process(node);
}
catch (Exception ex) {
- logger.warn(ex.getMessage());
+ logger.warn("Failed to rewrite query with materialized view with following exception: %s", ex.getMessage());
return node;
}
}
diff --git a/presto-main/src/main/java/com/facebook/presto/sql/rewrite/MaterializedViewOptimizationRewriteUtils.java b/presto-main/src/main/java/com/facebook/presto/sql/rewrite/MaterializedViewOptimizationRewriteUtils.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/main/java/com/facebook/presto/sql/rewrite/MaterializedViewOptimizationRewriteUtils.java
+++ b/presto-main/src/main/java/com/facebook/presto/sql/rewrite/MaterializedViewOptimizationRewriteUtils.java
@@ -13,6 +13,7 @@
*/
package com.facebook.presto.sql.rewrite;
+import com.facebook.airlift.log.Logger;
import com.facebook.presto.Session;
import com.facebook.presto.common.QualifiedObjectName;
import com.facebook.presto.metadata.Metadata;
@@ -29,10 +30,13 @@ import com.facebook.presto.sql.tree.Table;
import java.util.Set;
+import static com.facebook.presto.SystemSessionProperties.isMaterializedViewDataConsistencyEnabled;
import static com.facebook.presto.common.RuntimeMetricName.OPTIMIZED_WITH_MATERIALIZED_VIEW;
public class MaterializedViewOptimizationRewriteUtils
{
+ private static final Logger log = Logger.get(MaterializedViewOptimizationRewriteUtils.class);
+
private MaterializedViewOptimizationRewriteUtils() {}
public static Query optimizeQueryUsingMaterializedView(
@@ -50,8 +54,15 @@ public class MaterializedViewOptimizationRewriteUtils
for (QualifiedObjectName candidate : materializedViewCandidates) {
Query optimizedQuery = getQueryWithMaterializedViewOptimization(metadata, session, sqlParser, accessControl, node, candidate);
if (node != optimizedQuery) {
- MaterializedViewStatus materializedViewStatus = metadata.getMaterializedViewStatus(session, candidate);
- if (materializedViewStatus.isFullyMaterialized() || materializedViewStatus.isPartiallyMaterialized()) {
+ if (isMaterializedViewDataConsistencyEnabled(session)) {
+ //TODO: We should be able to leverage this information in the StatementAnalyzer as well.
+ MaterializedViewStatus materializedViewStatus = metadata.getMaterializedViewStatus(session, candidate);
+ if (materializedViewStatus.isFullyMaterialized() || materializedViewStatus.isPartiallyMaterialized()) {
+ session.getRuntimeStats().addMetricValue(OPTIMIZED_WITH_MATERIALIZED_VIEW, 1);
+ return optimizedQuery;
+ }
+ }
+ else {
session.getRuntimeStats().addMetricValue(OPTIMIZED_WITH_MATERIALIZED_VIEW, 1);
return optimizedQuery;
}
@@ -68,10 +79,16 @@ public class MaterializedViewOptimizationRewriteUtils
Query statement,
QualifiedObjectName materializedViewQualifiedObjectName)
{
- ConnectorMaterializedViewDefinition materializedView = metadata.getMaterializedView(session, materializedViewQualifiedObjectName).get();
- Table materializedViewTable = new Table(QualifiedName.of(materializedView.getTable()));
+ try {
+ ConnectorMaterializedViewDefinition materializedView = metadata.getMaterializedView(session, materializedViewQualifiedObjectName).get();
+ Table materializedViewTable = new Table(QualifiedName.of(materializedView.getTable()));
- Query materializedViewDefinition = (Query) sqlParser.createStatement(materializedView.getOriginalSql());
- return (Query) new MaterializedViewQueryOptimizer(metadata, session, sqlParser, accessControl, new RowExpressionDomainTranslator(metadata), materializedViewTable, materializedViewDefinition).rewrite(statement);
+ Query materializedViewDefinition = (Query) sqlParser.createStatement(materializedView.getOriginalSql());
+ return (Query) new MaterializedViewQueryOptimizer(metadata, session, sqlParser, accessControl, new RowExpressionDomainTranslator(metadata), materializedViewTable, materializedViewDefinition).rewrite(statement);
+ }
+ catch (RuntimeException ex) {
+ log.warn("Failed to get materialized view for %s, with exception: %s", materializedViewQualifiedObjectName, ex.getMessage());
+ return statement;
+ }
}
} | Avoid status check if consistency is not enabled
The materialized view optimizer should optimize the query irrespective
of the materialized view status when the consistency check is disabled. | prestodb_presto | train
8e569236d16f1a8995e5b7b5fc2e719de48dc3e7 | diff --git a/lib/poise/helpers/inversion.rb b/lib/poise/helpers/inversion.rb
index <HASH>..<HASH> 100644
--- a/lib/poise/helpers/inversion.rb
+++ b/lib/poise/helpers/inversion.rb
@@ -78,7 +78,11 @@ module Poise
# end
def provider(val=nil)
if val && !val.is_a?(Class)
- provider_class = Poise::Helpers::Inversion.provider_for(resource_name, node, val)
+ resource_names = [resource_name]
+ # If subclass_providers! might be in play, check for those names too.
+ resource_names.concat(self.class.subclass_resource_equivalents) if self.class.respond_to?(:subclass_resource_equivalents)
+ # Silly ruby tricks to find the first provider that exists and no more.
+ provider_class = resource_names.lazy.map {|name| Poise::Helpers::Inversion.provider_for(name, node, val) }.select {|x| x }.first
Chef::Log.debug("[#{self}] Checking for an inversion provider for #{val}: #{provider_class && provider_class.name}")
val = provider_class if provider_class
end | Make sure setting an inversion provider manually groks resource equivs. | poise_poise | train |
175f9ef6792ad29f6d309b2033b4ff41700915bd | diff --git a/libraries/joomla/database/database.php b/libraries/joomla/database/database.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/database/database.php
+++ b/libraries/joomla/database/database.php
@@ -9,7 +9,7 @@
defined('JPATH_PLATFORM') or die;
-jimport('joomla.database.databaseexception');
+JLoader::register('DatabaseException', JPATH_PLATFORM.'/joomla/database/databaseexception.php');
jimport('joomla.filesystem.folder');
/**
@@ -1175,6 +1175,18 @@ abstract class JDatabase
abstract public function getTableColumns($tables, $typeOnly = true);
/**
+ * Retrieves field information about the given tables.
+ *
+ * @param mixed $tables A table name or a list of table names.
+ *
+ * @return array An array of keys for the table(s).
+ *
+ * @since 11.1
+ * @throws DatabaseException
+ */
+ abstract public function getTableKeys($tables);
+
+ /**
* Method to get an array of all tables in the database.
*
* @return array An array of all the tables in the database.
@@ -1523,7 +1535,16 @@ abstract class JDatabase
// Deprecation warning.
JLog::add('JDatabase::getTableFields() is deprecated. Use JDatabase::getTableColumns().', JLog::WARNING, 'deprecated');
- return $this->getTableColumns($tables, $typeOnly);
+ $results = array();
+
+ settype($tables, 'array');
+
+ foreach ($tables as $table)
+ {
+ $results[$table] = $this->getTableColumns($table, $typeOnly);
+ }
+
+ return $results;
}
/** | Fix bug in the loader for the database exception class. | joomla_joomla-framework | train |
c5fefd3d10627363f6a8663ac14e6cff799827c4 | diff --git a/main_test.go b/main_test.go
index <HASH>..<HASH> 100644
--- a/main_test.go
+++ b/main_test.go
@@ -299,6 +299,8 @@ func (t *DBTest) TestTypeHstore(c *C) {
}
func (t *DBTest) TestTypeStmtHstore(c *C) {
+ t.db.Exec("CREATE EXTENSION hstore")
+
stmt, err := t.db.Prepare("SELECT $1::hstore")
c.Assert(err, IsNil)
@@ -307,6 +309,8 @@ func (t *DBTest) TestTypeStmtHstore(c *C) {
_, err = stmt.QueryOne(pg.LoadInto(&dst), src)
c.Assert(err, IsNil)
c.Assert(dst, DeepEquals, src)
+
+ t.db.Exec("DROP EXTENSION hstore")
}
func (t *DBTest) TestQueryInts(c *C) { | tests: Create/drop hstore extension. | go-pg_pg | train |
b397e18ad2b7d0848ead58dfcd32b8a64f7d322c | diff --git a/app/assets/javascripts/fae/form/_form_manager.js b/app/assets/javascripts/fae/form/_form_manager.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/fae/form/_form_manager.js
+++ b/app/assets/javascripts/fae/form/_form_manager.js
@@ -98,7 +98,7 @@ Fae.form.formManager = {
var $labelsCheckbox = $labelTextEl.find('input');
var newLabelText = '';
- if ($container.hasClass('required') || $label.hasClass('required')) {
+ if ($container.hasClass('required') || $label.hasClass('required') || $labelInner.text().indexOf('*') !== -1) {
newLabelText = _this.requiredEl;
} | detect fae page required fields | wearefine_fae | train |
27b75af201f9fff2e8c020f8cfdc780f201e9111 | diff --git a/lib/neo4j/active_node/query/query_proxy.rb b/lib/neo4j/active_node/query/query_proxy.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/active_node/query/query_proxy.rb
+++ b/lib/neo4j/active_node/query/query_proxy.rb
@@ -208,9 +208,21 @@ module Neo4j
end
def _create_relationship(other_node_or_nodes, properties)
- _session.query(context: @options[:context])
- .match(:start, :end).match_nodes(start: @start_object, end: other_node_or_nodes)
- .send(association.create_method, "start#{_association_arrow(properties, true)}end").exec
+ if association.relationship_class
+ _create_relationship_with_rel_class(other_node_or_nodes, properties)
+ else
+ _session.query(context: @options[:context])
+ .match(:start, :end).match_nodes(start: @start_object, end: other_node_or_nodes)
+ .send(association.create_method, "start#{_association_arrow(properties, true)}end").exec
+ end
+ end
+
+ def _create_relationship_with_rel_class(other_node_or_nodes, properties)
+ Array(other_node_or_nodes).each do |other_node|
+ node_props = (association.direction == :in) ? {from_node: other_node, to_node: @start_object} : {from_node: @start_object, to_node: other_node}
+
+ association.relationship_class.create(properties.except(:_classname).merge(node_props))
+ end
end
def read_attribute_for_serialization(*args)
@@ -298,21 +310,20 @@ module Neo4j
end
def _association_chain_var
+ fail 'Crazy error' if !(start_object || @query_proxy)
+
if start_object
:"#{start_object.class.name.gsub('::', '_').downcase}#{start_object.neo_id}"
- elsif @query_proxy
- @query_proxy.node_var || :"node#{_chain_level}"
else
- fail 'Crazy error' # TODO: Better error
+ @query_proxy.node_var || :"node#{_chain_level}"
end
end
def _association_query_start(var)
- if object = (start_object || @query_proxy)
- object.query_as(var)
- else
- fail 'Crazy error' # TODO: Better error
- end
+ # TODO: Better error
+ fail 'Crazy error' if !(object = (start_object || @query_proxy))
+
+ object.query_as(var)
end
def _rel_chain_var
@@ -326,11 +337,8 @@ module Neo4j
def instance_vars_from_options!(options)
@node_var, @session, @source_object, @starting_query, @optional,
@start_object, @query_proxy, @chain_level, @association_labels,
- @rel_length = options.values_at(:node, :session, :source_object,
- :starting_query, :optional,
- :start_object, :query_proxy,
- :chain_level, :association_labels,
- :rel_length)
+ @rel_length = options.values_at(:node, :session, :source_object, :starting_query, :optional,
+ :start_object, :query_proxy, :chain_level, :association_labels, :rel_length)
end
def build_deeper_query_proxy(method, args)
diff --git a/spec/e2e/active_rel_spec.rb b/spec/e2e/active_rel_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/e2e/active_rel_spec.rb
+++ b/spec/e2e/active_rel_spec.rb
@@ -25,6 +25,9 @@ describe 'ActiveRel' do
property :score, type: Integer
property :links
+ property :default, default: 'default_value'
+ property :should_be_nil
+ validates :should_be_nil, inclusion: {in: [nil]}
serialize :links
end
end
@@ -213,6 +216,18 @@ describe 'ActiveRel' do
t1.string_others << f1
expect(t1.string_others.count).to eq 2
end
+
+ it 'should use the ActiveRel class' do
+ result = Neo4j::Session.current.query("MATCH (start)-[r]-() WHERE start.uuid = {start_uuid} RETURN r.default AS value", start_uuid: f1.uuid).to_a
+ expect(result[0].value).to eq('default_value')
+ end
+
+ it 'should validate when creating' do
+ f = FromClass.create
+ f.others.create(t1, should_be_nil: 'not_nil')
+ result = Neo4j::Session.current.query("MATCH (start)-[r]-() WHERE start.uuid = {start_uuid} RETURN r.default AS value", start_uuid: f.uuid).to_a
+ expect(result).to be_empty
+ end
end
context 'with rel created from activerel' do | Make relationship creations with #<< and #create on associations use the rel_class if possible. | neo4jrb_neo4j | train |
fd0fc7200ce1fcd4405122985db36f1fdcec3e5e | diff --git a/src/com/opencms/workplace/CmsXmlLanguageFile.java b/src/com/opencms/workplace/CmsXmlLanguageFile.java
index <HASH>..<HASH> 100755
--- a/src/com/opencms/workplace/CmsXmlLanguageFile.java
+++ b/src/com/opencms/workplace/CmsXmlLanguageFile.java
@@ -1,7 +1,7 @@
/*
* File : $Source: /alkacon/cvs/opencms/src/com/opencms/workplace/Attic/CmsXmlLanguageFile.java,v $
-* Date : $Date: 2003/02/22 11:17:31 $
-* Version: $Revision: 1.43 $
+* Date : $Date: 2003/02/28 13:25:43 $
+* Version: $Revision: 1.44 $
*
* This library is part of OpenCms -
* the Open Source Content Mananagement System
@@ -35,7 +35,7 @@ package com.opencms.workplace;
* been changed to use the standard <code>java.util.ResouceBundle</code> technology.<p>
*
* @author Alexander Kandzior ([email protected])
- * @version $Revision: 1.43 $ $Date: 2003/02/22 11:17:31 $
+ * @version $Revision: 1.44 $ $Date: 2003/02/28 13:25:43 $
*/
import com.opencms.boot.I_CmsLogChannels;
import com.opencms.core.A_OpenCms;
@@ -248,6 +248,12 @@ public class CmsXmlLanguageFile {
}
}
+ if (keyName.startsWith("help.")) {
+ // online help might not have been installed or missing help key, return default page
+ return "index.html";
+ }
+
+ // key was not found
if (DEBUG > 1) System.err.println("CmsXmlLanguageFile.getLanguageValue(): '" + keyName + "' not found at all (this is bad)");
if (I_CmsLogChannels.C_LOGGING && A_OpenCms.isLogging(I_CmsLogChannels.C_OPENCMS_INFO)) {
A_OpenCms.log(I_CmsLogChannels.C_OPENCMS_INFO, this.getClass().getName() + | Added ignore for .help missing keys | alkacon_opencms-core | train |
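
A compact Python sketch of the fallback rule this commit adds: a missing help.* key degrades to the default help page instead of failing. Names are illustrative, not the OpenCms API:

def get_language_value(bundle, key, default_help_page="index.html"):
    if key in bundle:
        return bundle[key]
    if key.startswith("help."):
        # the online help might not be installed, or the key is missing:
        # fall back to the default page instead of failing
        return default_help_page
    return None  # the Java version logs the miss and falls through
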
5bcc47bb7e6fb9eaf2849cc66c379fcc2baed487 | diff --git a/model/portableElement/action/RegisterPortableElement.php b/model/portableElement/action/RegisterPortableElement.php
index <HASH>..<HASH> 100644
--- a/model/portableElement/action/RegisterPortableElement.php
+++ b/model/portableElement/action/RegisterPortableElement.php
@@ -52,6 +52,9 @@ abstract class RegisterPortableElement extends common_ext_action_InstallAction
try {
$model = $service->getValidPortableElementFromDirectorySource($sourceDirectory);
+ if(empty($model)){
+ return Report::createFailure('no valid portable element found in directory "'.$sourceDirectory. '"');
+ }
if(!empty($params)){
$minRequiredVersion = $params[0];
// if the minimal required version number string "x.y.z" is given in the parameter, the new target version should be equal or higher than it
diff --git a/model/portableElement/validator/PortableElementAssetValidator.php b/model/portableElement/validator/PortableElementAssetValidator.php
index <HASH>..<HASH> 100644
--- a/model/portableElement/validator/PortableElementAssetValidator.php
+++ b/model/portableElement/validator/PortableElementAssetValidator.php
@@ -135,7 +135,8 @@ abstract class PortableElementAssetValidator implements Validatable
throw new PortableElementParserException('Unable to locate extracted zip file.');
}
- $filePath = $source . $file;
+ $filePath = rtrim($source, DIRECTORY_SEPARATOR) . DIRECTORY_SEPARATOR . str_replace('./', '', $file);
+
if (file_exists($filePath) || file_exists($filePath . '.js')) {
return true;
}
diff --git a/model/portableElement/validator/PortableElementModelValidator.php b/model/portableElement/validator/PortableElementModelValidator.php
index <HASH>..<HASH> 100644
--- a/model/portableElement/validator/PortableElementModelValidator.php
+++ b/model/portableElement/validator/PortableElementModelValidator.php
@@ -43,6 +43,8 @@ abstract class PortableElementModelValidator extends PortableElementAssetValidat
'libraries',
'stylesheets',
'mediaFiles',
+ 'modules',
+ 'config',
],
'creator' => [
            'libraries', | Default model validator now treats modules and config as optional; also fixed file path construction in validFile() | oat-sa_extension-tao-itemqti | train
bf0175704c5e2d1995ad9242729ec3e9e79b5ae6 | diff --git a/lib/google_static_maps_helper/path.rb b/lib/google_static_maps_helper/path.rb
index <HASH>..<HASH> 100644
--- a/lib/google_static_maps_helper/path.rb
+++ b/lib/google_static_maps_helper/path.rb
@@ -51,9 +51,9 @@ module GoogleStaticMapsHelper
out += "#{path_params}|" unless path_params.empty?
- out += inject([]) do |point_params, point|
- point_params << point.to_url
- end.join('|')
+ out += encoded_url_points if encoding_points?
+ out += unencoded_url_points unless encoding_points?
+ out
end
@@ -127,5 +127,15 @@ module GoogleStaticMapsHelper
def add_points(points)
points.each {|point| self << point}
end
+
+ def encoded_url_points
+ ''
+ end
+
+ def unencoded_url_points
+ inject([]) do |point_params, point|
+ point_params << point.to_url
+ end.join('|')
+ end
end
end
diff --git a/spec/google_static_maps_helper_spec.rb b/spec/google_static_maps_helper_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/google_static_maps_helper_spec.rb
+++ b/spec/google_static_maps_helper_spec.rb
@@ -35,7 +35,7 @@ describe GoogleStaticMapsHelper do
point2 = {:lat => 3, :lng => 4}
out = GoogleStaticMapsHelper.url_for do
- path point, point2, :color => :red
+ path point, point2, :color => :red, :encode_points => false
end
out.should include('path=color:red|1,2|3,4')
diff --git a/spec/map_spec.rb b/spec/map_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/map_spec.rb
+++ b/spec/map_spec.rb
@@ -357,7 +357,7 @@ describe GoogleStaticMapsHelper::Map do
describe "paths" do
before do
- @path = GoogleStaticMapsHelper::Path.new
+ @path = GoogleStaticMapsHelper::Path.new :encode_points => false
@point = GoogleStaticMapsHelper::Location.new(:lat => 1, :lng => 2)
@point2 = GoogleStaticMapsHelper::Location.new(:lat => 3, :lng => 4)
@path << @point << @point2
diff --git a/spec/path_spec.rb b/spec/path_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/path_spec.rb
+++ b/spec/path_spec.rb
@@ -119,7 +119,7 @@ describe GoogleStaticMapsHelper::Path do
describe "url_params" do
before do
- @path = GoogleStaticMapsHelper::Path.new
+ @path = GoogleStaticMapsHelper::Path.new :encode_points => false
@point = GoogleStaticMapsHelper::Location.new(:lat => 1, :lng => 2)
@point2 = GoogleStaticMapsHelper::Location.new(:lat => 3, :lng => 4)
      @path << @point << @point2 | Paths are now set to encode their points by default.
Also refactored the internal url_params method so it delegates
to the correct method, which either encodes the points or leaves them as-is. | thhermansen_google_static_maps_helper | train
049f26c1438b9baeebb4f35d75416cab9617cf2e | diff --git a/src/com/google/javascript/jscomp/GlobalTypeInfo.java b/src/com/google/javascript/jscomp/GlobalTypeInfo.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/GlobalTypeInfo.java
+++ b/src/com/google/javascript/jscomp/GlobalTypeInfo.java
@@ -717,6 +717,14 @@ class GlobalTypeInfo implements CompilerPass {
private void visitFunctionEarly(Node fn) {
JSDocInfo fnDoc = NodeUtil.getFunctionJSDocInfo(fn);
Node nameNode = NodeUtil.getFunctionNameNode(fn);
+ boolean isRedeclaration;
+ if (nameNode == null || !nameNode.isQualifiedName()) {
+ isRedeclaration = false;
+ } else if (nameNode.isName()) {
+ isRedeclaration = currentScope.isDefinedLocally(nameNode.getString());
+ } else {
+ isRedeclaration = currentScope.isDefined(nameNode);
+ }
// Collect the names of the formals.
// If a formal is a placeholder for variable arity, eg,
// /** @param {...?} var_args */ function f(var_args) { ... }
@@ -768,9 +776,12 @@ class GlobalTypeInfo implements CompilerPass {
RawNominalType.makeUnrestrictedClass(qname, typeParameters);
}
nominaltypesByNode.put(fn, rawNominalType);
+ if (isRedeclaration) {
+ return;
+ }
if (nameNode.isName()
|| currentScope.isNamespace(nameNode.getFirstChild())) {
- if (fn.getParent().isAssign()) {
+ if (nameNode.isGetProp()) {
fn.getParent().getFirstChild()
.putBooleanProp(Node.ANALYZED_DURING_GTI, true);
}
@@ -1744,7 +1755,7 @@ class GlobalTypeInfo implements CompilerPass {
if (isNamespace(leftmost)) {
return getNamespace(leftmost).isDefined(qname.getAllButLeftmost());
}
- return parent == null ? null : parent.isDefined(qnameNode);
+ return parent == null ? false : parent.isDefined(qnameNode);
}
private boolean isNamespace(Node expr) {
diff --git a/test/com/google/javascript/jscomp/NewTypeInferenceTest.java b/test/com/google/javascript/jscomp/NewTypeInferenceTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/NewTypeInferenceTest.java
+++ b/test/com/google/javascript/jscomp/NewTypeInferenceTest.java
@@ -5920,6 +5920,13 @@ public class NewTypeInferenceTest extends CompilerTypeTestCase {
"};");
}
+ public void testMockedOutConstructorDoesntCrash() {
+ typeCheck(
+ "/** @constructor */ function Foo(){}\n" +
+ "/** @constructor */ Foo = function(){};",
+ NewTypeInference.MISTYPED_ASSIGN_RHS);
+ }
+
public void testDebuggerStatementDoesntCrash() {
checkNoWarnings("debugger;");
} | [NEW TYPE INFERENCE] Don't crash on redeclared constructor
-------------
Created by MOE: <URL> | google_closure-compiler | train |
f35442ad1b1e78007e5daf1c5172f3c90dea70b9 | diff --git a/sanic/worker.py b/sanic/worker.py
index <HASH>..<HASH> 100644
--- a/sanic/worker.py
+++ b/sanic/worker.py
@@ -36,10 +36,12 @@ class GunicornWorker(base.Worker):
super().init_process()
def run(self):
- self._runner = asyncio.async(self._run(), loop=self.loop)
+ self._runner = asyncio.ensure_future(self._run(), loop=self.loop)
try:
self.loop.run_until_complete(self._runner)
+ trigger_events(self._server_settings.get('after_start', []), self.loop)
+ self.loop.run_until_complete(self._check_alive())
finally:
trigger_events(self._server_settings.get('before_stop', []), self.loop)
self.loop.close()
@@ -83,7 +85,7 @@ class GunicornWorker(base.Worker):
**self._server_settings
))
- trigger_events(self._server_settings.get('after_start', []), self.loop)
+ async def _check_alive(self):
# If our parent changed then we shut down.
pid = os.getpid()
try: | Fix RuntimeError: this event loop is already running | huge-success_sanic | train |
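
The fix stops nesting run_until_complete inside an already-running loop: the server future completes first, then the liveness watchdog runs as a second phase on the same loop. A stripped-down asyncio sketch of that two-phase shape (not Sanic's actual worker):

import asyncio

async def serve():
    await asyncio.sleep(0)   # stand-in for running the server to shutdown

async def check_alive():
    await asyncio.sleep(0)   # stand-in for the parent-pid watchdog loop

loop = asyncio.new_event_loop()
try:
    loop.run_until_complete(serve())        # phase 1: the server future
    loop.run_until_complete(check_alive())  # phase 2: same loop, not nested
finally:
    loop.close()
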
803ef9ada14e966229f1de92ce1e5e82f332e851 | diff --git a/simple_history/tests/tests/test_commands.py b/simple_history/tests/tests/test_commands.py
index <HASH>..<HASH> 100644
--- a/simple_history/tests/tests/test_commands.py
+++ b/simple_history/tests/tests/test_commands.py
@@ -108,8 +108,11 @@ class TestPopulateHistory(TestCase):
out.getvalue())
+@skipUnless(django.get_version() >= "1.7", "Requires 1.7 migrations")
class TestMigrate(TestCase):
- @skipUnless(django.get_version() >= "1.7", "Requires 1.7 migrations")
+    def test_makemigration_command(self):
+ management.call_command('makemigration', 'migration_test_app', stdout=StringIO())
+
def test_migrate_command(self):
management.call_command('migrate', 'migration_test_app', fake=True, stdout=StringIO()) | Added 'makemigration' command to tests | treyhunner_django-simple-history | train |
e5cdb1b1ed16893e932bd81820a48dea6d1d55e9 | diff --git a/modules/orionode/lib/git/diff.js b/modules/orionode/lib/git/diff.js
index <HASH>..<HASH> 100644
--- a/modules/orionode/lib/git/diff.js
+++ b/modules/orionode/lib/git/diff.js
@@ -230,16 +230,11 @@ function processDiff(diff, filePath, paths, fileDir, req, res, includeDiff, incl
prefix = "-";
break;
case git.Diff.LINE.DEL_EOFNL:
- prefix = "\\ No newline at end of file";
- break;
case git.Diff.LINE.ADD_EOFNL:
- prefix = "\\ No newline at end of file";
+ prefix = "";
break;
}
- var content = line.content();
- var index = content.indexOf("\n");
- if (index !== -1) content = content.substring(0, index + 1);
- buffer.push(prefix + content);
+ buffer.push(prefix + line.content());
});
}));
}); | Bug <I> - Patch fails to apply due to whitespace | eclipse_orion.client | train |
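
A small Python sketch of the prefix table after this change: the EOF-newline origins get an empty prefix so the underlying "\ No newline at end of file" content passes through unmodified. Origin names here are illustrative, not libgit2 constants:

PREFIXES = {
    "CONTEXT": " ",
    "ADDITION": "+",
    "DELETION": "-",
    "ADD_EOFNL": "",  # keep the "\ No newline at end of file" content as-is
    "DEL_EOFNL": "",
}

def render(lines):
    # lines: iterable of (origin, content) pairs; content keeps its newline
    return "".join(PREFIXES[origin] + content for origin, content in lines)
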
b46c8ba6ba91a45e00885e78cc05953577c051a1 | diff --git a/src/Storage/DBFile.php b/src/Storage/DBFile.php
index <HASH>..<HASH> 100644
--- a/src/Storage/DBFile.php
+++ b/src/Storage/DBFile.php
@@ -278,6 +278,16 @@ class DBFile extends DBComposite implements AssetContainer, Thumbnail
}
/**
+ * Return URL for this image. Alias for getURL()
+ *
+ * @return string
+ */
+ public function Link()
+ {
+ return $this->getURL();
+ }
+
+ /**
* Get URL, but without resampling.
* Note that this will return the url even if the file does not exist.
* | ENHANCEMENT Add DBFile::Link() alias for DBFile::getURL() so that it matches File::Link()
Fixes <URL> | silverstripe_silverstripe-assets | train |
7e962fdc76fc07730e9a7c9cd4af53e56a2b78c0 | diff --git a/spyderlib/plugins/externalconsole.py b/spyderlib/plugins/externalconsole.py
index <HASH>..<HASH> 100644
--- a/spyderlib/plugins/externalconsole.py
+++ b/spyderlib/plugins/externalconsole.py
@@ -1263,7 +1263,7 @@ class ExternalConsole(SpyderPluginWidget):
self.start(fname=None, wdir=to_text_string(wdir), args='',
interact=True, debug=False, python=True)
- def start_ipykernel(self, client, wdir=None, create_client=True):
+ def start_ipykernel(self, client=None, wdir=None, create_client=True):
"""Start new IPython kernel"""
if create_client and not self.get_option('monitor/enabled'):
QMessageBox.warning(self, _('Open an IPython console'),
diff --git a/spyderlib/plugins/ipythonconsole.py b/spyderlib/plugins/ipythonconsole.py
index <HASH>..<HASH> 100644
--- a/spyderlib/plugins/ipythonconsole.py
+++ b/spyderlib/plugins/ipythonconsole.py
@@ -1210,19 +1210,23 @@ class IPythonConsole(SpyderPluginWidget):
self.extconsole.close_console(index=idx, from_ipyclient=True)
# Set attributes for the new kernel
- self.extconsole.set_ipykernel_attrs(connection_file, kernel_widget)
+ match = re.match('^kernel-(\d+).json', connection_file)
+ kernel_id = match.groups()[0]
+ self.extconsole.set_ipykernel_attrs(connection_file, kernel_widget,
+ kernel_id)
# Connect client to new kernel
km, kc = self.create_kernel_manager_and_client(connection_file)
client.ipywidget.kernel_manager = km
client.ipywidget.kernel_client = kc
client.kernel_widget_id = id(kernel_widget)
+ client.connection_file = connection_file
client.get_control().setFocus()
# Rename client tab
- client_widget_id = id(client)
- self.rename_ipyclient_tab(connection_file, client_widget_id)
-
+ client.name = kernel_id + '/A'
+ self.rename_ipyclient_tab(client)
+
#----Drag and drop
#TODO: try and reimplement this block
# (this is still the original code block copied from externalconsole.py) | IPython Console: Fix restarting kernel action which was broken since revision a6cac<I> | spyder-ide_spyder | train |
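
The kernel id extraction added here is a single regex over the connection file name; a self-contained Python illustration (with the dot escaped, which the original pattern leaves as a wildcard):

import re

def kernel_id(connection_file):
    # "kernel-1234.json" -> "1234"
    match = re.match(r"^kernel-(\d+)\.json", connection_file)
    return match.group(1) if match else None

assert kernel_id("kernel-1234.json") == "1234"
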
be49b2b9421b9e48424d236814e4b3bb948a5110 | diff --git a/lib/Cake/Utility/ObjectCollection.php b/lib/Cake/Utility/ObjectCollection.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Utility/ObjectCollection.php
+++ b/lib/Cake/Utility/ObjectCollection.php
@@ -261,7 +261,7 @@ abstract class ObjectCollection {
}
/**
- * Gets the list of attached behaviors, or, whether the given behavior is attached
+ * Gets the list of attached objects, or, whether the given object is attached
*
* @param string $name Optional. The name of the behavior to check the status of. If omitted,
* returns an array of currently-attached behaviors | fixing up the docs for Collection::attached(), was showing behaviors but is for all collections | cakephp_cakephp | train |
841b0e4dc49c1734b5664017f14e1cd910722f39 | diff --git a/src/AnyContent/Client/Client.php b/src/AnyContent/Client/Client.php
index <HASH>..<HASH> 100755
--- a/src/AnyContent/Client/Client.php
+++ b/src/AnyContent/Client/Client.php
@@ -465,6 +465,45 @@ class Client
}
return (int)$result;
+ }
+
+
+ public function saveRecords($records, $workspace = 'default', $viewName = 'default', $language = 'default')
+ {
+ if (count($records)==0)
+ {
+ return false;
+ }
+ $record = $records[0];
+ $contentTypeName = $record->getContentType();
+
+ $url = 'content/' . $contentTypeName . '/records/' . $workspace . '/' . $viewName;
+
+ $json = json_encode($records);
+
+ $request = $this->guzzle->post($url, null, array( 'records' => $json, 'language' => $language ));
+
+ $result = false;
+ try
+ {
+ $result = $request->send()->json();
+
+ }
+ catch (\Exception $e)
+ {
+ throw new AnyContentClientException($e->getMessage(), AnyContentClientException::CLIENT_CONNECTION_ERROR);
+ }
+
+ // repository info has changed
+ $this->deleteRepositoryInfo($workspace, $language);
+
+ if ($result === false)
+ {
+ return false;
+ }
+
+
+ return $result;
}
@@ -620,6 +659,31 @@ class Client
return $result;
}
+ public function deleteRecords(ContentTypeDefinition $contentTypeDefinition, $workspace = 'default', $language = 'default')
+ {
+ $url = 'content/' . $contentTypeDefinition->getName() . '/records/' . $workspace;
+ $options = array( 'query' => array( 'language' => $language ) );
+ $request = $this->guzzle->delete($url, null, null, $options);
+
+ try
+ {
+ $result = $request->send()->json();
+ }
+ catch (\Exception $e)
+ {
+ $response = $request->getResponse();
+ if ($response && $response->getStatusCode() != 404)
+ {
+ throw new AnyContentClientException($e->getMessage(), AnyContentClientException::CLIENT_CONNECTION_ERROR);
+ }
+ }
+
+ // repository info has changed
+ $this->deleteRepositoryInfo($workspace, $language);
+
+ return $result;
+ }
+
public function getRecords(ContentTypeDefinition $contentTypeDefinition, $workspace = 'default', $viewName = 'default', $language = 'default', $order = 'id', $properties = array(), $limit = null, $page = 1, $filter = null, $subset = null, $timeshift = 0)
{
diff --git a/src/AnyContent/Client/Repository.php b/src/AnyContent/Client/Repository.php
index <HASH>..<HASH> 100755
--- a/src/AnyContent/Client/Repository.php
+++ b/src/AnyContent/Client/Repository.php
@@ -161,6 +161,10 @@ class Repository
return $this->client->saveRecord($record, $workspace, $viewName, $language);
}
+ public function saveRecords(Array $records, $workspace = 'default', $viewName = 'default', $language = 'default')
+ {
+ return $this->client->saveRecords($records, $workspace, $viewName, $language);
+ }
public function getRecords($workspace = 'default', $viewName = 'default', $language = 'default', $order = 'id', $properties = array(), $limit = null, $page = 1, $filter = null, $subset = null, $timeshift = 0)
{
@@ -218,6 +222,17 @@ class Repository
}
+ public function deleteRecords($workspace = 'default', $language = 'default')
+ {
+ if ($this->contentTypeDefinition)
+ {
+ return $this->client->deleteRecords($this->contentTypeDefinition, $workspace, $language);
+ }
+
+ return false;
+
+ }
+
public function getConfig($configTypeName = null)
{ | methods for saving/deleting more than one record | nhagemann_anycontent-client-php | train |
16eabdfd887986120be6f2042b83f4922c9b665c | diff --git a/code/model/Order.php b/code/model/Order.php
index <HASH>..<HASH> 100644
--- a/code/model/Order.php
+++ b/code/model/Order.php
@@ -18,7 +18,6 @@
*/
class Order extends DataObject implements PermissionProvider {
-
/**
* Add a string to the start of an order number (can be useful for
* exporting orders).
@@ -27,10 +26,38 @@ class Order extends DataObject implements PermissionProvider {
* @config
*/
private static $order_prefix = "";
+
+ /**
+ * List of possible statuses this order can have. Rather than using
+ * an enum, we load this as a config variable that can be changed
+ * more freely.
+ *
+ * @var array
+ * @config
+ */
+ private static $statuses = array(
+ "incomplete" => "Incomplete",
+ "failed" => "Failed",
+ "cancelled" => "Cancelled",
+ "pending" => "Pending",
+ "paid" => "Paid",
+ "processing" => "Processing",
+ "dispatched" => "Dispatched",
+ "refunded" => "Refunded"
+ );
+
+ /**
+ * Set the default status for a new order, if this is set to null or
+ * blank, it will not be used.
+ *
+ * @var string
+ * @config
+ */
+ private static $default_status = "incomplete";
private static $db = array(
'OrderNumber' => 'Varchar',
- 'Status' => "Enum('incomplete,failed,cancelled,pending,paid,processing,dispatched,refunded','incomplete')",
+ 'Status' => "Varchar",
// Billing Details
'Company' => 'Varchar',
@@ -111,8 +138,9 @@ class Order extends DataObject implements PermissionProvider {
public function getCMSFields() {
$fields = parent::getCMSFields();
- // Remove defailt item admin
+ // Remove default item admin
$fields->removeByName('Items');
+ $fields->removeByName('Status');
$fields->removeByName('EmailDispatchSent');
$fields->removeByName('PostageID');
$fields->removeByName('PaymentID');
@@ -142,8 +170,18 @@ class Order extends DataObject implements PermissionProvider {
$fields->addFieldToTab(
'Root.Main',
ReadonlyField::create('OrderNumber', "#"),
- 'Status'
+ 'Company'
);
+
+ $fields->addFieldToTab(
+ 'Root.Main',
+ $statusfield = DropdownField::create('Status', null, $this->config()->statuses),
+ 'Company'
+ );
+
+ // Set default status if we can
+ if($this->config()->default_status && !$this->Status)
+ $statusfield->setValue($this->config()->default_status);
$fields->addFieldToTab(
'Root.Main', | Add status codes to orders as a config variable | silvercommerce_shoppingcart | train |
c6055374e89ece0127453784d7f0140e1149ff64 | diff --git a/lib/datagrid/drivers/active_record.rb b/lib/datagrid/drivers/active_record.rb
index <HASH>..<HASH> 100644
--- a/lib/datagrid/drivers/active_record.rb
+++ b/lib/datagrid/drivers/active_record.rb
@@ -102,7 +102,11 @@ module Datagrid
end
def batch_each(scope, batch_size, &block)
- scope.find_each(batch_size ? { :batch_size => batch_size} : {}, &block)
+        if scope.limit_value
+          raise Datagrid::ConfigurationError, "ActiveRecord can not use batches in combination with SQL limit"
+        else
+          scope.find_each(batch_size ? { :batch_size => batch_size} : {}, &block)
+        end
end
def default_cache_key(asset)
diff --git a/spec/datagrid/drivers/active_record_spec.rb b/spec/datagrid/drivers/active_record_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/datagrid/drivers/active_record_spec.rb
+++ b/spec/datagrid/drivers/active_record_spec.rb
@@ -50,4 +50,17 @@ describe Datagrid::Drivers::ActiveRecord do
)
end
end
+
+ describe "batches usage" do
+
+ it "should be incompatible with scope with limit" do
+ report = test_report do
+ scope {Entry.limit(5)}
+ self.batch_size = 20
+ end
+ expect { report.assets }.to raise_error(Datagrid::ConfigurationError)
+ end
+ end
+
+
end | Raise when batches are used in combo with SQL limit | bogdan_datagrid | train |
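
The same guard in a Python sketch, assuming a hypothetical scope object that exposes limit_value and find_each:

class ConfigurationError(Exception):
    pass

def batch_each(scope, batch_size):
    # scope is an assumed query-like object, not a real ORM class
    if scope.limit_value is not None:
        raise ConfigurationError(
            "batches can not be used in combination with an SQL limit")
    kwargs = {"batch_size": batch_size} if batch_size else {}
    return scope.find_each(**kwargs)
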
111639e0b8e0edc2570a85b80bbec1c0749ebdb8 | diff --git a/tests/test_api.py b/tests/test_api.py
index <HASH>..<HASH> 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -436,6 +436,7 @@ class TestOSBS(object):
'git_uri': TEST_GIT_URI,
'git_ref': TEST_GIT_REF,
'user': TEST_USER,
+ 'release': '1'
}
if branch:
kwargs['git_branch'] = branch
@@ -461,6 +462,7 @@ class TestOSBS(object):
'outer_template': ORCHESTRATOR_OUTER_TEMPLATE,
'customize_conf': ORCHESTRATOR_CUSTOMIZE_CONF,
'arrangement_version': DEFAULT_ARRANGEMENT_VERSION,
+ 'release': '1'
}
(flexmock(osbs) | Test that release value can be used in OSBS api | projectatomic_osbs-client | train |
6113ac1559d62c828dfbf08ef0f7f172c24cf7f5 | diff --git a/python/pyspark/rdd.py b/python/pyspark/rdd.py
index <HASH>..<HASH> 100644
--- a/python/pyspark/rdd.py
+++ b/python/pyspark/rdd.py
@@ -250,7 +250,7 @@ class RDD(object):
def map(self, f, preservesPartitioning=False):
"""
Return a new RDD by applying a function to each element of this RDD.
-
+
>>> rdd = sc.parallelize(["b", "a", "c"])
>>> sorted(rdd.map(lambda x: (x, 1)).collect())
[('a', 1), ('b', 1), ('c', 1)]
@@ -312,6 +312,15 @@ class RDD(object):
"use mapPartitionsWithIndex instead", DeprecationWarning, stacklevel=2)
return self.mapPartitionsWithIndex(f, preservesPartitioning)
+ def getNumPartitions(self):
+ """
+ Returns the number of partitions in RDD
+ >>> rdd = sc.parallelize([1, 2, 3, 4], 2)
+ >>> rdd.getNumPartitions()
+ 2
+ """
+ return self._jrdd.splits().size()
+
def filter(self, f):
"""
Return a new RDD containing only the elements that satisfy a predicate.
@@ -413,9 +422,9 @@ class RDD(object):
def intersection(self, other):
"""
- Return the intersection of this RDD and another one. The output will not
+ Return the intersection of this RDD and another one. The output will not
contain any duplicate elements, even if the input RDDs did.
-
+
Note that this method performs a shuffle internally.
>>> rdd1 = sc.parallelize([1, 10, 2, 3, 4, 5])
@@ -571,14 +580,14 @@ class RDD(object):
"""
Applies a function to each partition of this RDD.
- >>> def f(iterator):
- ... for x in iterator:
- ... print x
+ >>> def f(iterator):
+ ... for x in iterator:
+ ... print x
... yield None
>>> sc.parallelize([1, 2, 3, 4, 5]).foreachPartition(f)
"""
self.mapPartitions(f).collect() # Force evaluation
-
+
def collect(self):
"""
Return a list that contains all of the elements in this RDD.
@@ -673,7 +682,7 @@ class RDD(object):
yield acc
return self.mapPartitions(func).fold(zeroValue, combOp)
-
+
def max(self):
"""
@@ -692,7 +701,7 @@ class RDD(object):
1.0
"""
return self.reduce(min)
-
+
def sum(self):
"""
Add up the elements in this RDD.
@@ -786,7 +795,7 @@ class RDD(object):
m1[k] += v
return m1
return self.mapPartitions(countPartition).reduce(mergeMaps)
-
+
def top(self, num):
"""
Get the top N elements from a RDD.
@@ -814,7 +823,7 @@ class RDD(object):
def takeOrdered(self, num, key=None):
"""
Get the N elements from a RDD ordered in ascending order or as specified
- by the optional key function.
+ by the optional key function.
>>> sc.parallelize([10, 1, 2, 9, 3, 4, 5, 6, 7]).takeOrdered(6)
[1, 2, 3, 4, 5, 6]
@@ -834,7 +843,7 @@ class RDD(object):
if key_ != None:
x = [i[1] for i in x]
return x
-
+
def merge(a, b):
return next(topNKeyedElems(a + b))
result = self.mapPartitions(lambda i: topNKeyedElems(i, key)).reduce(merge)
@@ -1169,12 +1178,12 @@ class RDD(object):
combiners[k] = mergeCombiners(combiners[k], v)
return combiners.iteritems()
return shuffled.mapPartitions(_mergeCombiners)
-
+
def foldByKey(self, zeroValue, func, numPartitions=None):
"""
Merge the values for each key using an associative function "func" and a neutral "zeroValue"
- which may be added to the result an arbitrary number of times, and must not change
- the result (e.g., 0 for addition, or 1 for multiplication.).
+ which may be added to the result an arbitrary number of times, and must not change
+ the result (e.g., 0 for addition, or 1 for multiplication.).
>>> rdd = sc.parallelize([("a", 1), ("b", 1), ("a", 1)])
>>> from operator import add
@@ -1182,8 +1191,8 @@ class RDD(object):
[('a', 2), ('b', 1)]
"""
return self.combineByKey(lambda v: func(zeroValue, v), func, func, numPartitions)
-
-
+
+
# TODO: support variant with custom partitioner
def groupByKey(self, numPartitions=None):
"""
@@ -1302,7 +1311,7 @@ class RDD(object):
def repartition(self, numPartitions):
"""
Return a new RDD that has exactly numPartitions partitions.
-
+
Can increase or decrease the level of parallelism in this RDD. Internally, this uses
a shuffle to redistribute data.
If you are decreasing the number of partitions in this RDD, consider using `coalesce`, | [SPARK-<I>] Add getNumPartitions to pyspark RDD
Add getNumPartitions to pyspark RDD to provide an intuitive way to get number of partitions in RDD like we can do in scala today. | apache_spark | train |
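
Usage mirrors the doctest in the diff, assuming a live SparkContext bound to sc:

rdd = sc.parallelize([1, 2, 3, 4], 2)  # requires a running SparkContext
assert rdd.getNumPartitions() == 2
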
febdc8949f22e2dbed9a7658984e24b0925f5064 | diff --git a/src/main/java/eu/fusepool/extractor/RdfGeneratingExtractor.java b/src/main/java/eu/fusepool/extractor/RdfGeneratingExtractor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/eu/fusepool/extractor/RdfGeneratingExtractor.java
+++ b/src/main/java/eu/fusepool/extractor/RdfGeneratingExtractor.java
@@ -62,6 +62,6 @@ public abstract class RdfGeneratingExtractor implements SyncExtractor {
}
- protected abstract TripleCollection generateRdf(Entity entity) throws IOException;
+ protected abstract TripleCollection generateRdf(HttpRequestEntity entity) throws IOException;
} | RdfGeneratingExtractor to pass HttpRequestEntity to generateRdf method | fusepoolP3_p3-transformer-library | train |
bbff9cf5ac87d2ab88e4eda0079a6bd3ec2f003d | diff --git a/upload/admin/model/design/layout.php b/upload/admin/model/design/layout.php
index <HASH>..<HASH> 100644
--- a/upload/admin/model/design/layout.php
+++ b/upload/admin/model/design/layout.php
@@ -96,7 +96,7 @@ class ModelDesignLayout extends Model {
}
public function getLayoutModules($layout_id) {
- $query = $this->db->query("SELECT * FROM " . DB_PREFIX . "layout_module WHERE layout_id = '" . (int)$layout_id . "'");
+ $query = $this->db->query("SELECT * FROM " . DB_PREFIX . "layout_module WHERE layout_id = '" . (int)$layout_id . "' ORDER BY position ASC, sort_order ASC");
return $query->rows;
} | Module order by position and sort_order in layout | opencart_opencart | train |
a8225a11d0e8deffcc81f8345f918c158fef105b | diff --git a/lxd/api_project.go b/lxd/api_project.go
index <HASH>..<HASH> 100644
--- a/lxd/api_project.go
+++ b/lxd/api_project.go
@@ -19,19 +19,21 @@ import (
"github.com/lxc/lxd/shared/version"
)
-var projectsCmd = Command{
- name: "projects",
- get: projectsGet,
- post: projectsPost,
+var projectsCmd = APIEndpoint{
+ Name: "projects",
+
+ Get: APIEndpointAction{Handler: projectsGet},
+ Post: APIEndpointAction{Handler: projectsPost},
}
-var projectCmd = Command{
- name: "projects/{name}",
- get: projectGet,
- post: projectPost,
- put: projectPut,
- patch: projectPatch,
- delete: projectDelete,
+var projectCmd = APIEndpoint{
+ Name: "projects/{name}",
+
+ Delete: APIEndpointAction{Handler: projectDelete},
+ Get: APIEndpointAction{Handler: projectGet},
+ Patch: APIEndpointAction{Handler: projectPatch},
+ Post: APIEndpointAction{Handler: projectPost},
+ Put: APIEndpointAction{Handler: projectPut},
}
func projectsGet(d *Daemon, r *http.Request) Response { | lxd/project: Port to APIEndpoint | lxc_lxd | train |
ff312a93f21f27d0fc5c10622ad32ab05b692104 | diff --git a/schedule/models.py b/schedule/models.py
index <HASH>..<HASH> 100644
--- a/schedule/models.py
+++ b/schedule/models.py
@@ -6,8 +6,6 @@ from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.template.defaultfilters import date
from django.utils.translation import ugettext, ugettext_lazy as _
-from schedule.periods import Month
-from schedule.occurrence import Occurrence
import datetime
from dateutil import rrule
@@ -145,6 +143,19 @@ class Event(models.Model):
[]
"""
+ persisted_occurrences = self.occurrence_set.all()
+ occurrences = self._get_occurrence_list(start, end)
+ for index in range(len(occurrences)):
+ for p_occurrence in persisted_occurrences:
+ if occurrences[index] == p_occurrence:
+ occurrences[index] = p_occurrence
+ return occurrences
+
+
+ def _get_occurrence_list(self, start, end):
+ """
+
+ """
if self.rule is not None:
params = self.rule.get_params()
frequency = 'rrule.%s' % self.rule.frequency
@@ -158,7 +169,7 @@ class Event(models.Model):
o_end = o_start + (self.end - self.start)
if o_end >= start:
if o_start < end:
- occurrences.append(Occurrence(self,o_start,o_end))
+ occurrences.append(Occurrence(event=self,start=o_start,end=o_end))
else:
break
return occurrences
@@ -168,7 +179,7 @@ class Event(models.Model):
else:
# check if event is in the period
if self.start < end and self.end >= start:
- return [Occurrence(self, self.start, self.end)]
+ return [Occurrence(event=self, start=self.start, end=self.end)]
else:
return []
@@ -346,6 +357,7 @@ class Calendar(models.Model):
return reverse('s_create_event_in_calendar', args=[self.slug])
def get_month(self, date=datetime.datetime.now()):
+ from periods import Month
return Month(self.events.all(), date)
@@ -569,3 +581,39 @@ class EventRelation(models.Model):
def __unicode__(self):
return '%s(%s)-%s' % (self.event.title, self.distinction, self.content_object)
+
+class Occurrence(models.Model):
+ event = models.ForeignKey(Event, verbose_name=_("event"))
+ description = models.TextField(_("description"), blank=True, null=True)
+ start = models.DateTimeField(_("start"))
+ end = models.DateTimeField(_("end"))
+
+ def __unicode__(self):
+ # #TODO remove this if statement
+ # if self.pk is not None:
+ # return ugettext("PERSISTED: %(start)s to %(end)s") % {
+ # 'start': self.start,
+ # 'end': self.end,
+ # }
+ return ugettext("%(start)s to %(end)s") % {
+ 'start': self.start,
+ 'end': self.end,
+ }
+
+ def __cmp__(self, other):
+ rank = cmp(self.start, other.start)
+ if rank == 0:
+ return cmp(self.end, other.end)
+ return rank
+
+ def __eq__(self, other):
+ return self.event == other.event and self.start== other.start and self.end == other.end
+"""
+from schedule import models
+from schedule import periods
+import datetime
+events = models.Event.objects.all()
+month = periods.Month(events, datetime.datetime.now())
+month._get_sorted_occurrences()
+"""
+
\ No newline at end of file
diff --git a/schedule/periods.py b/schedule/periods.py
index <HASH>..<HASH> 100644
--- a/schedule/periods.py
+++ b/schedule/periods.py
@@ -4,7 +4,7 @@ from django.template.defaultfilters import date
from django.utils.translation import ugettext, ugettext_lazy as _
from django.utils.dates import WEEKDAYS, WEEKDAYS_ABBR
from django.conf import settings
-from schedule.occurrence import Occurrence
+from models import Occurrence
# Look for FIRST_DAY_OF_WEEK as a locale setting
first_day_of_week = ugettext('FIRST_DAY_OF_WEEK')
@@ -49,8 +49,15 @@ class Period(object):
def _get_sorted_occurrences(self):
occurrences = []
+ persisted_occurrences = Occurrence.objects.filter(event__in = self.events)
for event in self.events:
- occurrences += event.get_occurrences(self.start, self.end)
+ event_occurrences = event._get_occurrence_list(self.start, self.end)
+ #TODO I am sure the loop below can be done better
+ for index in range(len(event_occurrences)):
+ for p_occurrence in persisted_occurrences:
+ if event_occurrences[index] == p_occurrence:
+ event_occurrences[index] = p_occurrence
+ occurrences += event_occurrences
return sorted(occurrences)
    def classify_occurrence(self, occurrence): | Added first try at persisted occurrences. This code is not stable and should not be used in production of any kind.
git-svn-id: <URL> | llazzaro_django-scheduler | train |
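
The commit's own TODO admits the nested replacement loop could be done better; one possibility, sketched in Python with a dict keyed on the same (event, start, end) equality tuple. event_id is an assumed attribute standing in for the occurrence's event:

def merge_persisted(generated, persisted):
    # occurrences compare equal on (event, start, end), so keying on that
    # tuple replaces the quadratic nested loop with two linear passes
    by_key = {(o.event_id, o.start, o.end): o for o in persisted}
    return [by_key.get((o.event_id, o.start, o.end), o) for o in generated]
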
2d4568cacb769523a956ac0576178b0561313db5 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -8,6 +8,6 @@ var userConfig = {
options: hexo.config.pug || {}
};
-hexo.extend.renderer.register('pug', 'html', function(data) {
- return pugRenderer(extend(data, userConfig));
+hexo.extend.renderer.register('pug', 'html', function(data, locals) {
+ return pugRenderer(extend(data, userConfig), locals);
}, true);
\ No newline at end of file | Added missing locals to pug renderer parameters. | maxknee_hexo-render-pug | train |
42546ffb21cf399975a70c139716ceb8a383df5a | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index <HASH>..<HASH> 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,6 @@
# master
-## Version 2.1.10 (7 Nov 2019)
+## Version 2.1.11 (7 Nov 2019)
* revise README [jcupitt]
* add watermark example [jcupitt]
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -66,7 +66,7 @@ author = u'John Cupitt'
# The short X.Y version.
version = u'2.1'
# The full version, including alpha/beta/rc tags.
-release = u'2.1.10'
+release = u'2.1.11'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/pyvips/version.py b/pyvips/version.py
index <HASH>..<HASH> 100644
--- a/pyvips/version.py
+++ b/pyvips/version.py
@@ -1,4 +1,4 @@
# this is execfile()d into setup.py imported into __init__.py
-__version__ = '2.1.10'
+__version__ = '2.1.11'
__all__ = ['__version__'] | dropped a patch by mistake -- push a fresh version
now <I> | libvips_pyvips | train |
8b28d3fbb4e0fd996f92b5bb7263acd028446d02 | diff --git a/src/Spekkionu/Assetcachebuster/Assetcachebuster.php b/src/Spekkionu/Assetcachebuster/Assetcachebuster.php
index <HASH>..<HASH> 100644
--- a/src/Spekkionu/Assetcachebuster/Assetcachebuster.php
+++ b/src/Spekkionu/Assetcachebuster/Assetcachebuster.php
@@ -17,7 +17,7 @@ class Assetcachebuster
*
* @var string $cdn The url for the cdn
*/
- protected $cdn = null;
+ protected $cdn = '/';
/**
* Asset cache busting hash | Update Assetcachebuster.php
When no cdn provided use slash to have absolute urls | spekkionu_laravel-assetcachebuster | train |
b8da6b839903af83ad47835edb0251017c69a4ad | diff --git a/tensorflow_probability/python/internal/backend/numpy/numpy_test.py b/tensorflow_probability/python/internal/backend/numpy/numpy_test.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/internal/backend/numpy/numpy_test.py
+++ b/tensorflow_probability/python/internal/backend/numpy/numpy_test.py
@@ -196,7 +196,7 @@ def matmul_compatible_pairs(draw,
@hps.composite
-def psd_matrices(draw, eps=1e-2):
+def pd_matrices(draw, eps=1.):
x = draw(
single_arrays(
shape=shapes(min_dims=2),
@@ -210,7 +210,7 @@ def psd_matrices(draw, eps=1e-2):
@hps.composite
def nonsingular_matrices(draw):
- mat = draw(psd_matrices()) # pylint: disable=no-value-for-parameter
+ mat = draw(pd_matrices()) # pylint: disable=no-value-for-parameter
signs = draw(
hnp.arrays(
mat.dtype,
@@ -326,8 +326,8 @@ NUMPY_TEST_CASES = [
# defaults=(None,))
TestCase('linalg.cholesky_solve', [
matmul_compatible_pairs(
- x_strategy=psd_matrices().map(np.linalg.cholesky))
- ], jax_disabled='See discussion in http://b/140073324'),
+ x_strategy=pd_matrices().map(np.linalg.cholesky))
+ ]),
# ArgSpec(args=['coeffs', 'x', 'name'], varargs=None, keywords=None,
# defaults=(None,))
@@ -362,7 +362,7 @@ NUMPY_TEST_CASES = [
[single_arrays(dtype=np.complex64, elements=complex_numbers())]),
TestCase('math.real',
[single_arrays(dtype=np.complex64, elements=complex_numbers())]),
- TestCase('linalg.cholesky', [psd_matrices()]),
+ TestCase('linalg.cholesky', [pd_matrices()]),
TestCase('linalg.lu', [nonsingular_matrices()]),
TestCase('linalg.diag_part', [single_arrays(shape=shapes(min_dims=2))]),
TestCase('identity', [single_arrays()]),
@@ -442,7 +442,7 @@ NUMPY_TEST_CASES = [
# keywords=None, defaults=(True, False, None))
TestCase('linalg.triangular_solve', [
matmul_compatible_pairs(
- x_strategy=psd_matrices().map(np.linalg.cholesky))
+ x_strategy=pd_matrices().map(np.linalg.cholesky))
]),
# ArgSpec(args=['shape_x', 'shape_y'], varargs=None, keywords=None, | Change psd_matrices() test strategy to pd_matrices(). Set eps=1 in order to produce better conditioned matrices. This is intended to avoid numerical inconsistencies between TensorFlow and JAX on poorly conditioned matrix factors and solves.
PiperOrigin-RevId: <I> | tensorflow_probability | train |
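
The pd_matrices construction is sound because X·Xᵀ is positive semi-definite and adding eps·I raises every eigenvalue by eps. A NumPy sketch of that recipe:

import numpy as np

def make_pd(x, eps=1.0):
    # x: (..., n, n); x @ x^T is PSD, eps * I keeps eigenvalues >= eps,
    # so factorizations and solves stay well conditioned
    n = x.shape[-1]
    return np.matmul(x, np.swapaxes(x, -1, -2)) + eps * np.eye(n)
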
e99d52ea5310a01de2a186f1d26ac03e436aead1 | diff --git a/packages/hw-app-fct/src/Fct.js b/packages/hw-app-fct/src/Fct.js
index <HASH>..<HASH> 100644
--- a/packages/hw-app-fct/src/Fct.js
+++ b/packages/hw-app-fct/src/Fct.js
@@ -49,7 +49,7 @@ export default class Fct {
* @example
* const fctaddr = await fct.getAddress("44'/131'/0'/0'/0'")
* const ecaddr = await fct.getAddress("44'/132'/0'/0'/0'")
- * const idaddr = await fct.getAddress("44'/143165576'/0'/0'/0'")
+ * const idaddr = await fct.getAddress("44'/281'/0'/0'/0'")
*/
getAddress(
@@ -65,7 +65,7 @@ export default class Fct {
const bipPath = BIPPath.fromString(path).toPathArray();
let buffer = new Buffer.alloc(1 + bipPath.length * 4);
- const boolIdAddr = (bipPath[1] === 0x88888888)
+ const boolIdAddr = (bipPath[1] === 0x80000119)
buffer.writeInt8(bipPath.length, 0);
bipPath.forEach((segment, index) => {
@@ -245,7 +245,7 @@ export default class Fct {
* @param rawMessage this is the raw data Buffer to be signed
* @param tosha512 set this to true to hash the rawMessage using sha512, the default is sha256.
* @example
- fct.signMessageHash("44'/143165576'/0'/0'/0", "The quick brown fox jumps over the lazy dog.",true).then(result => ...)
+ fct.signMessageHash("44'/281'/0'/0'/0", "The quick brown fox jumps over the lazy dog.",true).then(result => ...)
*/
signMessageHash(
path: string,
@@ -343,7 +343,7 @@ export default class Fct {
* @param rawMessage this is the raw data Buffer to be signed
* @param tosha512 set this to true to has the rawMessage .
* @example
- fct.signMessageRaw("44'/143165576'/0'/0/0", "The quick brown fox jumps over the lazy dog.").then(result => ...)
+ fct.signMessageRaw("44'/281'/0'/0/0", "The quick brown fox jumps over the lazy dog.").then(result => ...)
*/
signMessageRaw(
path: string, | updated id type to comply with slip<I> request | MyFactomWallet_ledger-factomjs | train |
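
The identity check compares the path's coin-type segment against the hardened SLIP-0044 value: 281 with the hardening bit set is 0x80000119, which is exactly what the diff now tests. A small Python illustration:

HARDENED = 0x80000000
IDENTITY_COIN_TYPE = 281  # hardened: 281 | 0x80000000 == 0x80000119

def is_identity_path(segments):
    # segments: parsed BIP32 path integers, e.g. [44|H, 281|H, 0|H, 0|H, 0|H]
    return segments[1] == (IDENTITY_COIN_TYPE | HARDENED)

assert is_identity_path([44 | HARDENED, 281 | HARDENED, 0, 0, 0])
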
47615c202078a5fbacd9471286f3c182882f418f | diff --git a/test/e2e_node/cpu_manager_test.go b/test/e2e_node/cpu_manager_test.go
index <HASH>..<HASH> 100644
--- a/test/e2e_node/cpu_manager_test.go
+++ b/test/e2e_node/cpu_manager_test.go
@@ -666,7 +666,7 @@ func runCPUManagerTests(f *framework.Framework) {
}
// Serial because the test updates kubelet configuration.
-var _ = SIGDescribe("CPU Manager [Serial] [Feature:CPUManager][NodeAlphaFeature:CPUManager]", func() {
+var _ = SIGDescribe("CPU Manager [Serial] [Feature:CPUManager]", func() {
f := framework.NewDefaultFramework("cpu-manager-test")
ginkgo.Context("With kubeconfig updated with static CPU Manager policy run the CPU Manager tests", func() { | e2e: node: remove obsolete AlphaFeature tag
The CPUManager graduated to beta a while ago (k8s <I>?)
so let's get rid of the obsolete Alpha tag on its e2e tests. | kubernetes_kubernetes | train |
9089fb8a41a1cef6df2bf2839e1981630acc58cc | diff --git a/lib/utils/input/text.js b/lib/utils/input/text.js
index <HASH>..<HASH> 100644
--- a/lib/utils/input/text.js
+++ b/lib/utils/input/text.js
@@ -14,6 +14,10 @@ const ESCAPES = {
CARRIAGE: '\r'
}
+function formatCC(data) {
+ return data.replace(/\s/g, '').replace(/(.{4})/g, '$1 ').trim()
+}
+
module.exports = function(
{
label = '',
@@ -107,6 +111,7 @@ module.exports = function(
async function onData(buffer) {
let data = buffer.toString()
+
value = stripAnsi(value)
if (abortSequences.has(data)) {
@@ -162,7 +167,7 @@ module.exports = function(
resolve(value)
} else {
if (mask === 'cc' || mask === 'ccv') {
- value = value.replace(/\s/g, '').replace(/(.{4})/g, '$1 ').trim()
+ value = formatCC(value)
value = value.replace(regex, chalk.gray('$1'))
} else if (mask === 'expDate') {
value = value.replace(regex, chalk.gray('$1'))
@@ -186,21 +191,30 @@ module.exports = function(
if (mask) {
if (/\d/.test(data) && caretOffset !== 0) {
+ let formattedData = data
+
+ if (mask === 'cc' || mask === 'ccv') {
+ formattedData = formatCC(data)
+ }
+
if (value[value.length + caretOffset + 1] === ' ') {
tmp =
value.substr(0, value.length + caretOffset) +
- data +
- value.substr(value.length + caretOffset + 1)
- caretOffset += 2
+ formattedData +
+ value.substr(value.length + caretOffset + formattedData.length)
+
+ caretOffset += formattedData.length + 1
+
if (value[value.length + caretOffset] === '/') {
- caretOffset += 2
+ caretOffset += formattedData.length + 1
}
} else {
tmp =
value.substr(0, value.length + caretOffset) +
- data +
- value.substr(value.length + caretOffset + 1)
- caretOffset++
+ formattedData +
+ value.substr(value.length + caretOffset + formattedData.length)
+
+ caretOffset += formattedData.length
}
} else if (/\s/.test(data) && caretOffset < 0) {
caretOffset++
@@ -226,7 +240,7 @@ module.exports = function(
}
if (mask === 'cc' || mask === 'ccv') {
- value = value.replace(/\s/g, '').replace(/(.{4})/g, '$1 ').trim()
+ value = formatCC(value)
value = value.replace(regex, chalk.gray('$1'))
} else if (mask === 'expDate') {
value = value.replace(regex, chalk.gray('$1')) | Allow credit card numbers to be pasted with `now cc add` (#<I>)
This fixes a bug where, when pasting a credit card, it would print the
number correctly, but also append too much placeholder. When hitting
enter, backspace, or another number, it would begin printing (and
formatting) `undefined`. This adds a special case where the CC will be
formatted to account for spaces when comparing against the length of the
existing value or placeholder. | zeit_now-cli | train |
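
formatCC strips whitespace and regroups the digits in blocks of four; the same two-step regex, transliterated to Python:

import re

def format_cc(data):
    digits = re.sub(r"\s", "", data)                  # drop existing spacing
    return re.sub(r"(.{4})", r"\1 ", digits).strip()  # regroup in fours

assert format_cc("4242 42424242 4242") == "4242 4242 4242 4242"
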
28851e2e7bdfffc12418a7d0c0ca49858f888027 | diff --git a/auth.js b/auth.js
index <HASH>..<HASH> 100644
--- a/auth.js
+++ b/auth.js
@@ -30,8 +30,10 @@ module.exports = function(app, options) {
cookieSecret: uuid.v4(),
successRedirect: '/',
failureRedirect: '/',
- serializeUser: null,
- deserializeUser: null
+ serializeUser: null, // optional pointer to a callback function
+ deserializeUser: null, // as above
+ // if set to true, the library will automatically provide a failure route
+ useDefaultFailureRoute: true
};
var strategy;
// end private variables
@@ -122,5 +124,14 @@ module.exports = function(app, options) {
),
authCallbackHandler
);
+ // create Auth0 failure route if requested
+ if (_options.useDefaultFailureRoute) {
+ app.get(
+ _options.auth0.failureRedirect,
+ function (req, res) {
+ res.send(403);
+ }
+ )
+ }
// end constructor function proper
}; | Add very simple generic passport failure handler | DecodedCo_express-auth0-simple | train |
390a5fda784f86b3c50233f65e280dd6a0291e21 | diff --git a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/jvmmodel/JvmTypesBuilder.java b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/jvmmodel/JvmTypesBuilder.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/jvmmodel/JvmTypesBuilder.java
+++ b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/jvmmodel/JvmTypesBuilder.java
@@ -21,9 +21,11 @@ import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.InternalEObject;
+import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.xtext.EcoreUtil2;
import org.eclipse.xtext.common.types.JvmAnnotationAnnotationValue;
import org.eclipse.xtext.common.types.JvmAnnotationReference;
import org.eclipse.xtext.common.types.JvmAnnotationTarget;
@@ -1239,7 +1241,8 @@ public class JvmTypesBuilder {
if (value instanceof XListLiteral) {
List<XExpression> elements = ((XListLiteral) value).getElements();
if(elements.isEmpty()) {
- StandardTypeReferenceOwner typeReferenceOwner = new StandardTypeReferenceOwner(commonTypeComputationServices, value);
+ ResourceSet resourceSetOrNull = EcoreUtil2.getResourceSet(value);
+ StandardTypeReferenceOwner typeReferenceOwner = new StandardTypeReferenceOwner(commonTypeComputationServices, resourceSetOrNull);
final OwnedConverter ownedConverter = new OwnedConverter(typeReferenceOwner);
LightweightTypeReference type = ownedConverter.apply(explicitType);
if(type.isArray()) {
diff --git a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typesystem/legacy/StandardTypeReferenceOwner.java b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typesystem/legacy/StandardTypeReferenceOwner.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typesystem/legacy/StandardTypeReferenceOwner.java
+++ b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typesystem/legacy/StandardTypeReferenceOwner.java
@@ -14,6 +14,7 @@ import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.jdt.annotation.NonNullByDefault;
+import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.xtext.common.types.JvmTypeParameter;
import org.eclipse.xtext.xbase.typesystem.references.ITypeReferenceOwner;
import org.eclipse.xtext.xbase.typesystem.references.LightweightBoundTypeArgument;
@@ -31,7 +32,7 @@ public class StandardTypeReferenceOwner implements ITypeReferenceOwner {
private CommonTypeComputationServices services;
private ResourceSet context;
- public StandardTypeReferenceOwner(CommonTypeComputationServices services, ResourceSet context) {
+ public StandardTypeReferenceOwner(CommonTypeComputationServices services, @Nullable ResourceSet context) {
this.services = services;
this.context = context;
}
@@ -49,6 +50,9 @@ public class StandardTypeReferenceOwner implements ITypeReferenceOwner {
}
public ResourceSet getContextResourceSet() {
+ if (context == null) {
+ throw new IllegalStateException("Owner was instantiated without a resourceSet");
+ }
return context;
} | [regression] Fixed regression in JvmTypesBuilder
This should greenify the runtime tests. | eclipse_xtext-extras | train |
59e13561272fdf75a8ea40d95a6b1081522266e4 | diff --git a/hardware_modules/PiezoController.py b/hardware_modules/PiezoController.py
index <HASH>..<HASH> 100644
--- a/hardware_modules/PiezoController.py
+++ b/hardware_modules/PiezoController.py
@@ -103,6 +103,7 @@ class MDT693B:
# 8 data bits, No parity bit, 1 stop bit, no hardware
# handshake. These are all default for Serial and therefore not input
# below
+ #
if (outputAxis is not 'X' and outputAxis is not 'Y' and outputAxis is not 'Z'):
message = 'Piezo Controller Axis not correctly defined; must be either \'X\', \'Y\', or \'Z\'' | AS: Fixed pyserial update issues --- open port (string) "COM3" instead of (integer) 3 (pushing now) | LISE-B26_pylabcontrol | train |
f269d3d2f7e73c9a61ce665bdf6123806b72ecb4 | diff --git a/chef/lib/chef/knife/status.rb b/chef/lib/chef/knife/status.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/chef/knife/status.rb
+++ b/chef/lib/chef/knife/status.rb
@@ -68,7 +68,19 @@ class Chef
text = minutes_text
end
- highline.say("<%= color('#{text}', #{color}) %> ago, #{node.name}, #{node['platform']} #{node['platform_version']}, #{fqdn}, #{ipaddress}#{run_list}")
+ line_parts = Array.new
+ line_parts << "<%= color('#{text}', #{color}) %> ago" << node.name
+ if node['platform']
+ platform = node['platform']
+ if node['platform_version']
+ platform << " #{node['platform_version']}"
+ end
+ line_parts << platform
+ end
+ line_parts << ipaddress if ipaddress
+ line_parts << run_list if run_list
+
+ highline.say(line_parts.join(', ') + '.')
end
end | CHEF-<I>: Fix knife status when ipaddress is not set | chef_chef | train |
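The fix above replaces a single interpolated string, which printed literal blanks for missing node attributes, with a list that only collects the fields a node actually reported. The same build-then-join pattern in a short Python sketch (the signature and sample values are illustrative):

def status_line(text, name, platform=None, platform_version=None,
                ipaddress=None, run_list=None):
    parts = [f"{text} ago", name]
    if platform:
        # Append the version only when it is known
        parts.append(f"{platform} {platform_version}" if platform_version else platform)
    if ipaddress:
        parts.append(ipaddress)
    if run_list:
        parts.append(run_list)
    return ", ".join(parts) + "."

# A node that never reported an ipaddress no longer breaks the listing:
print(status_line("10 minutes", "web01", platform="ubuntu", run_list="role[web]"))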
e8ec1a6ed4637f13839f645db396b9a04497e026 | diff --git a/default_app/renderer.js b/default_app/renderer.js
index <HASH>..<HASH> 100644
--- a/default_app/renderer.js
+++ b/default_app/renderer.js
@@ -1,9 +1,14 @@
const {remote, shell} = require('electron')
const path = require('path')
+const URL = require('url')
const electronPath = path.relative(process.cwd(), remote.process.execPath)
Array.from(document.querySelectorAll('a[href]')).forEach(link => {
- const url = link.getAttribute('href')
+ // safely add `?utm_source=default_app
+ let url = URL.parse(link.getAttribute('href'), true)
+ url.query = Object.assign(url.query, {utm_source: 'default_app'})
+ url = URL.format(url)
+
link.addEventListener('click', (e) => {
e.preventDefault()
shell.openExternal(url) | add utm source to outbound links | electron_electron | train |
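Rather than naively appending "?utm_source=...", the patch parses each href, merges the parameter into whatever query string is already there, and re-serializes. A rough Python equivalent of that parse-merge-format step, using only the standard library:

from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

def with_utm(url, source="default_app"):
    parts = urlparse(url)
    query = parse_qs(parts.query)
    query["utm_source"] = [source]  # merge, overriding any existing utm_source
    return urlunparse(parts._replace(query=urlencode(query, doseq=True)))

print(with_utm("https://electronjs.org/docs?lang=en"))
# https://electronjs.org/docs?lang=en&utm_source=default_app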
482e57ba716c21cd7b315e5803ecb3953c479b33 | diff --git a/gitlab/v4/objects.py b/gitlab/v4/objects.py
index <HASH>..<HASH> 100644
--- a/gitlab/v4/objects.py
+++ b/gitlab/v4/objects.py
@@ -4007,6 +4007,16 @@ class ProjectAdditionalStatisticsManager(GetWithoutIdMixin, RESTManager):
_from_parent_attrs = {"project_id": "id"}
+class ProjectIssuesStatistics(RefreshMixin, RESTObject):
+ _id_attr = None
+
+
+class ProjectIssuesStatisticsManager(GetWithoutIdMixin, RESTManager):
+ _path = "/projects/%(project_id)s/issues_statistics"
+ _obj_cls = ProjectIssuesStatistics
+ _from_parent_attrs = {"project_id": "id"}
+
+
class Project(SaveMixin, ObjectDeleteMixin, RESTObject):
_short_print_attr = "path"
_managers = (
@@ -4053,6 +4063,7 @@ class Project(SaveMixin, ObjectDeleteMixin, RESTObject):
("wikis", "ProjectWikiManager"),
("clusters", "ProjectClusterManager"),
("additionalstatistics", "ProjectAdditionalStatisticsManager"),
+ ("issuesstatistics", "ProjectIssuesStatisticsManager"),
)
@cli.register_custom_action("Project", ("submodule", "branch", "commit_sha")) | feat: access project's issues statistics
Fixes #<I> | python-gitlab_python-gitlab | train |
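Because ProjectIssuesStatisticsManager mixes in GetWithoutIdMixin, the statistics behave as a singleton sub-resource of a project. A hedged usage sketch; the server URL, token, and project id are placeholders, and the exact attributes on the returned object depend on the GitLab API response:

import gitlab

gl = gitlab.Gitlab("https://gitlab.example.com", private_token="TOKEN")
project = gl.projects.get(42)

# GetWithoutIdMixin exposes a get() that takes no id
stats = project.issuesstatistics.get()
print(stats.statistics)  # e.g. {'counts': {'all': 10, 'closed': 4, 'opened': 6}}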
6ce06c07e7d22a3e51b0c43a0297fb771888a71b | diff --git a/src/MediaInfo.php b/src/MediaInfo.php
index <HASH>..<HASH> 100644
--- a/src/MediaInfo.php
+++ b/src/MediaInfo.php
@@ -100,4 +100,21 @@ class MediaInfo
$this->configuration[$key] = $value;
}
+
+ /**
+ * @param $key
+ * @return mixed
+ *
+ * @throws \Exception
+ */
+ public function getConfig($key)
+ {
+ if (!array_key_exists($key, $this->configuration)) {
+ throw new \Exception(
+ sprintf('key "%s" does\'t exist', $key)
+ );
+ }
+
+ return $this->configuration[$key];
+ }
}
diff --git a/test/Builder/MediaInfoCommandBuilderTest.php b/test/Builder/MediaInfoCommandBuilderTest.php
index <HASH>..<HASH> 100644
--- a/test/Builder/MediaInfoCommandBuilderTest.php
+++ b/test/Builder/MediaInfoCommandBuilderTest.php
@@ -112,6 +112,7 @@ class MediaInfoCommandBuilderTest extends TestCase
[
'command' => '/usr/bin/local/mediainfo',
'use_oldxml_mediainfo_output_format' => false,
+ 'urlencode' => true,
]
);
@@ -121,12 +122,14 @@ class MediaInfoCommandBuilderTest extends TestCase
$this->filePath,
'-f',
'--OUTPUT=XML',
+ '--urlencode',
],
null,
[
'MEDIAINFO_VAR_FILE_PATH' => $this->filePath,
'MEDIAINFO_VAR_FULL_DISPLAY' => '-f',
'MEDIAINFO_VAR_OUTPUT' => '--OUTPUT=XML',
+ 'MEDIAINFO_VAR_URLENCODE' => '--urlencode',
'LANG' => 'en_US.UTF-8',
]
));
diff --git a/test/MediaInfoTest.php b/test/MediaInfoTest.php
index <HASH>..<HASH> 100644
--- a/test/MediaInfoTest.php
+++ b/test/MediaInfoTest.php
@@ -12,10 +12,28 @@ class MediaInfoTest extends TestCase
$mediaInfo = new MediaInfo();
$mediaInfo->setConfig('command', 'new/mediainfo/path');
+ $mediaInfo = new MediaInfo();
+ $this->assertEquals(false, $mediaInfo->getConfig('urlencode'));
+ $mediaInfo->setConfig('urlencode', true);
+ $this->assertEquals(true, $mediaInfo->getConfig('urlencode'));
+
$this->expectException(\Exception::class);
$this->expectExceptionMessage('key "unknow_config" does\'t exist');
$mediaInfo = new MediaInfo();
$mediaInfo->setConfig('unknow_config', '');
}
+
+ public function testGetConfig(): void
+ {
+ $mediaInfo = new MediaInfo();
+ $this->assertEquals(false, $mediaInfo->getConfig('urlencode'));
+
+ $this->expectException(\Exception::class);
+ $this->expectExceptionMessage('key "unknow_config" does\'t exist');
+
+ $mediaInfo = new MediaInfo();
+ $mediaInfo->getConfig('unknow_config');
+ }
+
} | Added test coverage for urlencode | mhor_php-mediainfo | train |
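The new getter deliberately mirrors setConfig: both whitelist the known keys and throw on anything else, so a typo surfaces immediately instead of propagating as null. The same guard pattern in a compact Python sketch (the class name and default keys are illustrative):

class MediaInfoConfig:
    def __init__(self):
        self._config = {"command": "mediainfo", "urlencode": False}

    def set(self, key, value):
        if key not in self._config:
            raise KeyError(f'key "{key}" does not exist')
        self._config[key] = value

    def get(self, key):
        # Same whitelist guard as set()
        if key not in self._config:
            raise KeyError(f'key "{key}" does not exist')
        return self._config[key]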
ef68ca0e53665f2e720f448125e6f1dd5adffbb1 | diff --git a/Backoffice/Context/ContextManager.php b/Backoffice/Context/ContextManager.php
index <HASH>..<HASH> 100644
--- a/Backoffice/Context/ContextManager.php
+++ b/Backoffice/Context/ContextManager.php
@@ -105,25 +105,23 @@ class ContextManager implements CurrentSiteIdInterface
public function getAvailableSites()
{
$sites = array();
-
- if ($this->authorizationChecker->isGranted(ContributionRoleInterface::PLATFORM_ADMIN)) {
- return $this->siteRepository->findByDeleted(false);
- }
-
$token = $this->tokenStorage->getToken();
- if ($token instanceof TokenInterface &&
- ($user = $token->getUser()) instanceof GroupableInterface
- ) {
- foreach ($user->getGroups() as $group) {
- /** @var SiteInterface $site */
- $site = $group->getSite();
- if (null !== $site && !$site->isDeleted()) {
- $sites[$site->getId()] = $site;
+ if ($token instanceof TokenInterface) {
+ if ($this->authorizationChecker->isGranted(ContributionRoleInterface::PLATFORM_ADMIN)) {
+ return $this->siteRepository->findByDeleted(false);
+ }
+
+ if (($user = $token->getUser()) instanceof GroupableInterface) {
+ foreach ($user->getGroups() as $group) {
+ /** @var SiteInterface $site */
+ $site = $group->getSite();
+ if (null !== $site && !$site->isDeleted()) {
+ $sites[$site->getId()] = $site;
+ }
}
}
}
-
return $sites;
} | fix context manager (#<I>)
* fix context manager
* remove useless line | open-orchestra_open-orchestra-cms-bundle | train |
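The reordering in this diff guards the admin shortcut behind the token check: in Symfony, asking the authorization checker a question with no token in storage can throw, so the old code could fail for unauthenticated contexts. A condensed Python sketch of the corrected control flow, with plain dicts standing in for the token, user, group, and site objects:

def available_sites(token, is_admin, all_sites):
    # No token means no authenticated session: nothing is available
    if token is None:
        return {}
    # Admins see every non-deleted site
    if is_admin:
        return {s["id"]: s for s in all_sites if not s["deleted"]}
    # Everyone else sees the non-deleted sites of their groups
    sites = {}
    for group in token["user"]["groups"]:
        site = group.get("site")
        if site is not None and not site["deleted"]:
            sites[site["id"]] = site
    return sites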