column    type            values
hash      stringlengths   40 to 40
diff      stringlengths   131 to 114k
message   stringlengths   7 to 980
project   stringlengths   5 to 67
split     stringclasses   1 value
6403ab1aa92803e6c316ab4f99ac430aa34bb927
diff --git a/src/View/Helper/DocBlockHelper.php b/src/View/Helper/DocBlockHelper.php index <HASH>..<HASH> 100644 --- a/src/View/Helper/DocBlockHelper.php +++ b/src/View/Helper/DocBlockHelper.php @@ -75,9 +75,13 @@ class DocBlockHelper extends BakeDocBlockHelper { if ($info['kind'] === 'association') { $type = $this->associatedEntityTypeToHintType($info['type'], $info['association']); if ($info['association']->type() === Association::MANY_TO_ONE) { + $key = $info['association']->getForeignKey(); + if (is_array($key)) { + $key = implode('-', $key); + } $properties = $this->_insertAfter( $properties, - $info['association']->getForeignKey(), + $key, [$property => $this->columnTypeNullable($info, $type)] ); } else {
Allows array support for foreignKeys
dereuromark_cakephp-ide-helper
train
f5bebc9306a9050923a859f136c7be4e2dd13ee5
diff --git a/cake/libs/controller/components/auth.php b/cake/libs/controller/components/auth.php index <HASH>..<HASH> 100644 --- a/cake/libs/controller/components/auth.php +++ b/cake/libs/controller/components/auth.php @@ -255,6 +255,13 @@ class AuthComponent extends Component { public $params = array(); /** + * AclComponent instance if using Acl + Auth + * + * @var AclComponent + */ + public $Acl; + +/** * Method list for bound controller * * @var array diff --git a/cake/tests/cases/libs/controller/components/auth.test.php b/cake/tests/cases/libs/controller/components/auth.test.php index <HASH>..<HASH> 100644 --- a/cake/tests/cases/libs/controller/components/auth.test.php +++ b/cake/tests/cases/libs/controller/components/auth.test.php @@ -490,7 +490,7 @@ class AuthTest extends CakeTestCase { $this->Controller = new AuthTestController(); $this->Controller->Components->init($this->Controller); - $this->Controller->Components->initialize($this->Controller); + $this->Controller->Components->trigger('initialize', array(&$this->Controller)); $this->Controller->beforeFilter(); ClassRegistry::addObject('view', new View($this->Controller)); @@ -800,7 +800,7 @@ class AuthTest extends CakeTestCase { $this->Controller->params['controller'] = 'auth_test'; $this->Controller->params['action'] = 'add'; - $this->Controller->Acl = $this->getMock('AclComponent'); + $this->Controller->Acl = $this->getMock('AclComponent', array(), array(), '', false); $this->Controller->Acl->expects($this->atLeastOnce())->method('check')->will($this->returnValue(true)); $this->Controller->Auth->initialize($this->Controller); @@ -1552,7 +1552,7 @@ class AuthTest extends CakeTestCase { 'Session' ); $this->Controller->Components->init($this->Controller); - $this->Controller->Components->initialize($this->Controller); + $this->Controller->Components->trigger('initialize', array(&$this->Controller)); Router::reload(); $this->AuthUserCustomField = new AuthUserCustomField();
Adding an Acl property to AuthComponent and updating tests to use new APIs
cakephp_cakephp
train
3b81a75465aaeb1bbd66fa127edee79192f34a49
diff --git a/robotium-solo/src/main/java/com/jayway/android/robotium/solo/Clicker.java b/robotium-solo/src/main/java/com/jayway/android/robotium/solo/Clicker.java index <HASH>..<HASH> 100644 --- a/robotium-solo/src/main/java/com/jayway/android/robotium/solo/Clicker.java +++ b/robotium-solo/src/main/java/com/jayway/android/robotium/solo/Clicker.java @@ -351,11 +351,17 @@ class Clicker { line = 0; ArrayList<View> views = new ArrayList<View>(); - final ListView listView = waiter.waitForAndGetView(index, ListView.class); - if(listView == null) + final AbsListView absListView = waiter.waitForAndGetView(index, AbsListView.class); + if(absListView == null) Assert.assertTrue("ListView is null!", false); - View view = listView.getChildAt(line); + int numberOfLines = absListView.getChildCount(); + + if(line > absListView.getChildCount()){ + Assert.assertTrue("Can not click line number " + line + " as there are only " + numberOfLines + " lines available", false); + } + View view = absListView.getChildAt(line); + if(view != null){ views = viewFetcher.getViews(view, true); views = RobotiumUtils.removeInvisibleViews(views);
Bugfix for issue <I> and <I>
RobotiumTech_robotium
train
1d2fefd64760afe22af90a59f4ae6478cc9fead6
diff --git a/src/Psalm/Internal/Type/NegatedAssertionReconciler.php b/src/Psalm/Internal/Type/NegatedAssertionReconciler.php index <HASH>..<HASH> 100644 --- a/src/Psalm/Internal/Type/NegatedAssertionReconciler.php +++ b/src/Psalm/Internal/Type/NegatedAssertionReconciler.php @@ -826,7 +826,11 @@ class NegatedAssertionReconciler extends Reconciler } if ($non_scalar_types) { - return new Type\Union($non_scalar_types); + $type = new Type\Union($non_scalar_types); + $type->ignore_falsable_issues = $existing_var_type->ignore_falsable_issues; + $type->ignore_nullable_issues = $existing_var_type->ignore_nullable_issues; + $type->from_docblock = $existing_var_type->from_docblock; + return $type; } $failed_reconciliation = 2; @@ -890,7 +894,11 @@ class NegatedAssertionReconciler extends Reconciler } if ($non_object_types) { - return new Type\Union($non_object_types); + $type = new Type\Union($non_object_types); + $type->ignore_falsable_issues = $existing_var_type->ignore_falsable_issues; + $type->ignore_nullable_issues = $existing_var_type->ignore_nullable_issues; + $type->from_docblock = $existing_var_type->from_docblock; + return $type; } $failed_reconciliation = 2; @@ -950,7 +958,11 @@ class NegatedAssertionReconciler extends Reconciler } if ($non_numeric_types) { - return new Type\Union($non_numeric_types); + $type = new Type\Union($non_numeric_types); + $type->ignore_falsable_issues = $existing_var_type->ignore_falsable_issues; + $type->ignore_nullable_issues = $existing_var_type->ignore_nullable_issues; + $type->from_docblock = $existing_var_type->from_docblock; + return $type; } $failed_reconciliation = 2; @@ -1018,7 +1030,11 @@ class NegatedAssertionReconciler extends Reconciler } if ($non_string_types) { - return new Type\Union($non_string_types); + $type = new Type\Union($non_string_types); + $type->ignore_falsable_issues = $existing_var_type->ignore_falsable_issues; + $type->ignore_nullable_issues = $existing_var_type->ignore_nullable_issues; + $type->from_docblock = $existing_var_type->from_docblock; + return $type; } $failed_reconciliation = 2; @@ -1091,7 +1107,11 @@ class NegatedAssertionReconciler extends Reconciler } if ($non_array_types) { - return new Type\Union($non_array_types); + $type = new Type\Union($non_array_types); + $type->ignore_falsable_issues = $existing_var_type->ignore_falsable_issues; + $type->ignore_nullable_issues = $existing_var_type->ignore_nullable_issues; + $type->from_docblock = $existing_var_type->from_docblock; + return $type; } $failed_reconciliation = 2;
Preserve more information after removing types
vimeo_psalm
train
4b8e1199eaf67fee9496776de0fcad813e0de209
diff --git a/reactor-netty-http/src/main/java/reactor/netty/http/client/HttpClientConnect.java b/reactor-netty-http/src/main/java/reactor/netty/http/client/HttpClientConnect.java index <HASH>..<HASH> 100644 --- a/reactor-netty-http/src/main/java/reactor/netty/http/client/HttpClientConnect.java +++ b/reactor-netty-http/src/main/java/reactor/netty/http/client/HttpClientConnect.java @@ -406,20 +406,7 @@ class HttpClientConnect extends HttpClient { this.decoder = configuration.decoder; this.proxyProvider = configuration.proxyProvider(); this.responseTimeout = configuration.responseTimeout; - - HttpHeaders defaultHeaders = configuration.headers; - if (compress) { - if (defaultHeaders == null) { - this.defaultHeaders = new DefaultHttpHeaders(); - } - else { - this.defaultHeaders = defaultHeaders; - } - this.defaultHeaders.set(HttpHeaderNames.ACCEPT_ENCODING, HttpHeaderValues.GZIP); - } - else { - this.defaultHeaders = defaultHeaders; - } + this.defaultHeaders = configuration.headers; String baseUrl = configuration.baseUrl; @@ -473,7 +460,7 @@ class HttpClientConnect extends HttpClient { ch.path = HttpOperations.resolvePath(ch.uri()); - if (defaultHeaders != null) { + if (!defaultHeaders.isEmpty()) { headers.set(defaultHeaders); }
fix #<I> Fix NPE when executing parallel requests The headers on HttpClient level are initialised when the HttpClient configuration is created, no need to initialise it when preparing the request. ACCEPT_ENCODING header is properly set when the compression is enabled, no need to set it when preparing the request.
reactor_reactor-netty
train
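The commit message in the record above describes moving header initialisation from request time to configuration time, so concurrent requests only ever read fully built defaults. A minimal Python sketch of that general pattern (illustrative names, not the reactor-netty API):

```python
# Build immutable defaults once at configuration time, so per-request code
# only reads shared state and cannot race on lazy initialisation.
from types import MappingProxyType


class ClientConfig:
    def __init__(self, headers=None, compress=False):
        defaults = dict(headers or {})
        if compress:
            # decided once, up front, instead of on every request
            defaults["accept-encoding"] = "gzip"
        # read-only view: request handlers can share it safely
        self.default_headers = MappingProxyType(defaults)


class Request:
    def __init__(self, config, extra_headers=None):
        # per-request copy starts from the already-complete defaults
        self.headers = dict(config.default_headers)
        self.headers.update(extra_headers or {})


config = ClientConfig(compress=True)
print(Request(config, {"x-trace": "1"}).headers)
```

Building the defaults once and exposing them read-only removes the lazy, shared mutation that the per-request path previously relied on.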
bf73aab3f6754318a241ebb290f4b857a84310ea
diff --git a/framework/widgets/ActiveField.php b/framework/widgets/ActiveField.php index <HASH>..<HASH> 100644 --- a/framework/widgets/ActiveField.php +++ b/framework/widgets/ActiveField.php @@ -641,6 +641,15 @@ class ActiveField extends Component * If you want to use a widget that does not have `model` and `attribute` properties, * please use [[render()]] instead. * + * For example to use the [[MaskedInput]] widget to get some date input, you can use + * the following code, assuming that `$form` is your [[ActiveForm]] instance: + * + * ```php + * $form->field($model, 'date')->widget(\yii\widgets\MaskedInput::className(), [ + * 'mask' => '99/99/9999', + * ]); + * ``` + * * @param string $class the widget class name * @param array $config name-value pairs that will be used to initialize the widget * @return static the field object itself
added usage example of ActiveField::widget()
yiisoft_yii-core
train
8e2fb9cc6b6178e9a770e2a1d20f26933101e74e
diff --git a/spec/full_usage_spec.rb b/spec/full_usage_spec.rb index <HASH>..<HASH> 100644 --- a/spec/full_usage_spec.rb +++ b/spec/full_usage_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe 'DeepCover usage' do it 'run `ruby spec/full_usage/simple/simple.rb` successfully' do reader, writer = IO.pipe - options = {writer.fileno => writer.fileno} #, in: File::NULL, out: File::NULL, err: File::NULL) + options = {writer.fileno => writer.fileno, in: File::NULL, out: File::NULL, err: File::NULL} pid = spawn('ruby', 'spec/full_usage/simple/simple.rb', writer.fileno.to_s, options) writer.close
Make full_usage spec swallow OUT/ERR of program
deep-cover_deep-cover
train
943e3dcdd27eeb89ef8e9707f274a59699bf165e
diff --git a/lib/formslib.php b/lib/formslib.php index <HASH>..<HASH> 100644 --- a/lib/formslib.php +++ b/lib/formslib.php @@ -1246,6 +1246,9 @@ function validate_' . $this->_formName . '(frm) { function _getElNamesRecursive($element, $group=null){ if ($group==null){ + if (!$this->elementExists($element)) { + return array(); + } $el = $this->getElement($element); } else { $el = &$element;
MDL-<I> new fix, I hope it works now
moodle_moodle
train
daa287d32e46092256d8771d28d11881a474a6ac
diff --git a/presto-tests/src/test/java/com/facebook/presto/tests/TestDistributedSpilledQueries.java b/presto-tests/src/test/java/com/facebook/presto/tests/TestDistributedSpilledQueries.java index <HASH>..<HASH> 100644 --- a/presto-tests/src/test/java/com/facebook/presto/tests/TestDistributedSpilledQueries.java +++ b/presto-tests/src/test/java/com/facebook/presto/tests/TestDistributedSpilledQueries.java @@ -72,12 +72,6 @@ public class TestDistributedSpilledQueries } @Test(enabled = false) - public void testJoinPredicatePushdown() - { - // TODO: disabled until join spilling is reworked - } - - @Test(enabled = false) @Override public void testAssignUniqueId() { @@ -86,18 +80,6 @@ public class TestDistributedSpilledQueries } @Test(enabled = false) - public void testLimitWithJoin() - { - // TODO: disable until https://github.com/prestodb/presto/issues/13859 is resolved. - } - - @Test(enabled = false) - public void testJoinDoubleClauseWithRightOverlap() - { - // TODO: disable until https://github.com/prestodb/presto/issues/13859 is resolved. - } - - @Test(enabled = false) @Override public void testCorrelatedNonAggregationScalarSubqueries() {
Remove test "overrides" that are no longer relevant Those tests have moved, so the overrides are no longer overriding anything.
prestodb_presto
train
60454bb4dea3122c8b63ed9d00ef8c8ce5016281
diff --git a/packages/netlify-cms-core/src/formats/__tests__/frontmatter.spec.js b/packages/netlify-cms-core/src/formats/__tests__/frontmatter.spec.js index <HASH>..<HASH> 100644 --- a/packages/netlify-cms-core/src/formats/__tests__/frontmatter.spec.js +++ b/packages/netlify-cms-core/src/formats/__tests__/frontmatter.spec.js @@ -351,6 +351,18 @@ describe('Frontmatter', () => { }); }); + it('should parse JSON with { } delimiters ending with a nested object', () => { + expect( + FrontmatterInfer.fromFile( + '{\n "title": "The Title",\n "nested": {\n "inside": "Inside prop"\n }\n}\nContent', + ), + ).toEqual({ + title: 'The Title', + nested: { inside: 'Inside prop' }, + body: 'Content', + }); + }); + it('should stringify JSON with { } delimiters when it is explicitly set as the format without a custom delimiter', () => { expect( frontmatterJSON().toFile({ diff --git a/packages/netlify-cms-core/src/formats/frontmatter.js b/packages/netlify-cms-core/src/formats/frontmatter.js index <HASH>..<HASH> 100644 --- a/packages/netlify-cms-core/src/formats/frontmatter.js +++ b/packages/netlify-cms-core/src/formats/frontmatter.js @@ -13,10 +13,7 @@ const parsers = { let JSONinput = input.trim(); // Fix JSON if leading and trailing brackets were trimmed. if (JSONinput.substr(0, 1) !== '{') { - JSONinput = '{' + JSONinput; - } - if (JSONinput.substr(-1) !== '}') { - JSONinput = JSONinput + '}'; + JSONinput = '{' + JSONinput + '}'; } return jsonFormatter.fromFile(JSONinput); },
fix: json FM ending with object parse error (#<I>)
netlify_netlify-cms
train
0834a900d3d25871b6cd731a3f37e38d96d3241b
diff --git a/src/BoomCMS/Http/Controllers/CMS/Robots.php b/src/BoomCMS/Http/Controllers/CMS/Robots.php index <HASH>..<HASH> 100644 --- a/src/BoomCMS/Http/Controllers/CMS/Robots.php +++ b/src/BoomCMS/Http/Controllers/CMS/Robots.php @@ -10,7 +10,7 @@ class Robots extends Controller { protected $role = 'manageRobots'; - public function getIndex() + public function index() { $file = new RobotsFile(); @@ -19,7 +19,7 @@ class Robots extends Controller ]); } - public function postIndex(Request $request) + public function store(Request $request) { $file = new RobotsFile(); $file->saveRules($request->input('rules'), auth()->user()); diff --git a/src/routes.php b/src/routes.php index <HASH>..<HASH> 100644 --- a/src/routes.php +++ b/src/routes.php @@ -10,5 +10,5 @@ Route::group([ BoomCMS\Http\Middleware\DefineCMSViewSharedVariables::class, ], ], function () { - Route::controller('robots', 'Robots'); + Route::resource('robots', 'Robots'); });
Laravel <I>: Removed call to Route::controller()
boomcms_boom-robotstxt
train
9cabf428c4053bb00abd3bd1bf91befe331ba788
diff --git a/src/Psalm/Checker/Statements/ExpressionChecker.php b/src/Psalm/Checker/Statements/ExpressionChecker.php index <HASH>..<HASH> 100644 --- a/src/Psalm/Checker/Statements/ExpressionChecker.php +++ b/src/Psalm/Checker/Statements/ExpressionChecker.php @@ -1892,8 +1892,11 @@ class ExpressionChecker is_string($isset_var->name) ) { $var_id = '$this->' . $isset_var->name; - $context->vars_in_scope[$var_id] = Type::getMixed(); - $context->vars_possibly_in_scope[$var_id] = true; + + if (!isset($context->vars_in_scope[$var_id])) { + $context->vars_in_scope[$var_id] = Type::getMixed(); + $context->vars_possibly_in_scope[$var_id] = true; + } } self::analyzeIssetVar($statements_checker, $isset_var, $context); diff --git a/tests/ReturnTypeTest.php b/tests/ReturnTypeTest.php index <HASH>..<HASH> 100644 --- a/tests/ReturnTypeTest.php +++ b/tests/ReturnTypeTest.php @@ -15,19 +15,19 @@ class ReturnTypeTest extends TestCase 'returnTypeAfterUselessNullCheck' => [ '<?php class One {} - + class B { /** * @return One|null */ public function barBar() { $baz = rand(0,100) > 50 ? new One() : null; - + // should have no effect if ($baz === null) { $baz = null; } - + return $baz; } }' @@ -195,7 +195,7 @@ class ReturnTypeTest extends TestCase case 1: case 2: return true; - + default: throw new \Exception("badness"); } @@ -210,10 +210,10 @@ class ReturnTypeTest extends TestCase return new static(); } } - + class B extends A { } - + $b = B::load();', 'assertions' => [ ['B' => '$b'] @@ -227,10 +227,10 @@ class ReturnTypeTest extends TestCase return [new static()]; } } - + class B extends A { } - + $bees = B::loadMultiple();', 'assertions' => [ ['array<int, B>' => '$bees'] @@ -263,14 +263,14 @@ class ReturnTypeTest extends TestCase return rand(0, 10) === 4 ? "blah" : null; } } - + class B extends A { /** @return string */ public function blah() { return "blah"; } } - + $blah = (new B())->blah();', 'assertions' => [ ['string' => '$blah'] @@ -282,13 +282,13 @@ class ReturnTypeTest extends TestCase /** @return string|null */ public function blah(); } - + class B implements A { public function blah() { return rand(0, 10) === 4 ? "blah" : null; } } - + $blah = (new B())->blah();', 'assertions' => [ ['string|null' => '$blah'] @@ -300,16 +300,16 @@ class ReturnTypeTest extends TestCase /** @return string|null */ abstract public function blah(); } - + class B extends A { } - + class C extends B { public function blah() { return rand(0, 10) === 4 ? "blahblah" : null; } } - + $blah = (new C())->blah();', 'assertions' => [ ['string|null' => '$blah'] @@ -319,11 +319,26 @@ class ReturnTypeTest extends TestCase '<?php class A {} class B extends A {} - + /** @return B|A */ function foo() { return rand(0, 1) ? new A : new B; }' + ], + 'issetOnPropertyReturnType' => [ + '<?php + class Foo { + /** @var Foo|null */ + protected $bar; + + /** + * @return ?Foo + */ + function getBarWithIsset() { + if (isset($this->bar)) return $this->bar; + return null; + } + }' ] ]; } @@ -397,7 +412,7 @@ class ReturnTypeTest extends TestCase 'wrongReturnTypeInNamespace1' => [ '<?php namespace bar; - + function fooFoo() : string { return 5; }', @@ -406,7 +421,7 @@ class ReturnTypeTest extends TestCase 'wrongReturnTypeInNamespace2' => [ '<?php namespace bar; - + function fooFoo() : string { return rand(0, 5) ? "hello" : null; }', @@ -443,7 +458,7 @@ class ReturnTypeTest extends TestCase function fooFoo() : A { return array_pop([]); } - + fooFoo()->bar();', 'error_message' => 'UndefinedClass' ]
Only make isset $this->property mixed if not already defined
vimeo_psalm
train
420c53d79b87cef314beb94cee060f4bdf7885d2
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ reqs = [str(ir.req) for ir in install_reqs] setup(name='iopipe', - version='0.9.0', + version='0.9.1', description='IOpipe agent for serverless Application Performance Monitoring', author='IOpipe', author_email='[email protected]',
Upping version to <I>
iopipe_iopipe-python
train
77092ff8738c290673561f7a7bfd1d383f2fef48
diff --git a/runtest.py b/runtest.py index <HASH>..<HASH> 100644 --- a/runtest.py +++ b/runtest.py @@ -2441,6 +2441,7 @@ class DictImporter: if is_pkg: module.__path__ = [fullname] + global do_exec # do_exec() is defined in the exec() call below do_exec(co, module.__dict__) return sys.modules[fullname] @@ -2465,4 +2466,5 @@ if __name__ == "__main__": sys.meta_path.append(importer) entry = "import py; raise SystemExit(py.test.cmdline.main())" + global do_exec # do_exec() is defined in the exec() call above do_exec(entry, locals())
global do_exec to help linters and humans understand the code Declaring __global do_exec__ will help both linters and humans understand that do_exec() is defined in the exec() call. Discovered via #<I>.
alecthomas_injector
train
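The injector commit above adds a documentary `global` declaration for a name that only comes into existence through an earlier `exec()` call. A self-contained Python sketch of the same idea (names are illustrative, not the project's actual runtest code):

```python
# `do_exec` is created at runtime by exec(), so static analyzers see no
# textual definition of the name anywhere in the file.
exec("def do_exec(code, namespace):\n    exec(code, namespace)\n")


def run(code, namespace):
    # Reading a module-level name needs no `global`, so the declaration is
    # purely documentary: it tells linters and readers where do_exec lives.
    global do_exec
    do_exec(code, namespace)


ns = {}
run("x = 1 + 1", ns)
print(ns["x"])  # 2
```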
7bc291ed13a59d5d849c2bc871a55a37df7bf3a6
diff --git a/lib/faalis/generators/concerns/fieldset.rb b/lib/faalis/generators/concerns/fieldset.rb index <HASH>..<HASH> 100644 --- a/lib/faalis/generators/concerns/fieldset.rb +++ b/lib/faalis/generators/concerns/fieldset.rb @@ -1,3 +1,5 @@ +require 'set' + module Faalis module Generators module Concerns @@ -12,21 +14,34 @@ module Faalis end def fieldset_less_fields - fields - fields_with_attribute("fieldset") + fields = Set.new(raw_fields_data) - Set.new(fields_with_attribute("fieldset")) + puts ">>>>>>>>>>>((((((((((((( #{fields.to_a}" + fields.to_a end - end - def fieldsets - fieldsets = {resource.underscore.pluralize.humanize => fieldset_less_fields} - fields = fields_with_attribute("fieldset") - fields.each do |f| - if fieldsets.include? f["fieldset"] - fieldsets[f["fieldset"]] << f - else - fieldsets[f["fieldset"]] = [f] + # TODO: fix this method to allow usage on tabbed views too + # Return fields categorized by fieldsets. Only for + # views without tabs + def fieldsets + fieldsets = {resource.underscore.pluralize.humanize => fieldset_less_fields} + fields = fields_with_attribute('fieldset') + fields.each do |f| + if fieldsets.include? f['fieldset'] + fieldsets[f['fieldset']] << f + else + fieldsets[f['fieldset']] = [f] + end end + + # Convert hashes to proper field structure to use in templates + fieldsets.each do |fieldset_name, fieldset_fields| + if fieldset_fields[0].is_a? Hash + fieldsets[fieldset_name] = fields(fieldset_fields) + end + end + + fieldsets end - fieldsets end end end diff --git a/lib/faalis/generators/concerns/resource_fields.rb b/lib/faalis/generators/concerns/resource_fields.rb index <HASH>..<HASH> 100644 --- a/lib/faalis/generators/concerns/resource_fields.rb +++ b/lib/faalis/generators/concerns/resource_fields.rb @@ -35,12 +35,12 @@ module Faalis # An array of fields like # [name, type] - def fields + def fields(fields_source = resource_data['fields']) fields = [] relations = ['belongs_to', 'has_many'] if fields? - resource_data['fields'].each do |field| + fields_source.each do |field| name = field['name'] type = field['type'] to = field['to'] @@ -121,9 +121,10 @@ module Faalis end def fields_with_attribute(attr) - raw_fields_data.select do |f| - f.include? attr ? true : false + field_list = raw_fields_data.select do |f| + f.include?(attr) ? true : false end + field_list end def no_filter? diff --git a/lib/faalis/version.rb b/lib/faalis/version.rb index <HASH>..<HASH> 100644 --- a/lib/faalis/version.rb +++ b/lib/faalis/version.rb @@ -18,5 +18,5 @@ # ----------------------------------------------------------------------------- module Faalis - VERSION = "0.24.4" + VERSION = "0.25.0" end
fieldset support added to dashboard scaffolds
Yellowen_Faalis
train
d5b8343fbddad6d9fb42c7f11551ffb9cf93d942
diff --git a/aegea/batch.py b/aegea/batch.py index <HASH>..<HASH> 100644 --- a/aegea/batch.py +++ b/aegea/batch.py @@ -25,40 +25,27 @@ from .util.aws.spot import SpotFleetBuilder bash_cmd_preamble = ["/bin/bash", "-c", 'for i in "$@"; do eval "$i"; done', __name__] ebs_vol_mgr_shellcode = """iid=$(http http://169.254.169.254/latest/dynamic/instance-identity/document) - aws configure set default.region $(echo "$iid" | jq -r .region) - az=$(echo "$iid" | jq -r .availabilityZone) - vid=$(aws ec2 create-volume --availability-zone $az --size %s --volume-type st1 | jq -r .VolumeId) - aws ec2 create-tags --resource $vid --tags Key=aegea_batch_job,Value=$AWS_BATCH_JOB_ID - trap "umount /mnt || umount -l /mnt; aws ec2 detach-volume --volume-id $vid; let delay=2; while ! aws ec2 describe-volumes --volume-ids $vid | jq -re .Volumes[0].Attachments==[]; do if [[ $delay -gt 30 ]]; then aws ec2 detach-volume --force --volume-id $vid; break; fi; sleep $delay; let delay=$delay*2; done; aws ec2 delete-volume --volume-id $vid" EXIT - while [[ $(aws ec2 describe-volumes --volume-ids $vid | jq -r .Volumes[0].State) != available ]]; do sleep 1; done - # let each process start trying from a different /dev/xvd{letter} let pid=$$ - # when N processes compete, for every success there can be N-1 failures; so the appropriate number of retries is O(N^2) # let us size this for 10 competitors # NOTE: the constants 9 and 10 in the $ind and $devnode calculation below are based on strlen("/dev/xvda") -let delay=2+$pid%5 -for try in {1..100}; do if [[ $try == 100 ]]; then echo "Unable to mount $vid on $devnode"; exit 1; fi; if [[ $try -gt 1 ]]; then sleep $delay; fi; devices=$(echo /dev/xvd* /dev/xvd{a..z} /dev/xvd{a..z} | sed 's= =\n=g' | sort | uniq -c | sort -n | grep ' 2 ' | awk '{print $2}'); let devcnt=${#devices}/10+1; let ind=$pid%devcnt; devnode=${devices:10*$ind:9}; aws ec2 attach-volume --instance-id $(echo "$iid" | jq -r .instanceId) --volume-id $vid --device $devnode || continue; break; done - +let delay=2+$pid%%5 +for try in {1..100}; do if [[ $try == 100 ]]; then echo "Unable to mount $vid on $devnode"; exit 1; fi; if [[ $try -gt 1 ]]; then sleep $delay; fi; devices=$(echo /dev/xvd* /dev/xvd{a..z} /dev/xvd{a..z} | sed 's= =\n=g' | sort | uniq -c | sort -n | grep ' 2 ' | awk '{print $2}'); let devcnt=${#devices}/10+1; let ind=$pid%%devcnt; devnode=${devices:10*$ind:9}; aws ec2 attach-volume --instance-id $(echo "$iid" | jq -r .instanceId) --volume-id $vid --device $devnode || continue; break; done # attach-volume is not instantaneous, and describe-volume requests are rate-limited -let delay=5+pid%11 +let delay=5+pid%%11 sleep $delay - let try=1 let max_tries=32 -while [[ $(aws ec2 describe-volumes --volume-ids $vid | jq -r .Volumes[0].State) != in-use ]]; do if [[ $try == $max_tries ]]; break; fi; let foo=1+$try%5; let delay=2**$foo+pid%11; sleep $delay; done - +while [[ $(aws ec2 describe-volumes --volume-ids $vid | jq -r .Volumes[0].State) != in-use ]]; do if [[ $try == $max_tries ]]; break; fi; let foo=1+$try%%5; let delay=2**$foo+pid%%11; sleep $delay; done while [[ ! -e $devnode ]]; do sleep 1; done - mkfs.ext4 $devnode - mount $devnode %s""" # noqa efs_vol_shellcode = """mkdir -p {efs_mountpoint}
fix batch: newlines and percent characters have special meaning (#<I>)
kislyuk_aegea
train
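The aegea fix above escapes `%` as `%%` because the shell snippet is later pushed through Python's old-style `%` string formatting, where a bare `%` starts a conversion specifier. A small, generic illustration of why the escaping matters (not the actual aegea template):

```python
# A template that will be filled in with old-style % formatting.
# Any literal percent sign inside it must be written as %%.
bad_template = "mkfs.ext4 $devnode; let ind=$pid%10; mount $devnode %s"
good_template = "mkfs.ext4 $devnode; let ind=$pid%%10; mount $devnode %s"

try:
    print(bad_template % "/mnt")
except ValueError as err:
    # "%10;" is not a valid conversion specifier, so formatting blows up
    print("formatting failed:", err)

# The escaped version renders the literal % and substitutes only %s.
print(good_template % "/mnt")
```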
4a062f1e69f4e195962bd1c3b8a0e456dfb892aa
diff --git a/salt/states/gem.py b/salt/states/gem.py index <HASH>..<HASH> 100644 --- a/salt/states/gem.py +++ b/salt/states/gem.py @@ -43,7 +43,11 @@ def installed(name, ruby=None, runas=None, version=None, rdoc=False, ri=False): ret = {'name': name, 'result': None, 'comment': '', 'changes': {}} gems = __salt__['gem.list'](name, ruby, runas=runas) - if name in gems and version in gems[name]: + if name in gems and version and version in gems[name]: + ret['result'] = True + ret['comment'] = 'Gem is already installed.' + return ret + elif name in gems: ret['result'] = True ret['comment'] = 'Gem is already installed.' return ret diff --git a/tests/unit/states/gem_test.py b/tests/unit/states/gem_test.py index <HASH>..<HASH> 100644 --- a/tests/unit/states/gem_test.py +++ b/tests/unit/states/gem_test.py @@ -19,7 +19,7 @@ gem.__opts__ = {'test': False} class TestGemState(TestCase): def test_installed(self): - gems = ['foo', 'bar'] + gems = {'foo' : ['1.0'], 'bar' : ['2.0']} gem_list = MagicMock(return_value=gems) gem_install_succeeds = MagicMock(return_value=True) gem_install_fails = MagicMock(return_value=False) @@ -32,14 +32,14 @@ class TestGemState(TestCase): ret = gem.installed('quux') self.assertEqual(True, ret['result']) gem_install_succeeds.assert_called_once_with( - 'quux', None, runas=None) + 'quux', ruby=None, runas=None, version=None, rdoc=False, ri=False) with patch.dict(gem.__salt__, {'gem.install': gem_install_fails}): ret = gem.installed('quux') self.assertEqual(False, ret['result']) gem_install_fails.assert_called_once_with( - 'quux', None, runas=None) + 'quux', ruby=None, runas=None, version=None, rdoc=False, ri=False) def test_removed(self): gems = ['foo', 'bar']
Logic and test fixes for version, ri, rdoc support in gem state
saltstack_salt
train
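The salt state fix above makes the installed-check treat a missing version pin as "any installed version is fine" and only compare versions when one was requested. A compact Python sketch of that decision logic (simplified relative to the real gem state module):

```python
def gem_is_installed(gems, name, version=None):
    """Return True if `name` is present, honouring an optional version pin."""
    if name not in gems:
        return False
    if version is None:
        # no pin requested: any installed version satisfies the state
        return True
    return version in gems[name]


# installed gems mapped to the list of versions present
gems = {"foo": ["1.0"], "bar": ["2.0"]}
print(gem_is_installed(gems, "foo"))           # True  (no version pin)
print(gem_is_installed(gems, "foo", "1.0"))    # True  (pin matches)
print(gem_is_installed(gems, "foo", "2.0"))    # False (pin does not match)
print(gem_is_installed(gems, "quux"))          # False (not installed)
```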
36751ca48252002b4fe0f7307f6ee8a7c071ec7f
diff --git a/src/Friday/Foundation/Application.php b/src/Friday/Foundation/Application.php index <HASH>..<HASH> 100644 --- a/src/Friday/Foundation/Application.php +++ b/src/Friday/Foundation/Application.php @@ -82,7 +82,7 @@ class Application /** * Router instance. * - * @var \Friday\Http\FrontController + * @var Friday\Http\Router */ public $router;
$router (Friday\Http\FrontController) does not accept Friday\Http\Router
ironphp_ironphp
train
9d36fb5e6b6fe18fdf2b03600d4682d22067d9fb
diff --git a/test/base_test_case.py b/test/base_test_case.py index <HASH>..<HASH> 100644 --- a/test/base_test_case.py +++ b/test/base_test_case.py @@ -12,6 +12,7 @@ except ImportError: from mock import Mock, patch import consolemenu.console_menu +from consolemenu.screen import Screen class ThreadedReturnGetter(Thread): def __init__(self, function, args=None, kwargs=None): @@ -35,22 +36,9 @@ class ThreadedReturnGetter(Thread): class BaseTestCase(unittest.TestCase): def setUp(self): pass - ''' XXX - self.mock_curses = Mock(spec=curses) - self.mock_window = Mock(spec=['keypad', 'addstr', 'border', 'getch', 'refresh', 'clear', 'getmaxyx']) - self.mock_window.getch.return_value = ord('a') - self.mock_window.getmaxyx.return_value = (999999999, 999999999) - self.mock_curses.initscr.return_value = self.mock_window - self.mock_curses.wrapper.side_effect = lambda x: x(self.mock_window) - - self.patcher = patch(target='consolemenu.console_menu.curses', new=self.mock_curses) - self.patcher.start() - self.addCleanup(self.patcher.stop) - ''' - self.mock_screen = Mock(spec=['input']) - self.mock_screen.input.return_value = 'a' + self.mock_screen = Mock(spec=Screen()) + self.mock_screen.input.return_value = 4 self.patcher = patch(target='consolemenu.console_menu.Screen', new=self.mock_screen) self.patcher.start() self.addCleanup(self.patcher.stop) - diff --git a/test/test_command_item.py b/test/test_command_item.py index <HASH>..<HASH> 100644 --- a/test/test_command_item.py +++ b/test/test_command_item.py @@ -43,7 +43,7 @@ class TestCommandItem(TestExternalItem): if platform.system().lower() == "windows": return_command_item = CommandItem("return_command_item", "exit 1") else: - return_command_item = CommandItem("return_command_item", "return 1") + return_command_item = CommandItem("return_command_item", "exit 1") return_command_item.action() diff --git a/test/test_console_menu.py b/test/test_console_menu.py index <HASH>..<HASH> 100644 --- a/test/test_console_menu.py +++ b/test/test_console_menu.py @@ -99,3 +99,5 @@ class TestConsoleMenu(BaseTestCase): menu2.start() menu2.wait_for_start(10) self.assertIs(ConsoleMenu.currently_active_menu, menu2) + menu2.join(timeout=10) + menu1.join(timeout=10)
clean up unit tests. all tests succeeding now.
aegirhall_console-menu
train
0365e7324fcb61b6b05a77d24d3aad4efe242a45
diff --git a/src/Publisher/Builder/DocResolver.js b/src/Publisher/Builder/DocResolver.js index <HASH>..<HASH> 100644 --- a/src/Publisher/Builder/DocResolver.js +++ b/src/Publisher/Builder/DocResolver.js @@ -150,7 +150,7 @@ export default class DocResolver { let link = (str)=>{ if (!str) return str; - return str.replace(/\{@link ([\w\#_\-.:\~\/]+)}/g, (str, longname)=>{ + return str.replace(/\{@link ([\w\#_\-.:\~\/$]+)}/g, (str, longname)=>{ return this._builder._buildDocLinkHTML(longname, longname); }); }; diff --git a/src/Publisher/Builder/template/script/inner-link.js b/src/Publisher/Builder/template/script/inner-link.js index <HASH>..<HASH> 100644 --- a/src/Publisher/Builder/template/script/inner-link.js +++ b/src/Publisher/Builder/template/script/inner-link.js @@ -10,7 +10,7 @@ if (el) el.classList.remove('inner-link-active'); // ``[ ] . ' " @`` are not valid in DOM id. so must escape these. - var id = location.hash.replace(/([\[\].'"@])/g, '\\$1'); + var id = location.hash.replace(/([\[\].'"@$])/g, '\\$1'); var el = document.querySelector(id); if (el) el.classList.add('inner-link-active'); } diff --git a/src/Publisher/Builder/template/script/pretty-print.js b/src/Publisher/Builder/template/script/pretty-print.js index <HASH>..<HASH> 100644 --- a/src/Publisher/Builder/template/script/pretty-print.js +++ b/src/Publisher/Builder/template/script/pretty-print.js @@ -17,7 +17,9 @@ } if (location.hash) { - var line = document.querySelector(location.hash); + // ``[ ] . ' " @`` are not valid in DOM id. so must escape these. + var id = location.hash.replace(/([\[\].'"@$])/g, '\\$1'); + var line = document.querySelector(id); if (line) line.classList.add('active'); } })();
fix: link does not work when identifier has `$` <URL>
esdoc_esdoc
train
01302e6d2b2629cca4ad9327abe0f7a317f8399f
diff --git a/lib/fetch/constants.js b/lib/fetch/constants.js index <HASH>..<HASH> 100644 --- a/lib/fetch/constants.js +++ b/lib/fetch/constants.js @@ -58,6 +58,7 @@ const requestCache = [ 'only-if-cached' ] +// https://fetch.spec.whatwg.org/#forbidden-response-header-name const forbiddenResponseHeaderNames = ['set-cookie', 'set-cookie2'] const requestBodyHeader = [ diff --git a/lib/fetch/response.js b/lib/fetch/response.js index <HASH>..<HASH> 100644 --- a/lib/fetch/response.js +++ b/lib/fetch/response.js @@ -8,7 +8,7 @@ const { responseURL, isValidReasonPhrase, toUSVString } = require('./util') const { redirectStatus, nullBodyStatus, - forbiddenHeaderNames + forbiddenResponseHeaderNames } = require('./constants') const { kState, kHeaders, kGuard, kRealm } = require('./symbols') const { kHeadersList } = require('../core/symbols') @@ -366,6 +366,7 @@ function makeNetworkError (reason) { }) } +// https://fetch.spec.whatwg.org/#concept-filtered-response function filterResponse (response, type) { // Set response to the following filtered response with response as its // internal response, depending on request’s response tainting: @@ -376,7 +377,7 @@ function filterResponse (response, type) { const headers = [] for (let n = 0; n < response.headersList.length; n += 2) { - if (!forbiddenHeaderNames.includes(response.headersList[n])) { + if (!forbiddenResponseHeaderNames.includes(response.headersList[n])) { headers.push(response.headersList[n + 0], response.headersList[n + 1]) } }
fetch: fix small spec inconsistency (#<I>) This condition is not *yet* possible to meet so I couldn't add a test (sorry!). The responseTainting is always set here or a network error is returned (.status = 0 is a network error which makes the check never pass): <URL>
mcollina_undici
train
6ebfa13bf7ba1d0dd8fc1ecf7a97b6958a095605
diff --git a/pinject/bindings.py b/pinject/bindings.py index <HASH>..<HASH> 100644 --- a/pinject/bindings.py +++ b/pinject/bindings.py @@ -150,7 +150,7 @@ def default_get_arg_names_from_class_name(class_name): if rest.startswith('_'): rest = rest[1:] while True: - m = re.match(r'([A-Z][a-z]+)(.*)', rest) + m = re.match(r'([A-Z][a-z]*)(.*)', rest) if m is None: break parts.append(m.group(1))
Allow class names with single letter camel case words
google_pinject
train
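The pinject change above is a one-character regex edit, `[A-Z][a-z]+` to `[A-Z][a-z]*`, so single-letter camel-case words no longer stop the word-splitting loop. A short standalone demonstration of the difference (it mirrors the idea, not pinject's exact function):

```python
import re


def split_camel_case(class_name, word_re):
    """Greedily peel camel-case words off the front of class_name."""
    parts, rest = [], class_name
    while True:
        m = re.match(word_re + r"(.*)", rest)
        if m is None:
            break
        parts.append(m.group(1).lower())
        rest = m.group(2)
    return parts


# '+' requires at least one lowercase letter after each capital, so a
# single-letter word like the leading "A" in "AClass" never matches.
print(split_camel_case("XmlHttpRequest", r"([A-Z][a-z]+)"))  # ['xml', 'http', 'request']
print(split_camel_case("AClass", r"([A-Z][a-z]+)"))          # []
# '*' also accepts a capital with no trailing lowercase letters.
print(split_camel_case("AClass", r"([A-Z][a-z]*)"))          # ['a', 'class']
```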
6a4ef446157f6c685d6de603b4a8d9b5143f8d94
diff --git a/lib/specinfra/command/base/routing_table.rb b/lib/specinfra/command/base/routing_table.rb index <HASH>..<HASH> 100644 --- a/lib/specinfra/command/base/routing_table.rb +++ b/lib/specinfra/command/base/routing_table.rb @@ -1,7 +1,10 @@ class Specinfra::Command::Base::RoutingTable < Specinfra::Command::Base class << self def check_has_entry(destination) - "ip route show #{destination} | grep #{destination}" + if destination == "default" + destination = "0.0.0.0/0" + end + "ip route show #{destination}" end alias :get_entry :check_has_entry diff --git a/lib/specinfra/processor.rb b/lib/specinfra/processor.rb index <HASH>..<HASH> 100644 --- a/lib/specinfra/processor.rb +++ b/lib/specinfra/processor.rb @@ -175,7 +175,7 @@ module Specinfra :interface => expected_attr[:interface] ? $3 : nil } else - matches = ret.stdout.scan(/^(\S+)(?: via (\S+))? dev (\S+).+\n|^(\S+).+\n|\s+nexthop via (\S+)\s+dev (\S+).+/) + matches = ret.stdout.scan(/^(\S+)(?: via (\S+))? dev (\S+).+\n|^(\S+).*\n|\s+nexthop via (\S+)\s+dev (\S+).+/) if matches.length > 1 # ECMP route destination = nil @@ -183,9 +183,9 @@ module Specinfra if groups[3] destination = groups[3] next - else - next unless expected_attr[:interface] == groups[5] end + next if expected_attr[:gateway] && expected_attr[:gateway] != groups[4] + next if expected_attr[:interface] && expected_attr[:interface] != groups[5] actual_attr = { :destination => destination,
Fix ECMP route in routing_table
mizzy_specinfra
train
ec83e300d19b54e43bac916b8b705c7f92fd426e
diff --git a/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/SQLExecuteBackendHandler.java b/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/SQLExecuteBackendHandler.java index <HASH>..<HASH> 100644 --- a/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/SQLExecuteBackendHandler.java +++ b/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/SQLExecuteBackendHandler.java @@ -104,6 +104,7 @@ public final class SQLExecuteBackendHandler implements BackendHandler { private CommandResponsePackets execute(final SQLStatement sqlStatement, final SQLExecutionUnit sqlExecutionUnit) { switch (sqlStatement.getType()) { case DQL: + case DAL: return executeQuery(ShardingRuleRegistry.getInstance().getDataSourceMap().get(sqlExecutionUnit.getDataSource()), sqlExecutionUnit.getSqlUnit().getSql()); case DML: case DDL: @@ -227,7 +228,7 @@ public final class SQLExecuteBackendHandler implements BackendHandler { if (SQLType.DML == sqlStatement.getType()) { return mergeDML(headPackets); } - if (SQLType.DQL == sqlStatement.getType()) { + if (SQLType.DQL == sqlStatement.getType() || SQLType.DAL == sqlStatement.getType()) { return mergeDQL(sqlStatement, packets); } return packets.get(0); diff --git a/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/StatementExecuteBackendHandler.java b/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/StatementExecuteBackendHandler.java index <HASH>..<HASH> 100644 --- a/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/StatementExecuteBackendHandler.java +++ b/sharding-proxy/src/main/java/io/shardingjdbc/proxy/backend/common/StatementExecuteBackendHandler.java @@ -111,6 +111,7 @@ public final class StatementExecuteBackendHandler implements BackendHandler { private CommandResponsePackets execute(final SQLStatement sqlStatement, final SQLExecutionUnit sqlExecutionUnit) { switch (sqlStatement.getType()) { case DQL: + case DAL: return executeQuery(ShardingRuleRegistry.getInstance().getDataSourceMap().get(sqlExecutionUnit.getDataSource()), sqlExecutionUnit.getSqlUnit().getSql()); case DML: case DDL: @@ -265,7 +266,7 @@ public final class StatementExecuteBackendHandler implements BackendHandler { if (SQLType.DML == sqlStatement.getType()) { return mergeDML(headPackets); } - if (SQLType.DQL == sqlStatement.getType()) { + if (SQLType.DQL == sqlStatement.getType() || SQLType.DAL == sqlStatement.getType()) { return mergeDQL(sqlStatement, packets); } return packets.get(0);
for #<I> fix bug that DAL didn't merge
apache_incubator-shardingsphere
train
f1e98333106492b489fc0cd25d485b4dc569cff9
diff --git a/gateway/gateway.go b/gateway/gateway.go index <HASH>..<HASH> 100644 --- a/gateway/gateway.go +++ b/gateway/gateway.go @@ -152,7 +152,10 @@ func (gw *Gateway) handleMessage(msg config.Message, dest *bridge.Bridge) []*BrM // only slack now, check will have to be done in the different bridges. // we need to check if we can't use fallback or text in other bridges if msg.Extra != nil { - if dest.Protocol != "slack" { + if dest.Protocol != "discord" && + dest.Protocol != "slack" && + dest.Protocol != "mattermost" && + dest.Protocol != "telegram" { if msg.Text == "" { return brMsgIDs } @@ -210,8 +213,8 @@ func (gw *Gateway) ignoreMessage(msg *config.Message) bool { return true } if msg.Text == "" { - // we have an attachment - if msg.Extra != nil && msg.Extra["attachments"] != nil { + // we have an attachment or actual bytes + if msg.Extra != nil && (msg.Extra["attachments"] != nil || len(msg.Extra["file"]) > 0) { return false } log.Debugf("ignoring empty message %#v from %s", msg, msg.Account)
Do not ignore empty messages with files for bridges that support it
42wim_matterbridge
train
7077ff77bcfd3d1476f754684687d7e76f175573
diff --git a/app/models/spree/adyen_common.rb b/app/models/spree/adyen_common.rb index <HASH>..<HASH> 100644 --- a/app/models/spree/adyen_common.rb +++ b/app/models/spree/adyen_common.rb @@ -218,14 +218,17 @@ module Spree response = provider.authorise_payment payment.order.number, amount, shopper, card, options if response.success? - last_digits = response.additional_data["cardSummary"] - if last_digits.blank? && payment_profiles_supported? - note = "Payment was authorized but could not fetch last digits. - Please request last digits to be sent back to support payment profiles" - raise Adyen::MissingCardSummaryError, note + if payment.source.last_digits.blank? + last_digits = response.additional_data["cardSummary"] + if last_digits.blank? && payment_profiles_supported? + note = "Payment was authorized but could not fetch last digits. + Please request last digits to be sent back to support payment profiles" + raise Adyen::MissingCardSummaryError, note + end + + payment.source.last_digits = last_digits end - payment.source.last_digits = last_digits fetch_and_update_contract payment.source, shopper[:reference] # Avoid this payment from being processed and so authorised again
Only set source last digits when they're blank
StemboltHQ_solidus-adyen
train
264843740b1d903af5a63e0beeb2d9ea9965d7ac
diff --git a/annis-gui/src/main/java/annis/gui/resultview/SingleResultPanel.java b/annis-gui/src/main/java/annis/gui/resultview/SingleResultPanel.java index <HASH>..<HASH> 100644 --- a/annis-gui/src/main/java/annis/gui/resultview/SingleResultPanel.java +++ b/annis-gui/src/main/java/annis/gui/resultview/SingleResultPanel.java @@ -265,8 +265,9 @@ public class SingleResultPanel extends VerticalLayout implements longValue() - 1, MatchedNodeColors.values().length - 1)); - RelannisNodeFeature feat = RelannisNodeFeature.extract(markedEntry.getKey()); - + RelannisNodeFeature feat = RelannisNodeFeature.extract(markedEntry. + getKey()); + if (feat != null) { markedCoveredMap.put("" + feat.getInternalID(), @@ -315,8 +316,9 @@ public class SingleResultPanel extends VerticalLayout implements for (SNode segNode : segNodes) { - RelannisNodeFeature featSegNode = (RelannisNodeFeature) segNode.getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); - + RelannisNodeFeature featSegNode = (RelannisNodeFeature) segNode. + getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); + if (segNode != null && !covered.containsKey(segNode)) { long leftTok = featSegNode.getLeftToken(); @@ -325,7 +327,8 @@ public class SingleResultPanel extends VerticalLayout implements // check for each covered token if this segment is covering it for (Map.Entry<SToken, Long> e : coveredToken.entrySet()) { - RelannisNodeFeature featTok = (RelannisNodeFeature) e.getKey().getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); + RelannisNodeFeature featTok = (RelannisNodeFeature) e.getKey(). + getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); long entryTokenIndex = featTok.getTokenIndex(); if (entryTokenIndex <= rightTok && entryTokenIndex >= leftTok) { @@ -392,9 +395,11 @@ public class SingleResultPanel extends VerticalLayout implements @Override public int compare(SNode o1, SNode o2) { - RelannisNodeFeature feat1 = (RelannisNodeFeature) o1.getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); - RelannisNodeFeature feat2 = (RelannisNodeFeature) o2.getSFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); - + RelannisNodeFeature feat1 = (RelannisNodeFeature) o1.getSFeature( + ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); + RelannisNodeFeature feat2 = (RelannisNodeFeature) o2.getSFeature( + ANNIS_NS, FEAT_RELANNIS_NODE).getValue(); + long leftTokIdxO1 = feat1.getLeftToken(); long rightTokIdxO1 = feat1.getRightToken(); long leftTokIdxO2 = feat2.getLeftToken(); @@ -402,7 +407,18 @@ public class SingleResultPanel extends VerticalLayout implements int intervallO1 = (int) Math.abs(leftTokIdxO1 - rightTokIdxO1); int intervallO2 = (int) Math.abs(leftTokIdxO2 - rightTokIdxO2); - return intervallO1 - intervallO2; + + if (intervallO1 - intervallO2 != 0) + { + return intervallO1 - intervallO2; + } else if (feat1.getLeftToken() - feat2.getRightToken() != 0) + { + return (int) (feat1.getLeftToken() - feat2.getRightToken()); + } else if (feat1.getRightToken() - feat2.getRightToken()!= 0) + { + return (int)(feat1.getRightToken() - feat2.getRightToken()); + } else + return (int)(feat1.getInternalID() - feat2.getInternalID()); } });
This seems to really fix the highlighting issue
korpling_ANNIS
train
b07273fd4c52eec49577802e2f74c97f5c0c31c0
diff --git a/pyqode/core/modes/case_converter.py b/pyqode/core/modes/case_converter.py index <HASH>..<HASH> 100644 --- a/pyqode/core/modes/case_converter.py +++ b/pyqode/core/modes/case_converter.py @@ -1,6 +1,7 @@ """ Contains a case converter mode. """ +import sys from pyqode.qt import QtCore, QtGui from pyqode.core.mode import Mode @@ -20,16 +21,15 @@ class CaseConverterMode(Mode): Mode.__init__(self) self._actions_created = False - def convert(self, function): + def toLower(self): tc = self.editor.textCursor() - tc.insertText(function(tc.selectedText())) + tc.insertText(tc.selectedText().lower()) self.editor.setTextCursor(tc) - def toLower(self): - self.convert(str.lower) - def toUpper(self): - self.convert(str.upper) + tc = self.editor.textCursor() + tc.insertText(tc.selectedText().upper()) + self.editor.setTextCursor(tc) def _create_actions(self): self.aToLower = QtGui.QAction(self.editor)
#<I> Fix for python2 Cannot use str in python 2 while working on unicode. It is better to use the return type's lower/upper function directly
pyQode_pyqode.core
train
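The pyqode fix above stops passing `str.lower`/`str.upper` around as functions, because under Python 2 those reject `unicode` input; calling `.lower()`/`.upper()` on the selected text itself works for any string type. A small sketch of the safer pattern (Python 3 syntax, illustrative only):

```python
def convert_selection(selected_text, to_upper=False):
    """Convert the selection by calling the string's own method,
    rather than applying a type-specific function like str.lower to it."""
    return selected_text.upper() if to_upper else selected_text.lower()


# Works for any object that provides lower()/upper(), which is exactly
# what broke when str.lower was applied to Python 2 unicode objects.
print(convert_selection("Éditeur De Texte"))                 # 'éditeur de texte'
print(convert_selection("Éditeur De Texte", to_upper=True))  # 'ÉDITEUR DE TEXTE'
```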
edb3f1aae07f0c79c51b23ec45c2e32e813aa0fc
diff --git a/openquake/engine/engine.py b/openquake/engine/engine.py index <HASH>..<HASH> 100644 --- a/openquake/engine/engine.py +++ b/openquake/engine/engine.py @@ -300,8 +300,6 @@ def check_obsolete_version(calculation_mode='WebUI'): headers = {'User-Agent': 'OpenQuake Engine %s;%s;%s' % (__version__, calculation_mode, platform.platform())} try: - # we are not using requests.get here because it causes a segfault - # on macOS: https://github.com/gem/oq-engine/issues/3161 req = Request(OQ_API + '/engine/latest', headers=headers) # NB: a timeout < 1 does not work data = urlopen(req, timeout=1).read() # bytes
Updated the changelog [skip CI]
gem_oq-engine
train
194c58694ff148d0d17217790498a7319978f20d
diff --git a/acorn-es7-plugin.js b/acorn-es7-plugin.js index <HASH>..<HASH> 100644 --- a/acorn-es7-plugin.js +++ b/acorn-es7-plugin.js @@ -171,6 +171,8 @@ function asyncAwaitPlugin (parser,options){ // Because we don't know if the top level parser supprts preserveParens, we have to re-parse // without it set rhs = subParse(this,st.start,parseHooks).parseExpression() ; + if (rhs.type==='SequenceExpression') + rhs = rhs.expressions[0] ; rhs.async = true ; rhs.start = start; rhs.loc && (rhs.loc.start = startLoc);
Fix case where async function is followed by a comma
MatAtBread_acorn-es7-plugin
train
8516424ac9b3293817202a9bdfb8634dd11e2e08
diff --git a/lib/netsuite/records/inventory_item.rb b/lib/netsuite/records/inventory_item.rb index <HASH>..<HASH> 100644 --- a/lib/netsuite/records/inventory_item.rb +++ b/lib/netsuite/records/inventory_item.rb @@ -7,7 +7,20 @@ module NetSuite include Support::Actions include Namespaces::ListAcct - actions :get, :add, :delete, :search + # NOTE NetSuite doesn't have a InventoryItemSearch object. So we use + # the ItemSearch instead. In order to actually get Inventory Items only + # you will still have to specify the type: + # + # basic: [ + # { + # field: 'type', + # operator: 'anyOf', + # type: 'SearchEnumMultiSelectField', + # value: ['_inventoryItem'] + # } + # ] + # + actions :get, :add, :delete, :search, :update fields :auto_lead_time, :auto_preferred_stock_level, :auto_reorder_point, :available_to_partners, :average_cost, :copy_description, :cost, :cost_estimate, :cost_estimate_type, :cost_estimate_units, :cost_units, :costing_method,
Add update action to InventoryItem plus NOTE on search
NetSweet_netsuite
train
810e70d234b37885bdca3a7a3536c6d576f477a6
diff --git a/tests/Composer/Test/Repository/PathRepositoryTest.php b/tests/Composer/Test/Repository/PathRepositoryTest.php index <HASH>..<HASH> 100644 --- a/tests/Composer/Test/Repository/PathRepositoryTest.php +++ b/tests/Composer/Test/Repository/PathRepositoryTest.php @@ -96,8 +96,12 @@ class PathRepositoryTest extends TestCase $loader = new ArrayLoader(new VersionParser()); $versionGuesser = null; - $repositoryUrl = implode(DIRECTORY_SEPARATOR, array(__DIR__, 'Fixtures', 'path', 'with-version')); - $relativeUrl = ltrim(substr($repositoryUrl, strlen(getcwd())), DIRECTORY_SEPARATOR); + // realpath() does not fully expand the paths + // PHP Bug https://bugs.php.net/bug.php?id=72642 + $repositoryUrl = implode(DIRECTORY_SEPARATOR, array(realpath(realpath(__DIR__)), 'Fixtures', 'path', 'with-version')); + // getcwd() not necessarily match __DIR__ + // PHP Bug https://bugs.php.net/bug.php?id=73797 + $relativeUrl = ltrim(substr($repositoryUrl, strlen(realpath(realpath(getcwd())))), DIRECTORY_SEPARATOR); $repository = new PathRepository(array('url' => $relativeUrl), $ioInterface, $config, $loader); $packages = $repository->getPackages();
Fix __DIR__/getcwd() inconsistency.
composer_composer
train
bc0c5bb97641fde38d9acdbfc2dd58a6ece5b586
diff --git a/conf.js b/conf.js index <HASH>..<HASH> 100644 --- a/conf.js +++ b/conf.js @@ -118,7 +118,7 @@ if (typeof process === 'object' && typeof process.versions === 'object' && typeo // after merging the custom confs, set defaults if they are still not set if (exports.storage === 'mysql'){ - exports.database.max_connections = exports.database.max_connections || 30; + exports.database.max_connections = exports.database.max_connections || 1; exports.database.host = exports.database.host || 'localhost'; exports.database.name = exports.database.name || 'byteball'; exports.database.user = exports.database.user || 'byteball';
max_connections 1 for mysql
byteball_ocore
train
249001241c26d3ee0ddace9ceb9f9a1a889874a8
diff --git a/src/ol/interaction/DragBox.js b/src/ol/interaction/DragBox.js index <HASH>..<HASH> 100644 --- a/src/ol/interaction/DragBox.js +++ b/src/ol/interaction/DragBox.js @@ -8,6 +8,34 @@ import {always, mouseOnly, mouseActionButton} from '../events/condition.js'; import PointerInteraction from '../interaction/Pointer.js'; import RenderBox from '../render/Box.js'; + +/** + * @enum {string} + */ +const DragBoxEventType = { + /** + * Triggered upon drag box start. + * @event ol.interaction.DragBox.Event#boxstart + * @api + */ + BOXSTART: 'boxstart', + + /** + * Triggered on drag when box is active. + * @event ol.interaction.DragBox.Event#boxdrag + * @api + */ + BOXDRAG: 'boxdrag', + + /** + * Triggered upon drag box end. + * @event ol.interaction.DragBox.Event#boxend + * @api + */ + BOXEND: 'boxend' +}; + + /** * @classdesc * Allows the user to draw a vector box by clicking and dragging on the map, @@ -28,9 +56,9 @@ import RenderBox from '../render/Box.js'; const DragBox = function(opt_options) { PointerInteraction.call(this, { - handleDownEvent: DragBox.handleDownEvent_, - handleDragEvent: DragBox.handleDragEvent_, - handleUpEvent: DragBox.handleUpEvent_ + handleDownEvent: handleDownEvent, + handleDragEvent: handleDragEvent, + handleUpEvent: handleUpEvent }); const options = opt_options ? opt_options : {}; @@ -90,18 +118,17 @@ DragBox.defaultBoxEndCondition = function(mapBrowserEvent, startPixel, endPixel) /** * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event. * @this {ol.interaction.DragBox} - * @private */ -DragBox.handleDragEvent_ = function(mapBrowserEvent) { +function handleDragEvent(mapBrowserEvent) { if (!mouseOnly(mapBrowserEvent)) { return; } this.box_.setPixels(this.startPixel_, mapBrowserEvent.pixel); - this.dispatchEvent(new DragBox.Event(DragBox.EventType_.BOXDRAG, + this.dispatchEvent(new DragBox.Event(DragBoxEventType.BOXDRAG, mapBrowserEvent.coordinate, mapBrowserEvent)); -}; +} /** @@ -127,9 +154,8 @@ DragBox.prototype.onBoxEnd = nullFunction; * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event. * @return {boolean} Stop drag sequence? * @this {ol.interaction.DragBox} - * @private */ -DragBox.handleUpEvent_ = function(mapBrowserEvent) { +function handleUpEvent(mapBrowserEvent) { if (!mouseOnly(mapBrowserEvent)) { return true; } @@ -139,20 +165,19 @@ DragBox.handleUpEvent_ = function(mapBrowserEvent) { if (this.boxEndCondition_(mapBrowserEvent, this.startPixel_, mapBrowserEvent.pixel)) { this.onBoxEnd(mapBrowserEvent); - this.dispatchEvent(new DragBox.Event(DragBox.EventType_.BOXEND, + this.dispatchEvent(new DragBox.Event(DragBoxEventType.BOXEND, mapBrowserEvent.coordinate, mapBrowserEvent)); } return false; -}; +} /** * @param {ol.MapBrowserPointerEvent} mapBrowserEvent Event. * @return {boolean} Start drag sequence? * @this {ol.interaction.DragBox} - * @private */ -DragBox.handleDownEvent_ = function(mapBrowserEvent) { +function handleDownEvent(mapBrowserEvent) { if (!mouseOnly(mapBrowserEvent)) { return false; } @@ -162,41 +187,13 @@ DragBox.handleDownEvent_ = function(mapBrowserEvent) { this.startPixel_ = mapBrowserEvent.pixel; this.box_.setMap(mapBrowserEvent.map); this.box_.setPixels(this.startPixel_, this.startPixel_); - this.dispatchEvent(new DragBox.Event(DragBox.EventType_.BOXSTART, + this.dispatchEvent(new DragBox.Event(DragBoxEventType.BOXSTART, mapBrowserEvent.coordinate, mapBrowserEvent)); return true; } else { return false; } -}; - - -/** - * @enum {string} - * @private - */ -DragBox.EventType_ = { - /** - * Triggered upon drag box start. 
- * @event ol.interaction.DragBox.Event#boxstart - * @api - */ - BOXSTART: 'boxstart', - - /** - * Triggered on drag when box is active. - * @event ol.interaction.DragBox.Event#boxdrag - * @api - */ - BOXDRAG: 'boxdrag', - - /** - * Triggered upon drag box end. - * @event ol.interaction.DragBox.Event#boxend - * @api - */ - BOXEND: 'boxend' -}; +} /**
Remove private static members from DragBox interaction
openlayers_openlayers
train
b318404e539bf82197211ef931a78463944dcf6c
diff --git a/src/java/org/apache/cassandra/cql/QueryProcessor.java b/src/java/org/apache/cassandra/cql/QueryProcessor.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/cql/QueryProcessor.java +++ b/src/java/org/apache/cassandra/cql/QueryProcessor.java @@ -750,6 +750,7 @@ public class QueryProcessor case CREATE_KEYSPACE: CreateKeyspaceStatement create = (CreateKeyspaceStatement)statement.statement; create.validate(); + ThriftValidation.validateKeyspaceNotSystem(create.getName()); clientState.hasKeyspaceSchemaAccess(Permission.WRITE); validateSchemaAgreement(); @@ -893,6 +894,7 @@ public class QueryProcessor case DROP_KEYSPACE: String deleteKeyspace = (String)statement.statement; + ThriftValidation.validateKeyspaceNotSystem(deleteKeyspace); clientState.hasKeyspaceSchemaAccess(Permission.WRITE); validateSchemaAgreement(); diff --git a/src/java/org/apache/cassandra/service/ClientState.java b/src/java/org/apache/cassandra/service/ClientState.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/service/ClientState.java +++ b/src/java/org/apache/cassandra/service/ClientState.java @@ -122,10 +122,6 @@ public class ClientState public void hasKeyspaceSchemaAccess(Permission perm) throws InvalidRequestException { validateLogin(); - - // hardcode disallowing messing with system keyspace - if (keyspace != null && keyspace.equalsIgnoreCase(Table.SYSTEM_TABLE) && perm == Permission.WRITE) - throw new InvalidRequestException("system keyspace is not user-modifiable"); resourceClear(); Set<Permission> perms = DatabaseDescriptor.getAuthority().authorize(user, resource); diff --git a/src/java/org/apache/cassandra/thrift/CassandraServer.java b/src/java/org/apache/cassandra/thrift/CassandraServer.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/thrift/CassandraServer.java +++ b/src/java/org/apache/cassandra/thrift/CassandraServer.java @@ -925,6 +925,7 @@ public class CassandraServer implements Cassandra.Iface throws InvalidRequestException, SchemaDisagreementException, TException { logger.debug("add_keyspace"); + ThriftValidation.validateKeyspaceNotSystem(ks_def.name); state().hasKeyspaceSchemaAccess(Permission.WRITE); validateSchemaAgreement(); ThriftValidation.validateKeyspaceNotYetExisting(ks_def.name); @@ -971,6 +972,7 @@ public class CassandraServer implements Cassandra.Iface throws InvalidRequestException, SchemaDisagreementException, TException { logger.debug("drop_keyspace"); + ThriftValidation.validateKeyspaceNotSystem(keyspace); state().hasKeyspaceSchemaAccess(Permission.WRITE); validateSchemaAgreement(); @@ -999,6 +1001,7 @@ public class CassandraServer implements Cassandra.Iface throws InvalidRequestException, SchemaDisagreementException, TException { logger.debug("update_keyspace"); + ThriftValidation.validateKeyspaceNotSystem(ks_def.name); state().hasKeyspaceSchemaAccess(Permission.WRITE); ThriftValidation.validateTable(ks_def.name); if (ks_def.getCf_defs() != null && ks_def.getCf_defs().size() > 0) diff --git a/src/java/org/apache/cassandra/thrift/ThriftValidation.java b/src/java/org/apache/cassandra/thrift/ThriftValidation.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/thrift/ThriftValidation.java +++ b/src/java/org/apache/cassandra/thrift/ThriftValidation.java @@ -737,4 +737,10 @@ public class ThriftValidation ksName)); } } + + public static void validateKeyspaceNotSystem(String modifiedKeyspace) throws InvalidRequestException + { + if (modifiedKeyspace.equalsIgnoreCase(Table.SYSTEM_TABLE)) + throw new 
InvalidRequestException("system keyspace is not user-modifiable"); + } }
fixes create/update/drop of other keyspaces when the system keyspace is in use, introduced by CASSANDRA-<I>
Stratio_stratio-cassandra
train
52295281751bb98bed1b1eca3f321ac6ddcf3ce1
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryStreamReader.java index <HASH>..<HASH> 100644 --- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryStreamReader.java +++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryStreamReader.java @@ -184,7 +184,7 @@ public class SliceDictionaryStreamReader else { int[] ids = Arrays.copyOfRange(dataVector, 0, nextBatchSize); boolean[] isNullVector = Arrays.copyOfRange(this.isNullVector, 0, nextBatchSize); - Slice[] values = Arrays.copyOf(dictionary, dictionary.length); + Slice[] values = dictionary; sliceVector.setDictionary(values, ids, isNullVector); } @@ -197,11 +197,9 @@ public class SliceDictionaryStreamReader { // read the dictionary if (!dictionaryOpen && dictionarySize > 0) { - // resize the dictionary array if necessary - if (dictionary.length < dictionarySize) { - dictionary = new Slice[dictionarySize]; - dictionaryLength = new int[dictionarySize]; - } + // create a new dictionary array + dictionary = new Slice[dictionarySize]; + dictionaryLength = new int[dictionarySize]; // read the lengths LongStream lengthStream = dictionaryLengthStreamSource.openStream();
Create a new dictionary for each read. We try to reuse the already allocated dictionary (if possible) for multiple reads. This forces us to make a copy of the dictionary for every batch, causing excessive object allocation. Create a new dictionary for every read, to avoid copying the dictionary each time.
prestodb_presto
train
b364c8da6c852a8a1bb07ab589869dbae9fdea63
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -154,6 +154,7 @@ Peer.prototype.signal = function (data) { self._debug('signal()') if (data.sdp) { self._pc.setRemoteDescription(new (self._wrtc.RTCSessionDescription)(data), function () { + if (self.destroyed) return if (self._pc.remoteDescription.type === 'offer') self._createAnswer() }, self._onError.bind(self)) }
handle destroy between async calls
feross_simple-peer
train
b3bcce4404064bb4ba478b582bb18abca016c905
diff --git a/python/turbodbc/cursor.py b/python/turbodbc/cursor.py index <HASH>..<HASH> 100644 --- a/python/turbodbc/cursor.py +++ b/python/turbodbc/cursor.py @@ -83,6 +83,7 @@ class Cursor(object): @translate_exceptions def executemany(self, sql, parameters=None): """Execute an SQL query""" + self.rowcount = -1 self._assert_valid() self.impl.prepare(sql) diff --git a/python/turbodbc_test/test_cursor_basics.py b/python/turbodbc_test/test_cursor_basics.py index <HASH>..<HASH> 100644 --- a/python/turbodbc_test/test_cursor_basics.py +++ b/python/turbodbc_test/test_cursor_basics.py @@ -3,7 +3,7 @@ import six from turbodbc import connect, InterfaceError, Error -from helpers import for_one_database, for_each_database, get_credentials, open_cursor +from helpers import for_one_database, get_credentials, open_cursor from query_fixture import query_fixture @@ -84,7 +84,7 @@ def test_setoutputsize_does_not_raise(dsn, configuration): cursor.setoutputsize(1000) # without column -@for_each_database +@for_one_database def test_rowcount_is_reset_after_execute_raises(dsn, configuration): with open_cursor(configuration) as cursor: with query_fixture(cursor, configuration, 'INSERT INTEGER') as table_name: @@ -93,3 +93,14 @@ def test_rowcount_is_reset_after_execute_raises(dsn, configuration): with pytest.raises(Error): cursor.execute("this is not even a valid SQL statement") assert cursor.rowcount == -1 + + +@for_one_database +def test_rowcount_is_reset_after_executemany_raises(dsn, configuration): + with open_cursor(configuration) as cursor: + with query_fixture(cursor, configuration, 'INSERT INTEGER') as table_name: + cursor.execute("INSERT INTO {} VALUES (?)".format(table_name), [42]) + assert cursor.rowcount == 1 + with pytest.raises(Error): + cursor.executemany("this is not even a valid SQL statement") + assert cursor.rowcount == -1
Fix bug with forgetting to reset rowcount to -1 if executemany() fails. Fixes #<I>
blue-yonder_turbodbc
train
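The turbodbc record above boils down to an ordering rule for DB-API-style cursors: reset `rowcount` to -1 before preparing or executing, so a failing statement cannot leave the previous statement's count behind. The following is only a rough sketch of that ordering with a made-up `impl` object, not turbodbc's real internals:

```python
# Hypothetical, simplified cursor illustrating the reset-before-execute ordering.
class SketchCursor:
    def __init__(self, impl):
        self.impl = impl          # assumed low-level statement object
        self.rowcount = -1        # DB-API convention for "not available"

    def executemany(self, sql, parameters=None):
        self.rowcount = -1                    # reset first: stays -1 if anything below raises
        self.impl.prepare(sql)                # may raise on invalid SQL
        for row in (parameters or []):
            self.impl.add_parameter_set(row)  # may raise on bad parameters
        self.rowcount = self.impl.execute()   # only reached on success
        return self
```

Putting the reset first is what the regression tests in the diff check: after a failed statement, `cursor.rowcount` reads -1 rather than the count of the previous insert.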
d5812e9ee6c2c1475f2524607af6b3d90a74676e
diff --git a/spec/chewy/fields/base_spec.rb b/spec/chewy/fields/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/chewy/fields/base_spec.rb +++ b/spec/chewy/fields/base_spec.rb @@ -317,7 +317,7 @@ describe Chewy::Fields::Base do stub_model(:country) City.belongs_to :country - Country.has_many :cities + Country.has_many :cities, -> { order(:id) } stub_index(:countries) do define_type Country do
fix cities ordering to prevent random test failures
toptal_chewy
train
de07f60e8c714de103815c93e0ec415fa816c55a
diff --git a/client/api/notebook.py b/client/api/notebook.py index <HASH>..<HASH> 100644 --- a/client/api/notebook.py +++ b/client/api/notebook.py @@ -1,6 +1,6 @@ import logging -import os.path import json +import os.path import time from client.api.assignment import load_assignment @@ -9,7 +9,9 @@ from client.utils import auth as ok_auth log = logging.getLogger(__name__) class Notebook: - def __init__(self, filepath=None, cmd_args=None): + def __init__(self, filepath=None, cmd_args=None, debug=False): + ok_logger = logging.getLogger('client') # Get top-level ok logger + ok_logger.setLevel(logging.DEBUG if debug else logging.ERROR) self.assignment = load_assignment(filepath, cmd_args) def run(self, protocol, messages, **kwargs):
Increase notebook log level to ERROR by default (#<I>)
okpy_ok-client
train
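As an aside, the mechanism in the ok-client record above is just standard `logging` configuration driven by a constructor flag; a minimal stand-alone sketch (class and logger names are placeholders, not the real ok-client API) might look like:

```python
import logging

class NotebookLike:
    """Quiet by default; verbose only when debug=True."""
    def __init__(self, debug=False, logger_name="client"):
        pkg_logger = logging.getLogger(logger_name)          # top-level package logger
        pkg_logger.setLevel(logging.DEBUG if debug else logging.ERROR)

NotebookLike()             # only ERROR and above from the 'client' hierarchy
NotebookLike(debug=True)   # full DEBUG output again
```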
248e818162163d69739e466289e0a15c93512db7
diff --git a/src/flux/EditorStore.js b/src/flux/EditorStore.js index <HASH>..<HASH> 100644 --- a/src/flux/EditorStore.js +++ b/src/flux/EditorStore.js @@ -513,6 +513,10 @@ class EditorStore { } } + // normalize line endings (CRLF -> LF, CR -> LF) + value = value.replace(/\r\n/g, '\n') + value = value.replace(/\r/g, '\n') + this.replica.insertCharsAt(position, value, attributes) let relativeMove = value.length
Normalize line endings when inserting into the editor (e.g. paste). When inserting on Windows, a CR-LF was being inserted into the editor. The CR means nothing to Ritzy. Therefore, add a normalization to line endings that are inserted: CRLF -> LF, CR -> LF
ritzyed_ritzy
train
86ffb8426a27f9c70264f41e35c749a85f42c0b1
diff --git a/src/main/java/com/buschmais/jqassistant/plugin/yaml/impl/scanner/YAMLEmitter.java b/src/main/java/com/buschmais/jqassistant/plugin/yaml/impl/scanner/YAMLEmitter.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/buschmais/jqassistant/plugin/yaml/impl/scanner/YAMLEmitter.java +++ b/src/main/java/com/buschmais/jqassistant/plugin/yaml/impl/scanner/YAMLEmitter.java @@ -115,7 +115,7 @@ class YAMLEmitter implements Emitable { bbb.getValues().add(value); } else { -// unsupportedYAMLStructure(); + unsupportedYAMLStructure(event); } }
Commented in reporting of unsupported YAML structure.
buschmais_jqa-yaml-plugin
train
e3c8f4f465f612f2b3cb00010bc5076067f74071
diff --git a/lib/serverengine/socket_manager_win.rb b/lib/serverengine/socket_manager_win.rb index <HASH>..<HASH> 100644 --- a/lib/serverengine/socket_manager_win.rb +++ b/lib/serverengine/socket_manager_win.rb @@ -137,7 +137,7 @@ module ServerEngine proto = WinSock::WSAPROTOCOL_INFO.malloc unless WinSock.WSADuplicateSocketA(sock.handle, pid, proto) == 0 - raise "WSADuplicateSocketA faild (0x%x)" % WinSock.WSAGetLastError + RbWinSock.raise_last_error("WSADuplicateSocketA(3)") end SocketManager.send_peer(peer, proto.to_bin) diff --git a/lib/serverengine/winsock.rb b/lib/serverengine/winsock.rb index <HASH>..<HASH> 100644 --- a/lib/serverengine/winsock.rb +++ b/lib/serverengine/winsock.rb @@ -109,7 +109,9 @@ module ServerEngine def self.wrap_io_handle(sock_class, handle, flags) begin fd = rb_w32_wrap_io_handle(handle, flags) - raise_last_error("rb_w32_wrap_io_handle(3)") if fd < 0 + if fd < 0 + raise_last_error("rb_w32_wrap_io_handle(3)") + end sock = sock_class.for_fd(fd) sock.define_singleton_method(:handle) { handle }
Error handling for WSADuplicateSocketA should also use raise_last_error
treasure-data_serverengine
train
ac13fe2b6d8537e1ea59cb7f6ccac40b1e1f8ea9
diff --git a/bosh-director/lib/bosh/director/cloud_factory.rb b/bosh-director/lib/bosh/director/cloud_factory.rb index <HASH>..<HASH> 100644 --- a/bosh-director/lib/bosh/director/cloud_factory.rb +++ b/bosh-director/lib/bosh/director/cloud_factory.rb @@ -62,7 +62,7 @@ module Bosh::Director raise 'AZ name must not be nil' if az_name.nil? az = @cloud_planner.availability_zone(az_name) - raise "AZ #{az_name} not found in cloud config" if az.nil? + return nil if az.nil? az.cpi end diff --git a/bosh-director/spec/unit/cloud_factory_spec.rb b/bosh-director/spec/unit/cloud_factory_spec.rb index <HASH>..<HASH> 100644 --- a/bosh-director/spec/unit/cloud_factory_spec.rb +++ b/bosh-director/spec/unit/cloud_factory_spec.rb @@ -55,11 +55,11 @@ module Bosh::Director expect(cloud).to eq(default_cloud) end - it 'raises an error when asking for the cloud of a non-existing AZ' do + it 'returns the default cloud when asking for the cloud of a non-existing AZ' do expect(cloud_planner).to receive(:availability_zone).with('some-az').and_return(nil) - expect { - cloud_factory.for_availability_zone('some-az') - }.to raise_error /AZ some-az not found in cloud config/ + + cloud = cloud_factory.for_availability_zone('some-az') + expect(cloud).to eq(default_cloud) end it 'returns the default cloud from director config when asking for the cloud of an existing AZ without cpi' do diff --git a/spec/gocli/integration/cli_cloud_config_spec.rb b/spec/gocli/integration/cli_cloud_config_spec.rb index <HASH>..<HASH> 100644 --- a/spec/gocli/integration/cli_cloud_config_spec.rb +++ b/spec/gocli/integration/cli_cloud_config_spec.rb @@ -32,6 +32,39 @@ describe 'cli cloud config', type: :integration do end end + context 'when an az is removed' do + let(:initial_cloud_config) { Bosh::Spec::Deployments.simple_cloud_config_with_multiple_azs } + + let(:new_cloud_config) do + cloud_config = initial_cloud_config + cloud_config['azs'].pop + cloud_config['networks'][0]['subnets'].pop + cloud_config + end + + let(:initial_manifest) do + manifest = Bosh::Spec::Deployments::simple_manifest + manifest['jobs'][0]['azs'] = ['z1', 'z2'] + manifest + end + + let(:new_manifest) do + manifest = Bosh::Spec::Deployments::simple_manifest + manifest['jobs'][0]['azs'] = ['z1'] + manifest + end + + it 'successfully deploys' do + create_and_upload_test_release + upload_cloud_config(cloud_config_hash: initial_cloud_config) + upload_stemcell + deploy_simple_manifest(manifest_hash: initial_manifest) + + upload_cloud_config(cloud_config_hash: new_cloud_config) + expect{ deploy_simple_manifest(manifest_hash: new_manifest) }.to_not raise_error + end + end + it 'can download a cloud config' do # none present yet expect(bosh_runner.run('cloud-config', failure_expected: true)).to match(/Using environment 'https:\/\/127\.0\.0\.1:\d+' as user 'test'/) diff --git a/spec/support/deployments.rb b/spec/support/deployments.rb index <HASH>..<HASH> 100644 --- a/spec/support/deployments.rb +++ b/spec/support/deployments.rb @@ -56,6 +56,16 @@ module Bosh::Spec end def self.simple_cloud_config_with_multiple_azs_and_cpis + cloud_config = simple_cloud_config_with_multiple_azs + + cloud_config['azs'].each_index do |i| + cloud_config['azs'][i]['cpi'] = "cpi_name#{i}" + end + + cloud_config + end + + def self.simple_cloud_config_with_multiple_azs networks = [ { 'name' => 'a', @@ -75,12 +85,10 @@ module Bosh::Spec azs = [ { 'name' => 'z1', - 'cpi' => 'cpi-name', 'cloud_properties' => {'a' => 'b'} }, { 'name' => 'z2', - 'cpi' => 'cpi-name2', 'cloud_properties' => {'a' => 'b'} } ]
Fall back to default CPI when requested AZ is no longer present [#<I>]
cloudfoundry_bosh
train
8c693f2ebc106ae9c9a005a13872ac14bfdfae37
diff --git a/src/yield_handler.js b/src/yield_handler.js index <HASH>..<HASH> 100644 --- a/src/yield_handler.js +++ b/src/yield_handler.js @@ -25,6 +25,9 @@ export function isGeneratorFunction( obj ) { } } +class YieldException extends TypeError { +} + function objectToPromise( obj ) { var results = new obj.constructor(); var promises = []; @@ -145,6 +148,8 @@ function toPromise( value ) { } ); } + } else { + throw new YieldException( `You may only yield a function, promise, generator, array, or object, but the following object was passed: "${value}"` ); } } @@ -156,7 +161,12 @@ if( !addedYieldHandler ) { return toPromise.call( this, value ); } catch( err ) { - return Promise.reject( err ); + if( err instanceof YieldException ) { + return void 0; + + } else { + return Promise.reject( err ); + } } } );
src: To prevent silent errors, use an internal exception class to force returning undefined on the top yield handler call.
novacrazy_bluebird-co
train
d92131cffb0fadcbca97b885a3a2e2f021cc34e4
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from arangodb import get_version setup( name='ArangoPy', - version= '0.3.0', + version= '0.3.1', packages=find_packages(), requires=[ 'slumber',
New version with ManyToMany field working and hash index query
saeschdivara_ArangoPy
train
02f4612812775aab325952bdbd6c03c91b01a53f
diff --git a/examples/fake.rb b/examples/fake.rb index <HASH>..<HASH> 100644 --- a/examples/fake.rb +++ b/examples/fake.rb @@ -62,14 +62,6 @@ class FakeFTPDriver false end - def move_file(from, to) - false - end - - def move_dir(from, to) - false - end - def rename(from, to) false end
these two methods aren't part of the required driver contract
yob_em-ftpd
train
d20c0ab0ef11590ac2916cf7df1547e80636ab25
diff --git a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerClientHandler.java b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerClientHandler.java index <HASH>..<HASH> 100644 --- a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerClientHandler.java +++ b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerClientHandler.java @@ -43,6 +43,9 @@ public class ResponseStatsPerClientHandler implements ResultSetHandler<ResponseS ResponseStatsPerClientBean rval = new ResponseStatsPerClientBean(); while (rs.next()) { String client = rs.getString(1); + if (client == null) { + continue; + } String rtype = rs.getString(2); long count = rs.getLong(3); diff --git a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerPlanHandler.java b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerPlanHandler.java index <HASH>..<HASH> 100644 --- a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerPlanHandler.java +++ b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/ResponseStatsPerPlanHandler.java @@ -43,6 +43,9 @@ public class ResponseStatsPerPlanHandler implements ResultSetHandler<ResponseSta ResponseStatsPerPlanBean rval = new ResponseStatsPerPlanBean(); while (rs.next()) { String plan = rs.getString(1); + if (plan == null) { + continue; + } String rtype = rs.getString(2); long count = rs.getLong(3); diff --git a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerClientHandler.java b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerClientHandler.java index <HASH>..<HASH> 100644 --- a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerClientHandler.java +++ b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerClientHandler.java @@ -42,6 +42,9 @@ public class UsagePerClientHandler implements ResultSetHandler<UsagePerClientBea UsagePerClientBean rval = new UsagePerClientBean(); while (rs.next()) { String clientId = rs.getString(1); + if (clientId == null) { + continue; + } long count = rs.getLong(2); rval.getData().put(clientId, count); } diff --git a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerPlanHandler.java b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerPlanHandler.java index <HASH>..<HASH> 100644 --- a/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerPlanHandler.java +++ b/manager/api/jdbc/src/main/java/io/apiman/manager/api/jdbc/handlers/UsagePerPlanHandler.java @@ -43,7 +43,9 @@ public class UsagePerPlanHandler implements ResultSetHandler<UsagePerPlanBean> { while (rs.next()) { String planId = rs.getString(1); long count = rs.getLong(2); - rval.getData().put(planId, count); + if (planId != null) { + rval.getData().put(planId, count); + } } return rval; }
handle public api data better in the jdbc metrics handlers
apiman_apiman
train
006a141b2535dac6bf3e1b30cce054ee27d31a75
diff --git a/src/Valkyrja/HttpKernel/Kernels/Kernel.php b/src/Valkyrja/HttpKernel/Kernels/Kernel.php index <HASH>..<HASH> 100644 --- a/src/Valkyrja/HttpKernel/Kernels/Kernel.php +++ b/src/Valkyrja/HttpKernel/Kernels/Kernel.php @@ -257,6 +257,9 @@ class Kernel implements Contract protected function getExceptionResponse(Throwable $exception): Response { if ($this->debug) { + // Log the error + $this->logException($exception); + throw $exception; }
HttpKernel: Adding exception logging when debug is on and exception is thrown.
valkyrjaio_valkyrja
train
398ace918394ebe3cd10ec08b135acf8bd00e1fa
diff --git a/go/api-frontend/aaa/authorization.go b/go/api-frontend/aaa/authorization.go index <HASH>..<HASH> 100644 --- a/go/api-frontend/aaa/authorization.go +++ b/go/api-frontend/aaa/authorization.go @@ -202,6 +202,8 @@ var methodSuffixMap = map[string]string{ "DELETE": "_DELETE", } +var InvalidTokenInfoErr = "Invalid token info" + type TokenAuthorizationMiddleware struct { tokenBackend TokenBackend } @@ -235,7 +237,7 @@ func (tam *TokenAuthorizationMiddleware) BearerRequestIsAuthorized(ctx context.C tokenInfo, _ := tam.tokenBackend.TokenInfoForToken(token) if tokenInfo == nil { - return false, errors.New("Invalid token info") + return false, errors.New(InvalidTokenInfoErr) } var tenantId int @@ -272,7 +274,7 @@ func (tam *TokenAuthorizationMiddleware) BearerRequestIsAuthorized(ctx context.C // Checks whether or not that request is authorized based on the path and method func (tam *TokenAuthorizationMiddleware) IsAuthorized(ctx context.Context, method, path string, tenantId int, tokenInfo *TokenInfo) (bool, error) { if tokenInfo == nil { - return false, errors.New("Invalid token info") + return false, errors.New(InvalidTokenInfoErr) } authAdminRoles, err := tam.isAuthorizedAdminActions(ctx, method, path, tokenInfo.AdminActions()) diff --git a/go/caddy/api-aaa/api-aaa.go b/go/caddy/api-aaa/api-aaa.go index <HASH>..<HASH> 100644 --- a/go/caddy/api-aaa/api-aaa.go +++ b/go/caddy/api-aaa/api-aaa.go @@ -234,7 +234,11 @@ func (h ApiAAAHandler) HandleAAA(w http.ResponseWriter, r *http.Request) bool { if auth { return true } else { - w.WriteHeader(http.StatusForbidden) + if err.Error() == InvalidTokenInfoErr { + w.WriteHeader(http.StatusUnauthorized) + } else { + w.WriteHeader(http.StatusForbidden) + } res, _ := json.Marshal(map[string]string{ "message": err.Error(), })
if we're dealing with an invalid token info then it's unauthorized, not forbidden
inverse-inc_packetfence
train
6d4ce5310fc0e3dcfb756f9af65882bfbea3977e
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -15,7 +15,11 @@ export { STAKE_INSTRUCTION_LAYOUTS, StakeProgram, } from './stake-program'; -export {SystemInstruction, SystemProgram} from './system-program'; +export { + SystemInstruction, + SystemProgram, + SYSTEM_INSTRUCTION_LAYOUTS, +} from './system-program'; export {Transaction, TransactionInstruction} from './transaction'; export {VALIDATOR_INFO_KEY, ValidatorInfo} from './validator-info'; export {VOTE_PROGRAM_ID, VoteAccount} from './vote-account'; diff --git a/src/system-program.js b/src/system-program.js index <HASH>..<HASH> 100644 --- a/src/system-program.js +++ b/src/system-program.js @@ -363,7 +363,7 @@ export type SystemInstructionType = $Keys<typeof SYSTEM_INSTRUCTION_LAYOUTS>; /** * An enumeration of valid system InstructionType's */ -const SYSTEM_INSTRUCTION_LAYOUTS = Object.freeze({ +export const SYSTEM_INSTRUCTION_LAYOUTS = Object.freeze({ Create: { index: 0, layout: BufferLayout.struct([
fix: properly export SYSTEM_INSTRUCTION_LAYOUTS
solana-labs_solana-web3.js
train
919e650ad1efa06ecc3ef201d5139e59ae3cb208
diff --git a/cmd/juju/endpoint_test.go b/cmd/juju/endpoint_test.go index <HASH>..<HASH> 100644 --- a/cmd/juju/endpoint_test.go +++ b/cmd/juju/endpoint_test.go @@ -28,3 +28,49 @@ func (s *EndpointSuite) TestEndpoint(c *gc.C) { info := s.APIInfo(c) c.Assert(output, gc.Equals, fmt.Sprintf("%s\n", info.Addrs[0])) } + +func (s *EndpointSuite) TestMultipleEndpoints(c *gc.C) { + // Run command once to create store in test. + _, err := coretesting.RunCommand(c, envcmd.Wrap(&EndpointCommand{})) + c.Assert(err, gc.IsNil) + s.modifyAddresses(c, []string{"10.0.0.1:17070", "10.0.0.2:17070"}) + ctx, err := coretesting.RunCommand(c, envcmd.Wrap(&EndpointCommand{})) + c.Assert(err, gc.IsNil) + c.Assert(ctx.Stderr.(*bytes.Buffer).String(), gc.Equals, "") + output := string(ctx.Stdout.(*bytes.Buffer).Bytes()) + info := s.APIInfo(c) + c.Assert(output, gc.Equals, fmt.Sprintf("%s\n", info.Addrs[0])) +} + +func (s *EndpointSuite) TestNoEndpoints(c *gc.C) { + // Run command once to create store in test. + _, err := coretesting.RunCommand(c, envcmd.Wrap(&EndpointCommand{})) + c.Assert(err, gc.IsNil) + s.modifyAddresses(c, nil) + ctx, err := coretesting.RunCommand(c, envcmd.Wrap(&EndpointCommand{})) + c.Assert(err, gc.IsNil) + c.Assert(ctx.Stderr.(*bytes.Buffer).String(), gc.Equals, "") + output := string(ctx.Stdout.(*bytes.Buffer).Bytes()) + info := s.APIInfo(c) + c.Assert(output, gc.Equals, fmt.Sprintf("%s\n", info.Addrs[0])) +} + +// modifyAddresses adds more endpoint addresses or removes all +// in case of nil. +func (s *EndpointSuite) modifyAddresses(c *gc.C, addresses []string) { + env, err := s.State.Environment() + c.Assert(err, gc.IsNil) + info, err := s.ConfigStore.ReadInfo(env.Name()) + c.Assert(err, gc.IsNil) + endpoint := info.APIEndpoint() + if len(addresses) == 0 { + // Remove all endpoint addresses. + endpoint.Addresses = []string{} + } else { + // Add additional addresses. + endpoint.Addresses = append(endpoint.Addresses, addresses...) + } + info.SetAPIEndpoint(endpoint) + err = info.Write() + c.Assert(err, gc.IsNil) +}
Added two more tests for multiple and no endpoints.
juju_juju
train
ba61c8c2b5c32cb7abbddb0bb272f0fe1c68cc54
diff --git a/injector.py b/injector.py index <HASH>..<HASH> 100644 --- a/injector.py +++ b/injector.py @@ -232,7 +232,11 @@ class Binding(_BindingBase): class Binder(object): - """Bind interfaces to implementations.""" + """Bind interfaces to implementations. + + .. note:: This class is instantiated internally for you and there's no need + to instantiate it on your own. + """ @private def __init__(self, injector, auto_bind=True, parent=None): @@ -299,7 +303,33 @@ class Binder(object): def install(self, module): """Install a module into this binder. - :param module: A Module instance, Module subclass, or a function. + In this context the module is one of the following: + + * function taking the :class:`Binder` as it's only parameter + + :: + + def configure(binder): + bind(str, to='s') + + binder.install(configure) + + * instance of :class:`Module` (instance of it's subclass counts) + + :: + + class MyModule(Module): + def configure(self, binder): + binder.bind(str, to='s') + + binder.install(MyModule()) + + * subclass of :class:`Module` - the subclass needs to be instantiable so if it + expects any parameters they need to be injected + + :: + + binder.install(MyModule) """ if type(module) is type and issubclass(module, Module): instance = self.injector.create_object(module) @@ -503,23 +533,19 @@ class Module(object): class Injector(object): - """Initialise and use an object dependency graph.""" + """Initialise and use an object dependency graph. - def __init__(self, modules=None, auto_bind=True, parent=None, use_annotations=False): - """Construct a new Injector. + :param modules: Optional - a configuration module or iterable of configuration modules. + Each module will be installed in current :class:`Binder` using :meth:`Binder.install`. - :param modules: A callable, class, or list of callables/classes, used to configure the - Binder associated with this Injector. Typically these - callables will be subclasses of :class:`Module`. + Consult :meth:`Binder.install` documentation for the details. - In case of class, it's instance will be created using parameterless - constructor before the configuration process begins. + :param auto_bind: Whether to automatically bind missing types. + :param parent: Parent injector. + :param use_annotations: Attempt to infer injected arguments using Python3 argument annotations. + """ - Signature is ``configure(binder)``. - :param auto_bind: Whether to automatically bind missing types. - :param parent: Parent injector. - :param use_annotations: Attempt to infer injected arguments using Python3 argument annotations. - """ + def __init__(self, modules=None, auto_bind=True, parent=None, use_annotations=False): # Stack of keys currently being injected. Used to detect circular # dependencies. self._stack = () @@ -723,7 +749,7 @@ def with_injector(*injector_args, **injector_kwargs): """Decorator for a method. Installs Injector object which the method belongs to before the decorated method is executed. - Parameters are the same as for Injector constructor. + Parameters are the same as for :class:`Injector` constructor. 
""" def wrapper(f): @functools.wraps(f) @@ -870,6 +896,7 @@ def inject(**bindings): return multi_wrapper +@private class BaseAnnotation(object): """Annotation base type.""" @@ -885,6 +912,7 @@ def Annotation(name): return type(name, (BaseAnnotation,), {}) +@private class BaseKey(object): """Base type for binding keys.""" @@ -902,7 +930,7 @@ def Key(name): Keys are a convenient alternative to binding to (type, annotation) pairs, particularly when non-unique types such as str or int are being bound. - eg. if using @provides(str), chances of collision are almost guaranteed. + eg. if using :func:`@provides(str) <provides>`, chances of collision are almost guaranteed. One solution is to use @provides(str, annotation='unique') everywhere you wish to inject the value, but this is verbose and error prone. Keys solve this problem: @@ -921,6 +949,7 @@ def Key(name): return type(name, (BaseKey,), {}) +@private class BaseMappingKey(dict): """Base type for mapping binding keys.""" def __init__(self): @@ -938,6 +967,7 @@ def MappingKey(name): return type(name, (BaseMappingKey,), {}) +@private class BaseSequenceKey(list): """Base type for mapping sequence keys.""" def __init__(self):
Improve docstrings and hide some more private API
alecthomas_injector
train
f72d7cb4eb7070a54dbc66b13ade6ccd5164daca
diff --git a/concrete/src/Page/Type/Composer/Control/CorePageProperty/NameCorePageProperty.php b/concrete/src/Page/Type/Composer/Control/CorePageProperty/NameCorePageProperty.php index <HASH>..<HASH> 100644 --- a/concrete/src/Page/Type/Composer/Control/CorePageProperty/NameCorePageProperty.php +++ b/concrete/src/Page/Type/Composer/Control/CorePageProperty/NameCorePageProperty.php @@ -2,6 +2,7 @@ namespace Concrete\Core\Page\Type\Composer\Control\CorePageProperty; +use Concrete\Core\Utility\Service\Text; use Core; use Concrete\Core\Page\Page; use Concrete\Core\Attribute\FontAwesomeIconFormatter; @@ -38,8 +39,8 @@ class NameCorePageProperty extends CorePageProperty }); $this->addPageTypeComposerControlRequestValue('cName', $data['name']); if (!count($slug) && $c->isPageDraft()) { - $txt = new \URLify(); - $this->addPageTypeComposerControlRequestValue('cHandle', $txt->filter($data['name'])); + $txt = new Text(); + $this->addPageTypeComposerControlRequestValue('cHandle', $txt->urlify($data['name'], \Config::get('concrete.seo.segment_max_length'))); } parent::publishToPage($c, $data, $controls); }
fixing uppercase url slug when url slug isn't included in composer Former-commit-id: c4dcf<I>bde<I>e<I>aa<I>cbf<I>cbe<I>cd Former-commit-id: 8f5c2f<I>fdea3f<I>a<I>bdec<I>b9ade5efb4b<I>d
concrete5_concrete5
train
e3c16834eea234fda9bd33c9fdebdfc748509c39
diff --git a/ph-commons/src/main/java/com/helger/commons/ValueEnforcer.java b/ph-commons/src/main/java/com/helger/commons/ValueEnforcer.java index <HASH>..<HASH> 100644 --- a/ph-commons/src/main/java/com/helger/commons/ValueEnforcer.java +++ b/ph-commons/src/main/java/com/helger/commons/ValueEnforcer.java @@ -134,6 +134,54 @@ public final class ValueEnforcer } /** + * Check that the passed value is an instance of the passed class. + * + * @param aValue + * The value to check. May be <code>null</code>. + * @param aClass + * The class of which the passed value must be an instance. May not be + * <code>null</code>. + * @param sMsg + * The message to be emitted in case the value is <code>false</code> + * @throws IllegalArgumentException + * if the passed value is not <code>null</code>. + */ + public static <T> void isInstanceOf (@Nullable final T aValue, + @Nonnull final Class <? extends T> aClass, + final String sMsg) + { + isInstanceOf (aValue, aClass, () -> sMsg); + } + + /** + * Check that the passed value is an instance of the passed class. + * + * @param aValue + * The value to check. May be <code>null</code>. + * @param aClass + * The class of which the passed value must be an instance. May not be + * <code>null</code>. + * @param aMsg + * The message to be emitted in case the value is <code>false</code> + * @throws IllegalArgumentException + * if the passed value is not <code>null</code>. + */ + public static <T> void isInstanceOf (@Nullable final T aValue, + @Nonnull final Class <? extends T> aClass, + @Nonnull final Supplier <String> aMsg) + { + notNull (aValue, "Value"); + notNull (aClass, "Class"); + if (isEnabled ()) + if (!aClass.isInstance (aValue)) + throw new IllegalArgumentException (aMsg.get () + + " must be of class " + + aClass.getName () + + " but is of type " + + aValue.getClass ().getName ()); + } + + /** * Check that the passed value is not <code>null</code>. * * @param <T>
Added isInstanceOf check
phax_ph-commons
train
f5ab4e50f11a62dcb6f2911bfc3760afb2a2de21
diff --git a/src/CarouselComponent.js b/src/CarouselComponent.js index <HASH>..<HASH> 100644 --- a/src/CarouselComponent.js +++ b/src/CarouselComponent.js @@ -30,7 +30,7 @@ const defaultProps = { dismissOnHardwareBackPress: true, navigatorStyle: null, carouselStyle: null, - show: false, + show: null, }; class CarouselComponent extends Component { @@ -41,6 +41,10 @@ class CarouselComponent extends Component { constructor(props: Props) { super(props); + this.state = { + show: null, + }; + (this: any).renderScene = this.renderScene.bind(this); (this: any).show = this.show.bind(this); (this: any).dismiss = this.dismiss.bind(this); @@ -49,15 +53,15 @@ class CarouselComponent extends Component { componentDidMount() { if (this.props.show) { - this.show(this.props.onShow); + this.show(); } if (Platform.OS === 'android') { - const { dismissOnHardwareBackPress, onDismiss } = this.props; + const { dismissOnHardwareBackPress } = this.props; BackAndroid.addEventListener(HARDWARE_BACK_PRESS_EVENT, () => { if (dismissOnHardwareBackPress) { - this.dismiss(onDismiss); + this.dismiss(); return false; } return true; @@ -68,10 +72,10 @@ class CarouselComponent extends Component { componentWillReceiveProps(nextProps) { if (this.props.show !== nextProps.show) { if (nextProps.show) { - this.show(this.props.onShow); + this.show(); return; } - this.dismiss(this.props.onDismiss); + this.dismiss(); } } @@ -83,21 +87,26 @@ class CarouselComponent extends Component { show(callback?: Function = () => {}): void { this.navigator.push({ show: true }); + this.setState({ show: true }); callback(); } dismiss(callback?: Function = () => {}): void { this.navigator.pop(); + this.setState({ show: false }); callback(); } - didFocus({ show }) { + didFocus() { + const { show } = this.state; + if (show === null) { return; } const callback = show ? this.props.onShow : this.props.onDismiss; callback(); + this.setState({ show: !show }); } configureScene() {
bug fix - should call onShow/onDismiss callback when didFocus fires
jacklam718_react-native-carousel-component
train
335d99cbb30bbb45921d63104298a3a4f6808ba8
diff --git a/api/common/context.go b/api/common/context.go index <HASH>..<HASH> 100644 --- a/api/common/context.go +++ b/api/common/context.go @@ -30,10 +30,16 @@ type ContextKey int const ctxKeyEngine = ContextKey(1) +// WithEngine sets the k6 running Engine in the under the hood context. +// +// Deprecated: Use directly the Engine as dependency. func WithEngine(ctx context.Context, engine *core.Engine) context.Context { return context.WithValue(ctx, ctxKeyEngine, engine) } +// GetEngine returns the k6 running Engine fetching it from the context. +// +// Deprecated: Use directly the Engine as dependency. func GetEngine(ctx context.Context) *core.Engine { return ctx.Value(ctxKeyEngine).(*core.Engine) } diff --git a/js/common/bridge.go b/js/common/bridge.go index <HASH>..<HASH> 100644 --- a/js/common/bridge.go +++ b/js/common/bridge.go @@ -134,6 +134,9 @@ func BindToGlobal(rt *goja.Runtime, data map[string]interface{}) func() { } // Bind the provided value v to the provided runtime +// +// Deprecated: JS modules can implement the modules.VU interface for getting the context, +// goja runtime and the VU State, so the goja.Runtime.Set method can be used for data binding. func Bind(rt *goja.Runtime, v interface{}, ctxPtr *context.Context) map[string]interface{} { exports := make(map[string]interface{}) diff --git a/js/common/context.go b/js/common/context.go index <HASH>..<HASH> 100644 --- a/js/common/context.go +++ b/js/common/context.go @@ -26,6 +26,9 @@ import ( "github.com/dop251/goja" ) +// TODO: https://github.com/grafana/k6/issues/2385 +// Rid all the context-based utils functions + type ctxKey int const ( @@ -34,11 +37,15 @@ const ( ) // WithRuntime attaches the given goja runtime to the context. +// +// Deprecated: Implement the modules.VU interface for sharing the Runtime. func WithRuntime(ctx context.Context, rt *goja.Runtime) context.Context { return context.WithValue(ctx, ctxKeyRuntime, rt) } // GetRuntime retrieves the attached goja runtime from the given context. +// +// Deprecated: Use modules.VU for get the Runtime. func GetRuntime(ctx context.Context) *goja.Runtime { v := ctx.Value(ctxKeyRuntime) if v == nil { @@ -48,11 +55,15 @@ func GetRuntime(ctx context.Context) *goja.Runtime { } // WithInitEnv attaches the given init environment to the context. +// +// Deprecated: Implement the modules.VU interface for sharing the init environment. func WithInitEnv(ctx context.Context, initEnv *InitEnvironment) context.Context { return context.WithValue(ctx, ctxKeyInitEnv, initEnv) } // GetInitEnv retrieves the attached init environment struct from the given context. +// +// Deprecated: Use modules.VU for get the init environment. 
func GetInitEnv(ctx context.Context) *InitEnvironment { v := ctx.Value(ctxKeyInitEnv) if v == nil { diff --git a/js/initcontext.go b/js/initcontext.go index <HASH>..<HASH> 100644 --- a/js/initcontext.go +++ b/js/initcontext.go @@ -28,6 +28,7 @@ import ( "path/filepath" "runtime" "strings" + "sync" "github.com/dop251/goja" "github.com/sirupsen/logrus" @@ -201,6 +202,9 @@ func toESModuleExports(exp modules.Exports) interface{} { return result } +// TODO: https://github.com/grafana/k6/issues/2385 +var onceBindWarning sync.Once //nolint: gochecknoglobals + func (i *InitContext) requireModule(name string) (goja.Value, error) { mod, ok := i.modules[name] if !ok { @@ -214,6 +218,12 @@ func (i *InitContext) requireModule(name string) (goja.Value, error) { mod = perInstance.NewModuleInstancePerVU() } + onceBindWarning.Do(func() { + i.logger.Warnf(`Module '%s' is using deprecated APIs that will be removed in k6 v0.38.0,`+ + ` for more details on how to update it see`+ + ` https://k6.io/docs/extensions/guides/create-an-extension/#advanced-javascript-extension`, name) + }) + return i.moduleVUImpl.runtime.ToValue(common.Bind(i.moduleVUImpl.runtime, mod, i.moduleVUImpl.ctxPtr)), nil } diff --git a/lib/context.go b/lib/context.go index <HASH>..<HASH> 100644 --- a/lib/context.go +++ b/lib/context.go @@ -20,7 +20,9 @@ package lib -import "context" +import ( + "context" +) type ctxKey int @@ -30,12 +32,19 @@ const ( ctxKeyScenario ) +// TODO: https://github.com/grafana/k6/issues/2385 +// Rid the State's context-based utils functions + // WithState embeds a State in ctx. +// +// Deprecated: Implement the modules.VU interface for sharing the State. func WithState(ctx context.Context, state *State) context.Context { return context.WithValue(ctx, ctxKeyState, state) } // GetState returns a State from ctx. +// +// Deprecated: Use modules.VU for get the State. func GetState(ctx context.Context) *State { v := ctx.Value(ctxKeyState) if v == nil {
Deprecated context-based utils
loadimpact_k6
train
c8246cb40a0882722f9288dc1727e5a59df8f214
diff --git a/src/wavesurfer.js b/src/wavesurfer.js index <HASH>..<HASH> 100644 --- a/src/wavesurfer.js +++ b/src/wavesurfer.js @@ -29,6 +29,9 @@ var WaveSurfer = { this.createBackend(); this.bindClick(); + + // Used to save the current volume when muting so we can restore once unmuted + this.savedVolume = -1; }, createBackend: function () { @@ -105,10 +108,17 @@ var WaveSurfer = { }, /** - * Toggle the volume on and off. + * Toggle the volume on and off. It not currenly muted it will save the current volume value and turn the volume off. + * If currently muted then it will restore the volume to the saved value, and then rest the saved value. */ - mute: function() { - + toggleMute: function() { + if (this.savedVolume == -1) { + this.savedVolume = this.backend.getVolume(); + this.backend.setVolume(-1); + } else { + this.backend.setVolume(this.savedVolume); + this.savedVolume = -1; + } }, mark: function (options) {
- Added a value for saving the current volume - Renamed the function mute to toggleMute - Implemented the toggleMute method
katspaugh_wavesurfer.js
train
285315a4240c73d31318cba495910a8429dda73a
diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.couch.indexedDb.js b/nunaliit2-js/src/main/js/nunaliit2/n2.couch.indexedDb.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.couch.indexedDb.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.couch.indexedDb.js @@ -325,6 +325,10 @@ var Database = $n2.Class({ getUrl: function(){ return this.wrappedDb.getUrl(); }, + + getServer: function(){ + return this.wrappedDb.getServer(); + }, getDesignDoc: function(opts_) { var couchDesignDoc = this.wrappedDb.getDesignDoc(opts_); diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.couch.js b/nunaliit2-js/src/main/js/nunaliit2/n2.couch.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.couch.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.couch.js @@ -855,6 +855,10 @@ var Database = $n2.Class({ return this.dbUrl; } + ,getServer: function(){ + return this.server; + } + ,getDesignDoc: function(opts_) { var ddOpts = $.extend({ ddUrl: null diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.couchDocument.js b/nunaliit2-js/src/main/js/nunaliit2/n2.couchDocument.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.couchDocument.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.couchDocument.js @@ -827,7 +827,7 @@ var CouchDocumentSource = $n2.Class($n2.document.DocumentSource, { ,onError: function(errorMsg){} },opts_); - var server = this.db.server; + var server = this.db.getServer(); server.getUniqueId(opts); }, diff --git a/nunaliit2-js/src/main/js/nunaliit2/n2.geometry.js b/nunaliit2-js/src/main/js/nunaliit2/n2.geometry.js index <HASH>..<HASH> 100644 --- a/nunaliit2-js/src/main/js/nunaliit2/n2.geometry.js +++ b/nunaliit2-js/src/main/js/nunaliit2/n2.geometry.js @@ -599,6 +599,7 @@ var WktParser = $n2.Class({ }; // Check if we reached end + stream.skipSpaces(); c = stream.peekChar(); if( ')' === c ){ stream.getChar(); @@ -618,9 +619,77 @@ var WktParser = $n2.Class({ var multiPoint = new MultiPoint({points:points}); return multiPoint; + + } else if( stream.startsWith("LINESTRING(",true) ){ + // LINESTRING(x y,x y) + var points = []; + stream.skipCharacters("LINESTRING(".length); + stream.skipSpaces(); + + var done = false; + while( !done ){ + // x y + // x y z + var point = this._parsePoint(stream); + points.push(point); + + // Check if we reached end + stream.skipSpaces(); + c = stream.peekChar(); + if( ')' === c ){ + stream.getChar(); + done = true; + }; + + // If not done, we are expecting a "," + if( !done ){ + stream.skipSpaces(); + var comma = stream.getChar(); + if( ',' !== comma ){ + throw new Error('Expected character "," at position: '+stream.getPosition()); + }; + stream.skipSpaces(); + }; + }; + + if( points.length < 2 ){ + throw new Error('LineString requires more than one point: '+stream.getPosition()); + }; + + var lineString = new LineString({points:points}); + return lineString; + }; + }, + + /** + * Parses '(' <point> [',' <point>]+ ')' + */ + _parseLineString: function(stream){ + stream.skipSpaces(); + var x = this._parseNumber(stream); + stream.skipSpaces(); + var y = this._parseNumber(stream); + + // Third position? + var z; + var c = stream.peekChar(); + if( ' ' === c ){ + stream.skipSpaces(); + z = this._parseNumber(stream); }; + + var point = new Point({ + x: x + ,y: y + ,z: z + }); + + return point; }, + /** + * Parses <number> <space>+ <number> [<space>+ <number>]? + */ _parsePoint: function(stream){ stream.skipSpaces(); var x = this._parseNumber(stream);
nunaliit2-js: Fix error when uploading a media file. Issue #<I>
GCRC_nunaliit
train
2a704427c31222559744d3d26118fd9eaa77e679
diff --git a/modules/impl/src/test/java/de/odysseus/el/tree/impl/CacheTest.java b/modules/impl/src/test/java/de/odysseus/el/tree/impl/CacheTest.java index <HASH>..<HASH> 100644 --- a/modules/impl/src/test/java/de/odysseus/el/tree/impl/CacheTest.java +++ b/modules/impl/src/test/java/de/odysseus/el/tree/impl/CacheTest.java @@ -86,7 +86,7 @@ public class CacheTest extends TestCase { } long result = 0; - for (Future<Long> future : service.invokeAll(tasks, 1L, TimeUnit.SECONDS)) { + for (Future<Long> future : service.invokeAll(tasks, 10L, TimeUnit.SECONDS)) { if (!future.isDone() || future.isCancelled()) { fail(); }
relaxed timeout for multi-thread cache test from 1 to <I> seconds
beckchr_juel
train
8bdf1cdd2a1690599151357da2099314087b738b
diff --git a/openquake/commonlib/source.py b/openquake/commonlib/source.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/source.py +++ b/openquake/commonlib/source.py @@ -503,6 +503,7 @@ source_model_dt = numpy.dtype([ ('path', (str, 255)), ('num_rlzs', numpy.uint32), ('trts', (str, 255)), + ('num_ruptures', (str, 255)), ('samples', numpy.uint32), ]) @@ -519,6 +520,14 @@ def get_trts(smodel): for tmodel in smodel.trt_models) +def get_num_ruptures(smodel): + """ + Extract the number of ruptures per each tectonic region model (in order) + and return a single space separated string. + """ + return ' '.join(str(tmodel.num_ruptures) for tmodel in smodel.trt_models) + + class CompositionInfo(object): """ An object to collect information about the composition of @@ -559,7 +568,8 @@ class CompositionInfo(object): def __toh5__(self): lst = [(sm.name, sm.weight, '_'.join(sm.path), - sm.gsim_lt.get_num_paths(), get_trts(sm), sm.samples) + sm.gsim_lt.get_num_paths(), get_trts(sm), + get_num_ruptures(sm), sm.samples) for sm in self.source_models] return (numpy.array(lst, source_model_dt), dict(seed=self.seed, num_samples=self.num_samples, @@ -575,9 +585,9 @@ class CompositionInfo(object): gsim_lt = logictree.GsimLogicTree( io.BytesIO(self.gsim_lt_xml), trts) trtmodels = [] - for trt in trts: - tm = TrtModel(trt) - tm.id = trt_id + num_ruptures = map(int, rec['num_ruptures'].split()) + for nr, trt in zip(num_ruptures, trts): + tm = TrtModel(trt, num_ruptures=nr, id=trt_id) tm.gsims = gsim_lt.values[trt] trtmodels.append(tm) trt_id += 1
Also serialized num_ruptures in CompositionInfo
gem_oq-engine
train
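The oq-engine record above relies on a small serialization trick: a variable-length list of integers is flattened into one space-separated string field of a fixed numpy record dtype, then recovered with `split()` on load. A hedged round-trip sketch of that encoding (field names and sizes here are invented):

```python
import numpy as np

record_dt = np.dtype([('name', 'S64'), ('num_ruptures', 'S255')])

counts = [12, 340, 7]
row = np.array([(b'model-A', ' '.join(str(n) for n in counts).encode())],
               dtype=record_dt)

# later, when deserializing the record:
decoded = [int(x) for x in row[0]['num_ruptures'].decode().split()]
assert decoded == counts
```

The fixed-width string field keeps the record dtype simple to store, at the cost of an upper bound on how many counts fit.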
2de5a086f72f9ac43a9fdddc750362b4651175a9
diff --git a/pyocd/coresight/core_ids.py b/pyocd/coresight/core_ids.py index <HASH>..<HASH> 100644 --- a/pyocd/coresight/core_ids.py +++ b/pyocd/coresight/core_ids.py @@ -1,5 +1,5 @@ # pyOCD debugger -# Copyright (c) 019 Arm Limited +# Copyright (c) 2019 Arm Limited # SPDX-License-Identifier: Apache-2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from enum import Enum + # pylint: disable=invalid_name # CPUID PARTNO values @@ -41,3 +43,12 @@ CORE_TYPE_NAME = { ARM_CortexM33 : "Cortex-M33", ARM_CortexM35P : "Cortex-M35P", } + +class CoreArchitecture(Enum): + """! @brief CPU architectures.""" + ARMv6M = 1 + ARMv7M = 2 + ARMv8M_BASE = 3 + ARMv8M_MAIN = 4 + + diff --git a/pyocd/coresight/cortex_m.py b/pyocd/coresight/cortex_m.py index <HASH>..<HASH> 100644 --- a/pyocd/coresight/cortex_m.py +++ b/pyocd/coresight/cortex_m.py @@ -1,5 +1,5 @@ # pyOCD debugger -# Copyright (c) 2006-2019 Arm Limited +# Copyright (c) 2006-2020 Arm Limited # SPDX-License-Identifier: Apache-2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -25,7 +25,7 @@ from ..utility.notification import Notification from .component import CoreSightCoreComponent from .fpb import FPB from .dwt import DWT -from .core_ids import CORE_TYPE_NAME +from .core_ids import (CORE_TYPE_NAME, CoreArchitecture) from ..debug.breakpoints.manager import BreakpointManager from ..debug.breakpoints.software import SoftwareBreakpointProvider @@ -234,8 +234,8 @@ class CortexM(Target, CoreSightCoreComponent): CPUID_REVISION_POS = 0 CPUID_IMPLEMENTER_ARM = 0x41 - ARMv6M = 0xC # also ARMv8-M without Main Extension - ARMv7M = 0xF # also ARMv8-M with Main Extension + ARMv6M = 0xC + ARMv7M = 0xF # Debug Core Register Selector Register DCRSR = 0xE000EDF4 @@ -399,6 +399,7 @@ class CortexM(Target, CoreSightCoreComponent): CoreSightCoreComponent.__init__(self, ap, cmpid, address) self.arch = 0 + self._architecture = None self.core_type = 0 self.has_fpu = False self.core_number = core_num @@ -437,6 +438,11 @@ class CortexM(Target, CoreSightCoreComponent): self.dwt = cmp @property + def architecture(self): + """! @brief @ref pyocd.coresight.core_ids.CoreArchitecture "CoreArchitecture" for this core.""" + return self._architecture + + @property def elf(self): return self._elf @@ -581,7 +587,10 @@ class CortexM(Target, CoreSightCoreComponent): # Only v7-M supports VECTRESET. 
if self.arch == CortexM.ARMv7M: + self._architecture = CoreArchitecture.ARMv7M self._supports_vectreset = True + else: + self._architecture = CoreArchitecture.ARMv6M if self.core_type in CORE_TYPE_NAME: LOG.info("CPU core #%d is %s r%dp%d", self.core_number, CORE_TYPE_NAME[self.core_type], self.cpu_revision, self.cpu_patch) diff --git a/pyocd/coresight/cortex_m_v8m.py b/pyocd/coresight/cortex_m_v8m.py index <HASH>..<HASH> 100644 --- a/pyocd/coresight/cortex_m_v8m.py +++ b/pyocd/coresight/cortex_m_v8m.py @@ -1,5 +1,5 @@ # pyOCD debugger -# Copyright (c) 2019 Arm Limited +# Copyright (c) 2019-2020 Arm Limited # SPDX-License-Identifier: Apache-2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,7 +17,7 @@ import logging from .cortex_m import CortexM -from .core_ids import CORE_TYPE_NAME +from .core_ids import (CORE_TYPE_NAME, CoreArchitecture) from ..core import exceptions from ..core.target import Target @@ -76,6 +76,11 @@ class CortexM_v8M(CortexM): pfr1 = self.read32(self.PFR1) self.has_security_extension = ((pfr1 & self.PFR1_SECURITY_MASK) >> self.PFR1_SECURITY_SHIFT) == 1 + if self.arch == self.ARMv8M_BASE: + self._architecture = CoreArchitecture.ARMv8M_BASE + else: + self._architecture = CoreArchitecture.ARMv8M_MAIN + if self.core_type in CORE_TYPE_NAME: if self.has_security_extension: LOG.info("CPU core #%d is %s r%dp%d (security ext present)", self.core_number, CORE_TYPE_NAME[self.core_type], self.cpu_revision, self.cpu_patch)
Core architecture enum and property.
mbedmicro_pyOCD
train
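The pyOCD record above follows a common shape: describe the detected architecture with an `Enum`, fill a private field once during discovery, and expose it through a read-only property. A stripped-down sketch (class names and the 0xF check loosely mirror the diff but are not the real pyOCD classes):

```python
from enum import Enum

class CoreArchitecture(Enum):
    ARMv6M = 1
    ARMv7M = 2

class CoreSketch:
    def __init__(self):
        self._architecture = None            # unknown until the core is examined

    def examine(self, arch_field):
        # assumed raw architecture field: 0xF taken to mean v7-M in this sketch
        self._architecture = (CoreArchitecture.ARMv7M if arch_field == 0xF
                              else CoreArchitecture.ARMv6M)

    @property
    def architecture(self):
        """Detected CoreArchitecture, or None before examine() runs."""
        return self._architecture

core = CoreSketch()
core.examine(0xF)
assert core.architecture is CoreArchitecture.ARMv7M
```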
0b3f7d8a713d89fcbda8b478221c4143fb1113fb
diff --git a/lib/metasploit/model/version.rb b/lib/metasploit/model/version.rb index <HASH>..<HASH> 100644 --- a/lib/metasploit/model/version.rb +++ b/lib/metasploit/model/version.rb @@ -4,6 +4,6 @@ module Metasploit # considered unstable because certain code may not be shared between metasploit_data_models, metasploit-framework, # and pro, so support code for that may be removed in the future. Because of the unstable API the version should # remain below 1.0.0 - VERSION = '0.4.5' + VERSION = '0.4.6' end end diff --git a/spec/dummy/app/models/dummy/architecture.rb b/spec/dummy/app/models/dummy/architecture.rb index <HASH>..<HASH> 100644 --- a/spec/dummy/app/models/dummy/architecture.rb +++ b/spec/dummy/app/models/dummy/architecture.rb @@ -192,6 +192,9 @@ class Dummy::Architecture < Metasploit::Model::Base raise Metasploit::Model::Invalid.new(instance) end + # freeze object to prevent specs from modifying them and interfering with other specs. + instance.freeze + @instance_by_abbreviation[instance.abbreviation] = instance end end diff --git a/spec/dummy/app/models/dummy/authority.rb b/spec/dummy/app/models/dummy/authority.rb index <HASH>..<HASH> 100644 --- a/spec/dummy/app/models/dummy/authority.rb +++ b/spec/dummy/app/models/dummy/authority.rb @@ -112,6 +112,9 @@ class Dummy::Authority < Metasploit::Model::Base raise Metasploit::Model::Invalid.new(instance) end + # freeze object to prevent specs from modifying them and interfering with other specs. + instance.freeze + @instance_by_abbreviation[instance.abbreviation] = instance end end diff --git a/spec/lib/metasploit/model/architecture_spec.rb b/spec/lib/metasploit/model/architecture_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/metasploit/model/architecture_spec.rb +++ b/spec/lib/metasploit/model/architecture_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Metasploit::Model::Architecture do it_should_behave_like 'Metasploit::Model::Architecture' do subject(:architecture) do - FactoryGirl.generate(:dummy_architecture) + Dummy::Architecture.new end end end \ No newline at end of file diff --git a/spec/lib/metasploit/model/authority_spec.rb b/spec/lib/metasploit/model/authority_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/metasploit/model/authority_spec.rb +++ b/spec/lib/metasploit/model/authority_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' describe Metasploit::Model::Authority do it_should_behave_like 'Metasploit::Model::Authority' do subject(:authority) do - FactoryGirl.generate :seeded_dummy_authority + FactoryGirl.build :dummy_authority end end
Fix specs modifying seeds [#<I>]
rapid7_metasploit-model
train
7a6dfb14f2b99ebb8e577e5a415f930539597667
diff --git a/lib/honeybadger/config/yaml.rb b/lib/honeybadger/config/yaml.rb index <HASH>..<HASH> 100644 --- a/lib/honeybadger/config/yaml.rb +++ b/lib/honeybadger/config/yaml.rb @@ -4,40 +4,39 @@ require 'erb' module Honeybadger class Config - class Yaml < ::Hash + module Yaml DISALLOWED_KEYS = [:'config.path'].freeze - def initialize(path, env = 'production') - @path = path.kind_of?(Pathname) ? path : Pathname.new(path) + def self.new(path, env = 'production') + path = path.kind_of?(Pathname) ? path : Pathname.new(path) - if [email protected]? - raise ConfigError, "The configuration file #{@path} was not found." - elsif [email protected]? - raise ConfigError, "The configuration file #{@path} is not a file." - elsif [email protected]? - raise ConfigError, "The configuration file #{@path} is not readable." - else - yaml = load_yaml - yaml.merge!(yaml[env]) if yaml[env].kind_of?(Hash) - update(dotify_keys(yaml)) + if !path.exist? + raise ConfigError, "The configuration file #{path} was not found." + elsif !path.file? + raise ConfigError, "The configuration file #{path} is not a file." + elsif !path.readable? + raise ConfigError, "The configuration file #{path} is not readable." end - end - private + yaml = load_yaml(path) + yaml.merge!(yaml[env]) if yaml[env].kind_of?(Hash) + + dotify_keys(yaml) + end - def load_yaml - yaml = YAML.load(ERB.new(@path.read).result) + def self.load_yaml(path) + yaml = YAML.load(ERB.new(path.read).result) case yaml when Hash yaml when NilClass, FalseClass {} else - raise ConfigError, "The configuration file #{@path} is invalid." + raise ConfigError, "The configuration file #{path} is invalid." end end - def dotify_keys(hash, key_prefix = nil) + def self.dotify_keys(hash, key_prefix = nil) {}.tap do |new_hash| hash.each_pair do |k,v| k = [key_prefix, k].compact.join('.')
Refactor Yaml config source.
honeybadger-io_honeybadger-ruby
train
d14dc32d1800e61a4b509ab8410c848ae472bdd1
diff --git a/lib/puppet/storage.rb b/lib/puppet/storage.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/storage.rb +++ b/lib/puppet/storage.rb @@ -4,26 +4,44 @@ module Puppet # a class for storing state class Storage include Singleton - @@state = Hash.new { |hash,key| - hash[key] = Hash.new(nil) - } - @@splitchar = "\t" def initialize self.class.load end + def Storage.clear + @@state = nil + Storage.init + end + + def Storage.init + @@state = Hash.new { |hash,key| + hash[key] = Hash.new(nil) + } + @@splitchar = "\t" + end + + self.init + def Storage.load - # XXX I should probably use a better default state dir - Puppet[:statefile] ||= "/var/tmp/puppetstate" - return unless File.exists?(Puppet[:statefile]) + if Puppet[:statefile].nil? + raise "Somehow the statefile is nil" + end + + unless File.exists?(Puppet[:statefile]) + Puppet.info "Statefile %s does not exist" % Puppet[:statefile] + return + end + Puppet.debug "Loading statefile %s" % Puppet[:statefile] File.open(Puppet[:statefile]) { |file| - file.gets { |line| + file.each { |line| myclass, key, value = line.split(@@splitchar) - @@state[myclass][key] = Marshal::load(value) + @@state[eval(myclass)][key] = Marshal::load(value) } } + + Puppet.debug "Loaded state is %s" % @@state.inspect end def Storage.state(myclass) @@ -39,7 +57,7 @@ module Puppet begin Puppet.recmkdir(Puppet[:statefile]) Puppet.info "Creating state directory %s" % - File.basename(Puppet[:statefile]) + File.dirname(Puppet[:statefile]) rescue => detail Puppet.err "Could not create state file: %s" % detail return @@ -58,6 +76,8 @@ module Puppet } } } + + Puppet.debug "Stored state is %s" % @@state.inspect end end end diff --git a/test/other/tc_state.rb b/test/other/tc_state.rb index <HASH>..<HASH> 100644 --- a/test/other/tc_state.rb +++ b/test/other/tc_state.rb @@ -9,12 +9,20 @@ require 'test/unit' # $Id$ +class StorageTestingClass +end + class TestStorage < Test::Unit::TestCase def setup Puppet[:loglevel] = :debug if __FILE__ == $0 Puppet[:statefile] = "/var/tmp/puppetteststate" end + def teardown + system("rm -f %s" % Puppet[:statefile]) + Puppet::Storage.clear + end + def test_simple state = nil assert_nothing_raised { @@ -28,6 +36,12 @@ class TestStorage < Test::Unit::TestCase assert_nothing_raised { Puppet::Storage.store } + + # clear the memory, so we're sure we're hitting the state file + assert_nothing_raised { + Puppet::Storage.clear + Puppet::Storage.init + } assert_nothing_raised { Puppet::Storage.load } @@ -54,7 +68,25 @@ class TestStorage < Test::Unit::TestCase assert(state) end - def teardown - system("rm -f %s" % Puppet[:statefile]) + def test_update + state = Puppet::Storage.state(StorageTestingClass) + state["testing"] = "yayness" + Puppet::Storage.store + assert(FileTest.exists?(Puppet[:statefile])) + end + + def test_hashstorage + state = Puppet::Storage.state(StorageTestingClass) + hash = { + :yay => "boo", + :rah => "foo" + } + state["testing"] = hash + Puppet::Storage.store + Puppet::Storage.clear + Puppet::Storage.init + Puppet::Storage.load + state = Puppet::Storage.state(StorageTestingClass) + assert_equal(hash, state["testing"]) end end
fixing storage class; it was not actually correctly retrieving state from disk. git-svn-id: <URL>
puppetlabs_puppet
train
0dcefddae4db4a9dbd70176a455316a2c881819c
diff --git a/plugins/commands/serve/util/exception_logger.rb b/plugins/commands/serve/util/exception_logger.rb index <HASH>..<HASH> 100644 --- a/plugins/commands/serve/util/exception_logger.rb +++ b/plugins/commands/serve/util/exception_logger.rb @@ -4,7 +4,13 @@ module VagrantPlugins # Adds exception logging to all public instance methods module ExceptionLogger def self.included(klass) - klass.public_instance_methods.each do |m_name| + # Get all the public instance methods. Need to search ancestors as well + # for modules like the Guest service which includes the CapabilityPlatform + # module + klass_public_instance_methods = klass.public_instance_methods + # Remove all generic instance methods from the list of ones to modify + logged_methods = klass_public_instance_methods - Object.public_instance_methods + logged_methods.each do |m_name| klass.define_method(m_name) do |*args, **opts, &block| begin super(*args, **opts, &block)
Only add exception logger to non-generic methods
hashicorp_vagrant
train
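The Vagrant commit above is Ruby, but the underlying idea — wrap only the methods a class itself defines with exception logging, and skip the generic ones every object inherits — translates directly. The following is a minimal Python analogue, not the Vagrant code; the `GuestService` class and its method are hypothetical.

```python
import functools
import logging

logger = logging.getLogger(__name__)

def log_exceptions(cls):
    """Wrap the class's own public methods with exception logging.

    Names that every object already has (the analogue of Ruby's
    Object#public_instance_methods) are skipped, as are private names.
    """
    generic = set(dir(object))
    for name, attr in list(vars(cls).items()):
        if name.startswith("_") or name in generic or not callable(attr):
            continue

        def make_wrapper(func):
            @functools.wraps(func)
            def wrapper(self, *args, **kwargs):
                try:
                    return func(self, *args, **kwargs)
                except Exception:
                    # Log with traceback, then let the error propagate.
                    logger.exception("error in %s.%s", cls.__name__, func.__name__)
                    raise
            return wrapper

        setattr(cls, name, make_wrapper(attr))
    return cls

@log_exceptions
class GuestService:  # hypothetical service class
    def read_state(self):
        raise RuntimeError("boom")

# GuestService().read_state() logs the traceback before re-raising.
```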
34dfa7a2d8679b874ad624ecd735e448eda4f258
diff --git a/codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2ConnectionEncoder.java b/codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2ConnectionEncoder.java index <HASH>..<HASH> 100644 --- a/codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2ConnectionEncoder.java +++ b/codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2ConnectionEncoder.java @@ -315,10 +315,9 @@ public class DefaultHttp2ConnectionEncoder implements Http2ConnectionEncoder { * </p> */ private final class FlowControlledData extends FlowControlledBase { - private final CoalescingBufferQueue queue; - private FlowControlledData(Http2Stream stream, ByteBuf buf, int padding, boolean endOfStream, + public FlowControlledData(Http2Stream stream, ByteBuf buf, int padding, boolean endOfStream, ChannelPromise promise) { super(stream, padding, endOfStream, promise); queue = new CoalescingBufferQueue(promise.channel()); @@ -384,7 +383,7 @@ public class DefaultHttp2ConnectionEncoder implements Http2ConnectionEncoder { private final short weight; private final boolean exclusive; - private FlowControlledHeaders(Http2Stream stream, Http2Headers headers, int streamDependency, short weight, + public FlowControlledHeaders(Http2Stream stream, Http2Headers headers, int streamDependency, short weight, boolean exclusive, int padding, boolean endOfStream, ChannelPromise promise) { super(stream, padding, endOfStream, promise); this.headers = headers;
DefaultHttp2ConnectionEncoder private constructors on inner classes

Motivation:
DefaultHttp2ConnectionEncoder.FlowControlledHeaders and DefaultHttp2ConnectionEncoder.FlowControlledData have private constructors which may result in static factory methods being generated to construct instances of these classes.

Modifications:
- Make constructors public for these private classes

Result:
Accessor for inner class constructor more correct and no possibility of synthetic method generation.
netty_netty
train
43fe7b50644a3613fd662c5fe28da29b1269f0a2
diff --git a/msm/mycroft_skills_manager.py b/msm/mycroft_skills_manager.py index <HASH>..<HASH> 100644 --- a/msm/mycroft_skills_manager.py +++ b/msm/mycroft_skills_manager.py @@ -53,6 +53,10 @@ class MycroftSkillsManager(object): func.__name__, skill.name, repr(e) )) return False + except: + LOG.exception('Error running {} on {}:'.format( + func.__name__, skill.name + )) return all(ThreadPool(100).map(run_item, skills)) def install_defaults(self):
Capture all exceptions in threadpool apply

This makes sure no errors can prevent other functions from running
MycroftAI_mycroft-skills-manager
train
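The fix above broadens the exception handling around the function that the thread pool applies to each skill, so one bad item cannot stop the rest. A standalone sketch of that pattern follows; it is not the real msm API, and `install_or_update`/`skills` are placeholders.

```python
import logging
from multiprocessing.pool import ThreadPool

LOG = logging.getLogger(__name__)

def apply_to_all(func, items, workers=20):
    """Run func on every item; a failure on one item must not stop the others."""
    def run_item(item):
        try:
            func(item)
            return True
        except Exception:
            # Log and swallow so the remaining pool tasks still run.
            LOG.exception("Error running %s on %r", func.__name__, item)
            return False

    with ThreadPool(workers) as pool:
        return all(pool.map(run_item, items))

# Usage sketch: apply_to_all(install_or_update, skills) returns False if any skill failed.
```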
78c397a552df35d3bd0e5cbc5a7b954b41af0b20
diff --git a/lib/octopolo/github/label.rb b/lib/octopolo/github/label.rb index <HASH>..<HASH> 100644 --- a/lib/octopolo/github/label.rb +++ b/lib/octopolo/github/label.rb @@ -37,7 +37,7 @@ module Octopolo # # label - a label object def self.first_or_create(label) - unless all.include?(label) + unless all_from_repo.include?(label) GitHub.add_label(config.github_repo, label.name, label.color) end end diff --git a/spec/octopolo/github/label_spec.rb b/spec/octopolo/github/label_spec.rb index <HASH>..<HASH> 100644 --- a/spec/octopolo/github/label_spec.rb +++ b/spec/octopolo/github/label_spec.rb @@ -34,13 +34,13 @@ module Octopolo context "#first_or_create" do it "finds the existing label and doesn't do anything" do - allow(Label).to receive(:all).and_return([label1,label2]) + allow(Label).to receive(:all_from_repo).and_return([label1,label2]) expect(GitHub).not_to receive(:add_label) Label.first_or_create(label1) end it "doesn't find a label and creates one" do - allow(Label).to receive(:all).and_return([label1,label2]) + allow(Label).to receive(:all_from_repo).and_return([label1,label2]) expect(GitHub).to receive(:add_label).with(config.github_repo, "medium-risk", "454545") Label.first_or_create(Label.new(name: "medium-risk", color: "454545")) end diff --git a/spec/octopolo/scripts/pull_request_spec.rb b/spec/octopolo/scripts/pull_request_spec.rb index <HASH>..<HASH> 100644 --- a/spec/octopolo/scripts/pull_request_spec.rb +++ b/spec/octopolo/scripts/pull_request_spec.rb @@ -124,6 +124,13 @@ module Octopolo expect(cli).to receive(:ask).with("Label:",choices) subject.send(:ask_label) end + + it "asks for a label" do + allow(Octopolo::GitHub::Label).to receive(:all) {[label1,label2]} + allow(Octopolo::GitHub::Label).to receive(:get_names) {choices} + allow(cli).to receive(:ask) {"low-risk"} + expect(subject.send(:ask_label)).to eq(label1) + end end context "#ask_pivotal_ids" do
changed first_or_create to check all_from_repo instead of all
sportngin_octopolo
train
ef3830b9d58a14b8f5c2496887d330c59ef0a32a
diff --git a/api/transactions.js b/api/transactions.js index <HASH>..<HASH> 100644 --- a/api/transactions.js +++ b/api/transactions.js @@ -181,15 +181,15 @@ function getTransactionHelper(request, response, callback) { return async_callback(new errors.InvalidRequestError('Transaction not found. Missing hash')); } - if (options.hash && entry) { + var transactionHash = entry.hash || (entry.transaction || {}).hash; + + if (options.hash) { // Verify that transaction hashes match - if (options.hash !== entry.transaction.hash) { + if (options.hash !== transactionHash) { return async_callback(new errors.InvalidRequestError('Transaction not found. Hashes do not match')); } } - var transactionHash = options.hash || entry.transaction.hash; - remote.requestTx(transactionHash, function(error, transaction) { if (error) { return async_callback(error);
Fix transaction hash lookup for validated db entries
ripple_ripple-rest
train
bfc77afa1723a6f6fab7e32766b1b4a087f21bd9
diff --git a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/broker/JavaFunctionBroker.java b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/broker/JavaFunctionBroker.java index <HASH>..<HASH> 100644 --- a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/broker/JavaFunctionBroker.java +++ b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/broker/JavaFunctionBroker.java @@ -1,17 +1,10 @@ package com.microsoft.azure.webjobs.script.broker; import java.io.File; -import java.io.FileFilter; import java.io.IOException; import java.net.*; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.*; import java.util.concurrent.*; -import java.util.logging.Logger; - -import com.microsoft.azure.webjobs.script.WorkerLogManager; import com.microsoft.azure.webjobs.script.binding.*; import com.microsoft.azure.webjobs.script.description.FunctionMethodDescriptor; import com.microsoft.azure.webjobs.script.reflect.ClassLoaderProvider; diff --git a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/description/FunctionMethodDescriptor.java b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/description/FunctionMethodDescriptor.java index <HASH>..<HASH> 100644 --- a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/description/FunctionMethodDescriptor.java +++ b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/description/FunctionMethodDescriptor.java @@ -1,12 +1,7 @@ package com.microsoft.azure.webjobs.script.description; -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Paths; +import java.io.*; import java.util.Optional; - import org.apache.commons.lang3.ClassUtils; import org.apache.commons.lang3.StringUtils; diff --git a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/ClassLoaderProvider.java b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/ClassLoaderProvider.java index <HASH>..<HASH> 100644 --- a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/ClassLoaderProvider.java +++ b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/ClassLoaderProvider.java @@ -2,7 +2,6 @@ package com.microsoft.azure.webjobs.script.reflect; import java.io.File; import java.io.IOException; -import java.net.MalformedURLException; import java.net.URL; public interface ClassLoaderProvider { diff --git a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/DefaultClassLoaderProvider.java b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/DefaultClassLoaderProvider.java index <HASH>..<HASH> 100644 --- a/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/DefaultClassLoaderProvider.java +++ b/azure-functions-java-worker/src/main/java/com/microsoft/azure/webjobs/script/reflect/DefaultClassLoaderProvider.java @@ -1,18 +1,10 @@ package com.microsoft.azure.webjobs.script.reflect; -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; +import java.io.*; import java.lang.reflect.Method; -import java.net.URL; -import java.net.URLClassLoader; -import java.util.Arrays; -import java.util.Collections; -import java.util.Set; -import 
java.util.concurrent.ConcurrentHashMap; -import java.util.logging.Logger; - -import com.microsoft.azure.webjobs.script.WorkerLogManager; +import java.net.*; +import java.util.*; +import java.util.concurrent.*; /** * @author Kevin Hillinger @@ -96,8 +88,6 @@ public class DefaultClassLoaderProvider implements ClassLoaderProvider { } - Logger logger = WorkerLogManager.getHostLogger(); - private static final String systemClassLoaderAddUrlMethodName = "addURL"; private static final Class<?>[] parameters = new Class[] { URL.class }; private final Set<URL> urls;
styling clean up - import clean up - removing reference to logger
Azure_azure-functions-java-worker
train
4b7ec0273790cfe491d686b97894e04297d07c64
diff --git a/lib/em-hiredis/base_client.rb b/lib/em-hiredis/base_client.rb index <HASH>..<HASH> 100644 --- a/lib/em-hiredis/base_client.rb +++ b/lib/em-hiredis/base_client.rb @@ -21,7 +21,6 @@ module EventMachine::Hiredis @defs = [] @command_queue = [] - @closing_connection = false @reconnect_failed_count = 0 @reconnect_timer = nil @failed = false @@ -64,10 +63,12 @@ module EventMachine::Hiredis def reconnect!(new_uri = nil) @connection.close_connection configure(new_uri) if new_uri + @auto_reconnect = true EM.next_tick { reconnect_connection } end def connect + @auto_reconnect = true @connection = EM.connect(@host, @port, Connection, @host, @port) @connection.on(:closed) do @@ -76,14 +77,14 @@ module EventMachine::Hiredis @defs = [] @deferred_status = nil @connected = false - unless @closing_connection + if @auto_reconnect # Next tick avoids reconnecting after for example EM.stop EM.next_tick { reconnect } end emit(:disconnected) EM::Hiredis.logger.info("#{@connection} Disconnected") else - unless @closing_connection + if @auto_reconnect @reconnect_failed_count += 1 @reconnect_timer = EM.add_timer(EM::Hiredis.reconnect_timeout) { @reconnect_timer = nil @@ -168,13 +169,14 @@ module EventMachine::Hiredis def close_connection EM.cancel_timer(@reconnect_timer) if @reconnect_timer - @closing_connection = true + @auto_reconnect = false @connection.close_connection_after_writing end # Note: This method doesn't disconnect if already connected. You probably # want to use `reconnect!` def reconnect_connection + @auto_reconnect = true EM.cancel_timer(@reconnect_timer) if @reconnect_timer reconnect end
Rename variable for clarity and reset on reconnect
mloughran_em-hiredis
train
ee5dc926bb1c9f54030be9ec10451daaf85c24b6
diff --git a/tests/bootstrap.php b/tests/bootstrap.php index <HASH>..<HASH> 100644 --- a/tests/bootstrap.php +++ b/tests/bootstrap.php @@ -2,6 +2,8 @@ error_reporting(E_ALL | E_STRICT); -// include the composer autoloader and add our tests directory +// include the composer autoloader $autoloader = require __DIR__.'/../vendor/autoload.php'; + +// autoload abstract TestCase classes in test directory $autoloader->add('Omnipay', __DIR__);
Restructure namespace to make it easier to distribute separate packages in the future
thephpleague_omnipay-migs
train
a7ee95b33cecb4320af49469e26390b12ea8c54a
diff --git a/lib/couchrest/mixins/extended_attachments.rb b/lib/couchrest/mixins/extended_attachments.rb index <HASH>..<HASH> 100644 --- a/lib/couchrest/mixins/extended_attachments.rb +++ b/lib/couchrest/mixins/extended_attachments.rb @@ -14,7 +14,7 @@ module CouchRest # reads the data from an attachment def read_attachment(attachment_name) - Base64.decode64(database.fetch_attachment(self, attachment_name)) + database.fetch_attachment(self, attachment_name) end # modifies a file attachment on the current doc @@ -52,10 +52,6 @@ module CouchRest private - def encode_attachment(data) - ::Base64.encode64(data).gsub(/\r|\n/,'') - end - def get_mime_type(file) ::MIME::Types.type_for(file.path).empty? ? 'text\/plain' : MIME::Types.type_for(file.path).first.content_type.gsub(/\//,'\/')
fixed extended attachment encoding part 2

Encoding was done twice: once while setting the attachment and once while saving the document. Also the attachment was decoded while retrieving, but this is done by CouchDB itself automatically, as described in <URL>
couchrest_couchrest
train
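The CouchRest fix above is about double encoding: the attachment was Base64-encoded both when it was set and when the document was saved, and then decoded on read even though the server already returns decoded data. The same pitfall is easy to reproduce in any language; here is a small Python illustration of why "encode in exactly one place" matters — it is not CouchRest code.

```python
import base64

payload = b"hello attachment"

# Encoding twice means a single decode no longer returns the original bytes.
once = base64.b64encode(payload)
twice = base64.b64encode(once)

assert base64.b64decode(once) == payload
assert base64.b64decode(twice) != payload            # still wrapped in one layer
assert base64.b64decode(base64.b64decode(twice)) == payload

# The usual fix, as in the commit: encode only where the document is
# serialized, and trust the server/library to return decoded data on fetch.
```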
dd478c24e32a62ad1dcc475dc93bdafe9eac4a7f
diff --git a/sos/plugins/lvm2.py b/sos/plugins/lvm2.py index <HASH>..<HASH> 100644 --- a/sos/plugins/lvm2.py +++ b/sos/plugins/lvm2.py @@ -38,13 +38,24 @@ class Lvm2(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): self.add_cmd_output(cmd) def setup(self): - self.add_cmd_output("vgdisplay -vv", root_symlink = "vgdisplay") + # use locking_type 0 (no locks) when running LVM2 commands, from lvm.conf: + # Turn locking off by setting to 0 (dangerous: risks metadata corruption + # if LVM2 commands get run concurrently). + # None of the commands issued by sos ever modify metadata and this avoids + # the possibility of hanging lvm commands when another process or node + # holds a conflicting lock. + lvm_opts = '--config="global{locking_type=0}"' + + self.add_cmd_output( + "vgdisplay -vv %s" % lvm_opts, + root_symlink="vgdisplay" + ) self.add_cmd_outputs([ - "vgscan -vvv", - "pvscan -v", - "pvs -a -v", - "vgs -v", - "lvs -a -o +devices" + "vgscan -vvv %s" % lvm_opts, + "pvscan -v %s" % lvm_opts, + "pvs -a -v %s" % lvm_opts, + "vgs -v %s" % lvm_opts, + "lvs -a -o +devices %s" % lvm_opts ]) self.add_copy_spec("/etc/lvm")
Set global{locking_type=0} when running lvm2 commands

The lvm2 commands that sos issues are all read-only. Disable locking when running the commands to avoid blocking if another process or node is holding a lock.
sosreport_sos
train
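The sos change prepends a `--config` option to every LVM command it collects so that read-only diagnostics cannot block on a lock held elsewhere. Below is a reduced, standalone sketch of that idea outside the sos plugin framework — the command list is illustrative, `run` stands in for sos's `add_cmd_output` machinery, and the LVM tools must be installed for the commands to succeed.

```python
import subprocess

# Read-only collection: disable LVM locking so a lock held by another
# process or cluster node cannot hang these commands.
LVM_OPTS = "--config=global{locking_type=0}"

COMMANDS = [
    "vgdisplay -vv",
    "vgscan -vvv",
    "pvscan -v",
    "pvs -a -v",
    "vgs -v",
    "lvs -a -o +devices",
]

def run(cmd):
    """Run one diagnostic command with the extra LVM option appended."""
    argv = cmd.split() + [LVM_OPTS]
    return subprocess.run(argv, capture_output=True, text=True, check=False)

if __name__ == "__main__":
    for cmd in COMMANDS:
        result = run(cmd)
        print(cmd, "->", result.returncode)
```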
3650ba6f5862c6f518980480cdb8737633433ad4
diff --git a/admin/xmldb/actions/test/test.class.php b/admin/xmldb/actions/test/test.class.php index <HASH>..<HASH> 100644 --- a/admin/xmldb/actions/test/test.class.php +++ b/admin/xmldb/actions/test/test.class.php @@ -74,9 +74,13 @@ class test extends XMLDBAction { /// Silenty drop any previous test tables $table = new XMLDBTable('testtable'); - $status = drop_table($table, true, false); + if (table_exists($table)) { + $status = drop_table($table, true, false); + } $table = new XMLDBTable ('anothertest'); - $status = drop_table($table, true, false); + if (table_exists($table)) { + $status = drop_table($table, true, false); + } /// 1st test. Complete table creation. $table = new XMLDBTable('testtable');
Using the new table_exists() to initialize tests properly.
moodle_moodle
train
ec4017a31a063e62f66af2f3751fb4ce4d9b7b6b
diff --git a/isort/settings.py b/isort/settings.py index <HASH>..<HASH> 100644 --- a/isort/settings.py +++ b/isort/settings.py @@ -300,7 +300,11 @@ class Config(_Config): combined_config["import_headings"] = import_headings if "src_paths" not in combined_config: - combined_config["src_paths"] = frozenset((Path.cwd(),)) + combined_config["src_paths"] = frozenset((Path.cwd().absolute(),)) + else: + combined_config["src_paths"] = frozenset( + path.absolute() for path in combined_config["src_paths"] + ) super().__init__(sources=tuple(sources), **combined_config) # type: ignore
Ensure all provided src_paths are absolute
timothycrosley_isort
train
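The isort fix normalises every configured src path to an absolute path, falling back to the current working directory when nothing is configured. Isolated from the rest of the config machinery, the core of it looks roughly like this — a sketch, not the actual isort internals.

```python
from pathlib import Path

def normalize_src_paths(src_paths=None):
    """Return a frozenset of absolute source paths.

    Defaults to the current working directory when nothing is configured,
    so later path comparisons never mix relative and absolute forms.
    """
    if not src_paths:
        return frozenset((Path.cwd().absolute(),))
    return frozenset(Path(p).absolute() for p in src_paths)

print(normalize_src_paths())                   # just the current directory, absolute
print(normalize_src_paths(["src", "tests"]))   # both entries resolved against the cwd
```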
6d2dfefb3e2ec889ba2248dd9bac908612b43ec8
diff --git a/pkg/models/balanceinfo/balanceinfo.go b/pkg/models/balanceinfo/balanceinfo.go index <HASH>..<HASH> 100644 --- a/pkg/models/balanceinfo/balanceinfo.go +++ b/pkg/models/balanceinfo/balanceinfo.go @@ -25,3 +25,13 @@ func FromRaw(raw []interface{}) (o *BalanceInfo, err error) { return } + +func UpdateFromRaw(raw []interface{}) (Update, error) { + bi, err := FromRaw(raw) + if err != nil { + return Update{}, err + } + + u := Update(*bi) + return u, nil +}
ability to get balance info update instance as well as balance info
bitfinexcom_bitfinex-api-go
train
700041384f858380e914b796e8d26a2332d6080d
diff --git a/modules/custom/activity_creator/src/ActivityFactory.php b/modules/custom/activity_creator/src/ActivityFactory.php index <HASH>..<HASH> 100644 --- a/modules/custom/activity_creator/src/ActivityFactory.php +++ b/modules/custom/activity_creator/src/ActivityFactory.php @@ -26,6 +26,7 @@ class ActivityFactory extends ControllerBase { * ActivityFactory constructor. * * @param \Drupal\activity_creator\Plugin\ActivityDestinationManager $activityDestinationManager + * The activity destination manager. */ public function __construct(ActivityDestinationManager $activityDestinationManager) { $this->activityDestinationManager = $activityDestinationManager; diff --git a/modules/custom/activity_creator/src/Annotation/ActivityDestination.php b/modules/custom/activity_creator/src/Annotation/ActivityDestination.php index <HASH>..<HASH> 100644 --- a/modules/custom/activity_creator/src/Annotation/ActivityDestination.php +++ b/modules/custom/activity_creator/src/Annotation/ActivityDestination.php @@ -35,13 +35,13 @@ class ActivityDestination extends Plugin { * * @var bool */ - public $is_aggregatable = FALSE; + public $isAggregatable = FALSE; /** * Whether this destination is common or not. * * @var bool */ - public $is_common = FALSE; + public $isCommon = FALSE; } diff --git a/modules/custom/activity_creator/src/Entity/Activity.php b/modules/custom/activity_creator/src/Entity/Activity.php index <HASH>..<HASH> 100644 --- a/modules/custom/activity_creator/src/Entity/Activity.php +++ b/modules/custom/activity_creator/src/Entity/Activity.php @@ -9,7 +9,6 @@ use Drupal\Core\Entity\EntityChangedTrait; use Drupal\Core\Entity\EntityTypeInterface; use Drupal\activity_creator\ActivityInterface; use Drupal\user\UserInterface; -use Drupal\votingapi\Entity\Vote; /** * Defines the Activity entity. diff --git a/modules/custom/download_count/src/Plugin/Field/FieldFormatter/FieldDownloadCount.php b/modules/custom/download_count/src/Plugin/Field/FieldFormatter/FieldDownloadCount.php index <HASH>..<HASH> 100644 --- a/modules/custom/download_count/src/Plugin/Field/FieldFormatter/FieldDownloadCount.php +++ b/modules/custom/download_count/src/Plugin/Field/FieldFormatter/FieldDownloadCount.php @@ -11,6 +11,8 @@ use Drupal\Core\Template\Attribute; use Drupal\Component\Utility\Html; /** + * The FieldDownloadCount class. + * * @FieldFormatter( * id = "FieldDownloadCount", * label = @Translation("Generic file with download count"),
DS-<I> by frankgraave: fix coding standard for activity_creator and download_count
goalgorilla_open_social
train
00201ffaa8e0c725b1acd429f5e5a712f30378d1
diff --git a/cmd/helm/upgrade.go b/cmd/helm/upgrade.go index <HASH>..<HASH> 100644 --- a/cmd/helm/upgrade.go +++ b/cmd/helm/upgrade.go @@ -112,6 +112,7 @@ func newUpgradeCmd(cfg *action.Configuration, out io.Writer) *cobra.Command { instClient.Atomic = client.Atomic instClient.PostRenderer = client.PostRenderer instClient.DisableOpenAPIValidation = client.DisableOpenAPIValidation + instClient.SubNotes = client.SubNotes rel, err := runInstall(args, instClient, valueOpts, out) if err != nil {
pass subchart notes option to install client
helm_helm
train
b33d4a9d268dbfa9fe2dd41a7f6d31c794795f41
diff --git a/structr-ui/src/main/resources/structr/js/flows.js b/structr-ui/src/main/resources/structr/js/flows.js index <HASH>..<HASH> 100644 --- a/structr-ui/src/main/resources/structr/js/flows.js +++ b/structr-ui/src/main/resources/structr/js/flows.js @@ -618,8 +618,6 @@ let _Flows = { let entity = detail.entity; // proxy object let propertyName = detail.propertyName; - console.log(element, flowNodeType) - Structr.dialog("Edit " + flowNodeType, () => {}, () => {}, ['popup-dialog-with-editor']); dialogText.append('<div class="editor h-full"></div>'); @@ -628,7 +626,7 @@ let _Flows = { <button id="saveAndClose" disabled="disabled" class="disabled"> Save and close</button> `); - let contentBox = $('.editor', dialogText); + let contentBox = $('.editor', dialogText); let dialogSaveButton = dialogBtn[0].querySelector('#editorSave'); let saveAndClose = dialogBtn[0].querySelector('#saveAndClose'); @@ -636,7 +634,7 @@ let _Flows = { let editorConfig = { value: initialText, - language: 'javascript', + language: 'auto', lint: true, autocomplete: true, forceAllowAutoComplete: true, @@ -669,7 +667,7 @@ let _Flows = { } }; - let editor = _Editors.getMonacoEditor(entity, propertyName, $(contentBox), editorConfig); + let editor = _Editors.getMonacoEditor(entity, propertyName, contentBox, editorConfig); Structr.resize();
Bugfix: Set language in flow editor text editors to 'auto' and remove debug log
structr_structr
train
385a774a2ad6cc76c476dc88e6f44b940610e9ae
diff --git a/main.py b/main.py index <HASH>..<HASH> 100644 --- a/main.py +++ b/main.py @@ -1,6 +1,6 @@ # NOTE: These settings and imports should be the only things that change # across experiments on different datasets and ML model types. -import experiments.sample_2 as experiment +import experiments.sample as experiment from model_factories.TensorFlowModelFactory import ModelFactory from measurements import accuracy, complement_BER response_header = "Outcome" diff --git a/model_factories/TensorFlowModelFactory.py b/model_factories/TensorFlowModelFactory.py index <HASH>..<HASH> 100644 --- a/model_factories/TensorFlowModelFactory.py +++ b/model_factories/TensorFlowModelFactory.py @@ -15,8 +15,8 @@ class ModelFactory(AbstractModelFactory): def __init__(self, *args, **kwargs): super(ModelFactory, self).__init__(*args, **kwargs) - self.num_epochs = 100 - self.batch_size = 500 + self.num_epochs = 500 + self.batch_size = 300 self.learning_rate = 0.01 self.hidden_layer_sizes = [] # If empty, no hidden layers are used. self.layer_types = [tf.nn.softmax, # Input Layer @@ -166,7 +166,10 @@ def translate_dataset(response_index, data_set, trans_dict, headers, normalizers new_row.extend(val_list) else: norm = normalizers[header] - normed = (val-norm["mean"])/(norm["max"]-norm["min"]) + if (norm["max"]-norm["min"]) > 0: + normed = (val-norm["mean"])/(norm["max"]-norm["min"]) + else: + normed = 0.0 new_row.append(normed) translated_set.append(new_row) return translated_set
Fixed a corner-case in the normalization
algofairness_BlackBoxAuditing
train
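The corner case fixed above is a zero range (max equal to min) during mean-centred min-max normalisation, which would otherwise divide by zero on a constant column. A self-contained version of that guard, with an illustrative stats dict rather than the project's real data structures:

```python
def normalize(value, stats):
    """Mean-centred min-max scaling with a guard for constant columns."""
    spread = stats["max"] - stats["min"]
    if spread > 0:
        return (value - stats["mean"]) / spread
    # Every value in the column is identical: nothing to scale,
    # so map it to 0.0 instead of dividing by zero.
    return 0.0

stats = {"min": 5, "max": 5, "mean": 5}
print(normalize(5, stats))   # 0.0 rather than a ZeroDivisionError
```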
c722ab9c768af64b3528a8fc780fad3ababd402d
diff --git a/lib/gulp-uglify-error.js b/lib/gulp-uglify-error.js index <HASH>..<HASH> 100644 --- a/lib/gulp-uglify-error.js +++ b/lib/gulp-uglify-error.js @@ -1,4 +1,13 @@ 'use strict'; var makeErrorCause = require('make-error-cause'); -module.exports = makeErrorCause('GulpUglifyError'); +var gulpUglifyError = makeErrorCause('GulpUglifyError'); +gulpUglifyError.prototype.toString = function() { + var cause = this.cause || {}; + + return makeErrorCause.BaseError.prototype.toString.call(this) + + (this.fileName ? '\nFile: ' + this.fileName : '') + + (cause.line ? '\nLine: ' + cause.line : ''); +}; + +module.exports = gulpUglifyError;
fix(errors): restore file and line info

When upgrading to version <I> the errors implementation was changed, and the "filename" and "line" context was removed from the `toString` representation. This commit restores the information by extending the `make-error-cause` method.

Fixes #<I>.
terinjokes_gulp-uglify
train
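The gulp-uglify fix extends the error's string representation so file and line context reappear in the output. The commit is JavaScript; the snippet below is only a Python analogue of the same idea — keep the base message and append the extra fields when they are known — not the library's code.

```python
class UglifyError(Exception):
    """Error whose string form carries file and line context, when known.

    Loosely mirrors the gulp-uglify fix: the base message is kept and the
    extra fields are appended only if they are present.
    """

    def __init__(self, message, file_name=None, line=None):
        super().__init__(message)
        self.file_name = file_name
        self.line = line

    def __str__(self):
        text = super().__str__()
        if self.file_name:
            text += f"\nFile: {self.file_name}"
        if self.line:
            text += f"\nLine: {self.line}"
        return text

print(UglifyError("Unexpected token", file_name="app.js", line=42))
```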
b54ceeaa8522e1e04fa300d80eeebbdea9df5dff
diff --git a/schema_salad/ref_resolver.py b/schema_salad/ref_resolver.py index <HASH>..<HASH> 100644 --- a/schema_salad/ref_resolver.py +++ b/schema_salad/ref_resolver.py @@ -179,7 +179,13 @@ class DefaultFetcher(Fetcher): raise ValueError('Unsupported scheme in url: %s' % url) def urljoin(self, base_url, url): # type: (Text, Text) -> Text - return urllib.parse.urljoin(base_url, url) + # On windows urljoin consider drive name as scheme and forces it over base url's scheme, + # here we are forcing base url's scheme over url + basesplit = urlparse.urlsplit(base_url) + if basesplit.scheme: + split = urlparse.urlsplit(url) + url = urlparse.urlunsplit(('', split.netloc, split.path, split.query, split.fragment)) + return urlparse.urljoin(base_url, url) class Loader(object): def __init__(self, @@ -282,7 +288,7 @@ class Loader(object): split = urllib.parse.urlsplit(url) - if (bool(split.scheme) or url.startswith(u"$(") + if ((bool(split.scheme) and split.scheme in [u'http', u'https', u'file']) or url.startswith(u"$(") or url.startswith(u"${")): pass elif scoped_id and not bool(split.fragment):
Windows compatibility issue: Windows drive names are treated as a URL scheme
common-workflow-language_schema_salad
train
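The underlying problem in the schema-salad commit is that on Windows a path such as `C:\Users\...` parses with scheme `c`, so a plain `urljoin` lets the drive letter override the base URL's scheme. Below is a sketch of the workaround the commit takes — strip the (possibly bogus) scheme from the part being joined when the base already has one. It is a simplified stand-in, not the project's `DefaultFetcher.urljoin`.

```python
from urllib.parse import urljoin, urlsplit, urlunsplit

def safe_urljoin(base_url, url):
    """Join url onto base_url without letting a Windows drive letter win."""
    if urlsplit(base_url).scheme:
        parts = urlsplit(url)
        # Drop the scheme from the part being joined; on Windows a drive
        # letter like "C:" would otherwise be parsed as scheme "c".
        url = urlunsplit(("", parts.netloc, parts.path, parts.query, parts.fragment))
    return urljoin(base_url, url)

print(safe_urljoin("file:///home/user/wf/", "steps/tool.cwl"))
```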
c3b79a2a6ac00ec80c63050bf32d22bf2acb6816
diff --git a/tests/Divergence/AppTest.php b/tests/Divergence/AppTest.php index <HASH>..<HASH> 100644 --- a/tests/Divergence/AppTest.php +++ b/tests/Divergence/AppTest.php @@ -75,9 +75,6 @@ class AppTest extends TestCase //$this->App->init($this->ApplicationPath); } - /** - * @covers App::init - */ public function testAppInit() { $this->doInit(); @@ -98,9 +95,6 @@ class AppTest extends TestCase $this->doInit(); } - /** - * @covers App::config - */ public function testAppConfig() { $this->doInit(); @@ -111,9 +105,6 @@ class AppTest extends TestCase $this->cleanFakeDevEnv(); } - /** - * @covers App::registerErrorHandler - */ public function testAppRegisterErrorHandler() { $this->doInit(); diff --git a/tests/Divergence/IO/Database/MySQLTest.php b/tests/Divergence/IO/Database/MySQLTest.php index <HASH>..<HASH> 100644 --- a/tests/Divergence/IO/Database/MySQLTest.php +++ b/tests/Divergence/IO/Database/MySQLTest.php @@ -86,7 +86,8 @@ class MySQLTest extends TestCase * For older MySQL message is: "PDO failed to connect on config "mysql" mysql:host=localhost;port=3306;dbname=divergence" * For newer MySQL message is: "SQLSTATE[HY000] [1044] Access denied for user 'divergence'@'localhost' to database 'divergence'" */ - $this->expectExceptionCode(1044); // MySQL access denied + #$this->expectExceptionCode(1044); // MySQL access denied + $this->expectException(\Exception::class); //$this->expectExceptionMessage('PDO failed to connect on config "mysql" mysql:host=localhost;port=3306;dbname=divergence'); $this->assertInstanceOf(\PDO::class, DB::getConnection('mysql')); }
Remove @covers since it does it automagically and change MySQL test for interoperability
Divergence_framework
train
0d4bfe60f8437b0dd3e65a8668b300efc5a7f00d
diff --git a/lib/jumpstart/filetools.rb b/lib/jumpstart/filetools.rb index <HASH>..<HASH> 100644 --- a/lib/jumpstart/filetools.rb +++ b/lib/jumpstart/filetools.rb @@ -121,11 +121,13 @@ module JumpStart::FileTools # Hello there NAME from COUNTRY # Will also replace strings present in the target_file path. so if the method call looked like: FileUtils.replace_strings(target_file, :name => "Ian", :country => "England") # and target_file was: /Users/name/Sites/country the strings matching NAME and COUNTRY inside the file would be swapped out and then a new file at the path: /Users/Ian/Sites/England would be created and populated with the contents. The file at the previous path would be deleted. + # Finally if you specify a symbol and append _CLASS in the template, that instance will be replace with a capitalized version of the string. def replace_strings(target_file, args) if File.file?(target_file) txt = IO.read(target_file) new_file = target_file.dup args.each do |x, y| + txt.gsub!(/#{x.to_s.upcase}_CLASS/, y.capitalize) txt.gsub!(/#{x.to_s.upcase}/, y) new_file.gsub!(/#{x.to_s.downcase}/, y) end diff --git a/test/jumpstart/test_filetools.rb b/test/jumpstart/test_filetools.rb index <HASH>..<HASH> 100755 --- a/test/jumpstart/test_filetools.rb +++ b/test/jumpstart/test_filetools.rb @@ -295,6 +295,18 @@ class TestJumpstartFileTools < Test::Unit::TestCase assert !File.exists?(@target_file_3) end + should "replace strings that have _CLASS appended to them with a capitalised version of the replacement string." do + FileUtils.replace_strings(@target_file_3, :app_name => 'bungle', :remote_server => 'boxy') + file = IO.readlines(@new_file_3) + assert_equal "set :application, 'bungle'\n", file[0] + assert_equal "set :domain, 'boxy'\n", file[1] + assert_equal "run \"\#{sudo} nginx_auto_config /usr/local/bin/nginx.remote.conf /opt/nginx/conf/nginx.conf bungle\"\n", file[44] + assert_equal "# This is a test string Bungle\n", file[63] + assert_equal "# This is a test string Boxy\n", file[64] + assert File.exists?(@new_file_3) + assert !File.exists?(@target_file_3) + end + end context "Testing JumpStart::FileUtils#check_source_type class method.\n" do diff --git a/test/test_jumpstart_templates/test_fileutils/config_capistrano_source.rb b/test/test_jumpstart_templates/test_fileutils/config_capistrano_source.rb index <HASH>..<HASH> 100644 --- a/test/test_jumpstart_templates/test_fileutils/config_capistrano_source.rb +++ b/test/test_jumpstart_templates/test_fileutils/config_capistrano_source.rb @@ -59,4 +59,7 @@ end # deploy:symlink # deploy:restart -# eg: after 'deploy:symlink', 'deploy:restart' \ No newline at end of file +# eg: after 'deploy:symlink', 'deploy:restart' + +# This is a test string APP_NAME_CLASS +# This is a test string REMOTE_SERVER_CLASS \ No newline at end of file
added functionality for replacing strings with a capitalized version if the hook in the template is appended with _CLASS
i0n_jumpstart
train
ee669951f2d760298e139ac126cd286f4cefb6a4
diff --git a/seaglass/trunk/seaglass/src/main/java/com/seaglass/component/SeaGlassTitlePane.java b/seaglass/trunk/seaglass/src/main/java/com/seaglass/component/SeaGlassTitlePane.java index <HASH>..<HASH> 100644 --- a/seaglass/trunk/seaglass/src/main/java/com/seaglass/component/SeaGlassTitlePane.java +++ b/seaglass/trunk/seaglass/src/main/java/com/seaglass/component/SeaGlassTitlePane.java @@ -71,6 +71,8 @@ import sun.swing.plaf.synth.SynthUI; * @author Kathryn Huxtable */ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyChangeListener { + private static final String WINDOW_DOCUMENT_MODIFIED = "Window.documentModified"; + // Basic private JButton iconButton; private JButton maxButton; @@ -87,19 +89,19 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh private Action moveAction; private Action sizeAction; - private static final String CLOSE_CMD = UIManager.getString("InternalFrameTitlePane.closeButtonText"); - private static final String ICONIFY_CMD = UIManager.getString("InternalFrameTitlePane.minimizeButtonText"); - private static final String RESTORE_CMD = UIManager.getString("InternalFrameTitlePane.restoreButtonText"); - private static final String MAXIMIZE_CMD = UIManager.getString("InternalFrameTitlePane.maximizeButtonText"); - private static final String MOVE_CMD = UIManager.getString("InternalFrameTitlePane.moveButtonText"); - private static final String SIZE_CMD = UIManager.getString("InternalFrameTitlePane.sizeButtonText"); + private static final String CLOSE_CMD = UIManager.getString("InternalFrameTitlePane.closeButtonText"); + private static final String ICONIFY_CMD = UIManager.getString("InternalFrameTitlePane.minimizeButtonText"); + private static final String RESTORE_CMD = UIManager.getString("InternalFrameTitlePane.restoreButtonText"); + private static final String MAXIMIZE_CMD = UIManager.getString("InternalFrameTitlePane.maximizeButtonText"); + private static final String MOVE_CMD = UIManager.getString("InternalFrameTitlePane.moveButtonText"); + private static final String SIZE_CMD = UIManager.getString("InternalFrameTitlePane.sizeButtonText"); private String closeButtonToolTip; private String iconButtonToolTip; private String restoreButtonToolTip; private String maxButtonToolTip; - private int state = -1; + private int state = -1; private SeaGlassRootPaneUI rootPaneUI; // Synth @@ -112,7 +114,7 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh rootParent = (RootPaneContainer) rootPane.getParent(); installTitlePane(); } - + public JRootPane getRootPane() { return rootPane; } @@ -200,6 +202,7 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh } else if (rootParent instanceof JDialog) { ((JDialog) rootParent).addPropertyChangeListener(listener); } + rootPane.addPropertyChangeListener(listener); } private void removeParentPropertyChangeListener(PropertyChangeListener listener) { @@ -264,7 +267,6 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh protected void installListeners() { addParentPropertyChangeListener(this); - addParentPropertyChangeListener(this); } protected void uninstallListeners() { @@ -454,6 +456,12 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh // Basic (from Handler inner class) String prop = (String) evt.getPropertyName(); + if (closeButton != null && WINDOW_DOCUMENT_MODIFIED.equals(prop)) { + closeButton.revalidate(); + closeButton.repaint(); + return; + } + if 
(prop == JInternalFrame.IS_SELECTED_PROPERTY) { repaint(); return; @@ -729,6 +737,7 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh } else { setParentMaximum(false); } + setButtonTooltips(); } } } @@ -749,6 +758,7 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh setParentIcon(false); } } + setButtonTooltips(); } } @@ -768,6 +778,7 @@ public class SeaGlassTitlePane extends JComponent implements SynthUI, PropertyCh } else if (isParentIconifiable() && isParentIcon()) { setParentIcon(false); } + setButtonTooltips(); } }
Tooltips are working, I think. Added document modified listener on rootpane to update the close button.
khuxtable_seaglass
train
a3a2d1a34571fd72816d608b40e9ac574fed8eb7
diff --git a/scanner/src/main/java/com/buschmais/jqassistant/core/scanner/impl/FileScannerImpl.java b/scanner/src/main/java/com/buschmais/jqassistant/core/scanner/impl/FileScannerImpl.java index <HASH>..<HASH> 100644 --- a/scanner/src/main/java/com/buschmais/jqassistant/core/scanner/impl/FileScannerImpl.java +++ b/scanner/src/main/java/com/buschmais/jqassistant/core/scanner/impl/FileScannerImpl.java @@ -1,9 +1,9 @@ package com.buschmais.jqassistant.core.scanner.impl; -import com.buschmais.jqassistant.core.store.api.descriptor.Descriptor; import com.buschmais.jqassistant.core.scanner.api.FileScanner; import com.buschmais.jqassistant.core.scanner.api.FileScannerPlugin; import com.buschmais.jqassistant.core.store.api.Store; +import com.buschmais.jqassistant.core.store.api.descriptor.Descriptor; import org.apache.commons.io.DirectoryWalker; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; @@ -267,7 +267,7 @@ public class FileScannerImpl implements FileScanner { @Override protected String getName(URL element) { - return element.getPath() + "/" + element.getFile(); + return element.getPath(); } @Override
#<I> reworked node structure for properties and property files
buschmais_jqa-core-framework
train
430b30fa901ca378b735780a3fe54bc25d260ee6
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,15 @@ If you do not specify `allowedTags` or `allowedAttributes` our default list is a // URL schemes we permit allowedSchemes: [ 'http', 'https', 'ftp', 'mailto' ] +"What if I want to allow all tags or all attributes?" + +Simple! instead of leaving `allowedTags` or `allowedAttributes` out of the options, set either +one or both to `false`: + + allowedTags: false, + allowedAttributes: false + + ### Transformations What if you want to add or change an attribute? What if you want to transform one tag to another? No problem, it's simple! @@ -224,4 +233,3 @@ We're rocking our tests and have been working great in production for months, so Feel free to open issues on [github](http://github.com/punkave/sanitize-html). <a href="http://punkave.com/"><img src="https://raw.github.com/punkave/sanitize-html/master/logos/logo-box-builtby.png" /></a> - diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -34,28 +34,36 @@ function sanitizeHtml(html, options) { script: true, style: true }; - var allowedTagsMap = {}; - _.each(options.allowedTags, function(tag) { - allowedTagsMap[tag] = true; - }); + var allowedTagsMap; + if(options.allowedTags) { + allowedTagsMap = {}; + _.each(options.allowedTags, function(tag) { + allowedTagsMap[tag] = true; + }); + } var selfClosingMap = {}; _.each(options.selfClosing, function(tag) { selfClosingMap[tag] = true; }); - var allowedAttributesMap = {}; - _.each(options.allowedAttributes, function(attributes, tag) { - allowedAttributesMap[tag] = {}; - _.each(attributes, function(name) { - allowedAttributesMap[tag][name] = true; + var allowedAttributesMap; + if(options.allowedAttributes) { + allowedAttributesMap = {}; + _.each(options.allowedAttributes, function(attributes, tag) { + allowedAttributesMap[tag] = {}; + _.each(attributes, function(name) { + allowedAttributesMap[tag][name] = true; + }); }); - }); + } var allowedClassesMap = {}; _.each(options.allowedClasses, function(classes, tag) { // Implicitly allows the class attribute - if (!allowedAttributesMap[tag]) { - allowedAttributesMap[tag] = {}; + if(allowedAttributesMap) { + if (!allowedAttributesMap[tag]) { + allowedAttributesMap[tag] = {}; + } + allowedAttributesMap[tag]['class'] = true; } - allowedAttributesMap[tag]['class'] = true; allowedClassesMap[tag] = {}; _.each(classes, function(name) { @@ -93,7 +101,7 @@ function sanitizeHtml(html, options) { } } - if (!_.has(allowedTagsMap, name)) { + if (allowedTagsMap && !_.has(allowedTagsMap, name)) { skip = true; if (_.has(nonTextTagsMap, name)) { skipText = true; @@ -106,9 +114,9 @@ function sanitizeHtml(html, options) { return; } result += '<' + name; - if (_.has(allowedAttributesMap, name)) { + if (!allowedAttributesMap || _.has(allowedAttributesMap, name)) { _.each(attribs, function(value, a) { - if (_.has(allowedAttributesMap[name], a)) { + if (!allowedAttributesMap || _.has(allowedAttributesMap[name], a)) { if ((a === 'href') || (a === 'src')) { if (naughtyHref(value)) { delete frame.attribs[a]; diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -7,6 +7,12 @@ describe('sanitizeHtml', function() { it('should pass through simple well-formed whitelisted markup', function() { assert.equal(sanitizeHtml('<div><p>Hello <b>there</b></p></div>'), '<div><p>Hello <b>there</b></p></div>'); }); + it('should pass through all markup if allowedTags and allowedAttributes are 
set to false', function() { + assert.equal(sanitizeHtml('<div><wiggly worms="ewww">hello</wiggly></div>', { + allowedTags: false, + allowedAttributes: false + }), '<div><wiggly worms="ewww">hello</wiggly></div>'); + }); it('should respect text nodes at top level', function() { assert.equal(sanitizeHtml('Blah blah blah<p>Whee!</p>'), 'Blah blah blah<p>Whee!</p>'); }); @@ -222,4 +228,3 @@ describe('sanitizeHtml', function() { ); }); }); -
Option to turn off whitelist for tags and attributes
punkave_sanitize-html
train
a9598c0bda22d72718e16c828be86788573b1a91
diff --git a/lib/beaker-pe/version.rb b/lib/beaker-pe/version.rb index <HASH>..<HASH> 100644 --- a/lib/beaker-pe/version.rb +++ b/lib/beaker-pe/version.rb @@ -3,7 +3,7 @@ module Beaker module PE module Version - STRING = '1.40.3' + STRING = '1.40.4' end end
(GEM) update beaker-pe version to <I>
puppetlabs_beaker-pe
train
a1734e3c63995b8bd21b2d4280bf85b7c84acd00
diff --git a/README.rst b/README.rst index <HASH>..<HASH> 100644 --- a/README.rst +++ b/README.rst @@ -3,4 +3,46 @@ Django-Choices ============================ Order and sanity for django model choices. ------------------------------------------------------ -Documentation in the works. +*Documentation is a work in progress.* + +Django choices provides a declarative way of using the choices_ option on django_ +fields. + +----------- +Basic Usage +----------- +To start you create a choices class in choices.py or const.py (I prefer const.py). +Then you point the choices property to the ``choices`` attribute of the new class. +Django will be able to use the choices and you will be able to access the values +by name. For example:: + + # In choices.py + from djchoices import DjangoChoices, ChoiceItem + + class PersonType(DjangoChoices): + Customer = ChoiceItem("C") + Employee = ChoiceItem("E") + Groundhog = ChoiceItem("G") + + # In models.py + class Person(models.Model): + name = models.CharField(max_length=32) + type = models.CharField(max_length=1, choices=choices.PersonType.choices) + + # In other code + Person.create(name="Phil", type=PersonType.Groundhog) + +------- +License +------- +Licensed under the `MIT License`_. + +---------- +Souce Code +---------- +The source code can be found on github_. + +.. _choices: http://docs.djangoproject.com/en/1.2/ref/models/fields/#choices +.. _MIT License: http://en.wikipedia.org/wiki/MIT_License +.. _django: http://www.djangoproject.com/ +.. _github: https://github.com/bigjason/django-choices \ No newline at end of file diff --git a/djchoices/choices.py b/djchoices/choices.py index <HASH>..<HASH> 100644 --- a/djchoices/choices.py +++ b/djchoices/choices.py @@ -1,5 +1,5 @@ -from collections import OrderedDict import re +from collections import OrderedDict __all__ = ["ChoiceItem", "ChoicesBase", "C"] diff --git a/djchoices/tests/test_choices.py b/djchoices/tests/test_choices.py index <HASH>..<HASH> 100644 --- a/djchoices/tests/test_choices.py +++ b/djchoices/tests/test_choices.py @@ -55,10 +55,21 @@ class DjangoChoices(unittest.TestCase): self.assertEqual(choices[3][0], 4) self.assertEqual(choices[4][0], 5) + def test_sub_class_level_1_values(self): + self.assertEqual(SubClass1.Item_1, 1) + self.assertEqual(SubClass1.Item_4, 4) + self.assertEqual(SubClass1.Item_5, 5) + def test_sub_class_level_2_choices(self): choices = SubClass2.choices self.assertEqual(choices[0][0], 1) self.assertEqual(choices[3][0], 4) self.assertEqual(choices[5][0], 6) self.assertEqual(choices[6][0], 7) + + def test_sub_class_level_2_values(self): + self.assertEqual(SubClass2.Item_1, 1) + self.assertEqual(SubClass2.Item_5, 5) + self.assertEqual(SubClass2.Item_6, 6) + self.assertEqual(SubClass2.Item_7, 7) \ No newline at end of file
More tests. Slightly better README.
bigjason_django-choices
train
65ef40c2dc49400d3e2f65068a44c4129cf9eb4f
diff --git a/Classes/Core/Acceptance/Extension/BackendEnvironment.php b/Classes/Core/Acceptance/Extension/BackendEnvironment.php index <HASH>..<HASH> 100644 --- a/Classes/Core/Acceptance/Extension/BackendEnvironment.php +++ b/Classes/Core/Acceptance/Extension/BackendEnvironment.php @@ -19,7 +19,6 @@ use Codeception\Event\SuiteEvent; use Codeception\Events; use Codeception\Extension; use TYPO3\CMS\Core\Cache\Backend\NullBackend; -use TYPO3\CMS\Core\Core\Bootstrap; use TYPO3\CMS\Core\Database\ConnectionPool; use TYPO3\CMS\Core\Utility\GeneralUtility; use TYPO3\CMS\Styleguide\TcaDataGenerator\Generator; @@ -260,7 +259,6 @@ abstract class BackendEnvironment extends Extension // Set some hard coded base settings for the instance. Those could be overruled by // $this->config['configurationToUseInTestInstance ']if needed again. $localConfiguration['BE']['debug'] = true; - $localConfiguration['BE']['lockHashKeyWords'] = ''; $localConfiguration['BE']['installToolPassword'] = '$P$notnotnotnotnotnot.validvalidva'; $localConfiguration['SYS']['displayErrors'] = false; $localConfiguration['SYS']['debugExceptionHandler'] = ''; diff --git a/Classes/Core/Functional/FunctionalTestCase.php b/Classes/Core/Functional/FunctionalTestCase.php index <HASH>..<HASH> 100644 --- a/Classes/Core/Functional/FunctionalTestCase.php +++ b/Classes/Core/Functional/FunctionalTestCase.php @@ -271,7 +271,6 @@ abstract class FunctionalTestCase extends BaseTestCase GeneralUtility::purgeInstances(); $this->container = $testbase->setUpBasicTypo3Bootstrap($this->instancePath); $testbase->initializeTestDatabaseAndTruncateTables(); - Bootstrap::initializeBackendRouter(); $testbase->loadExtensionTables(); } else { $testbase->removeOldInstanceIfExists($this->instancePath); @@ -339,7 +338,6 @@ abstract class FunctionalTestCase extends BaseTestCase } else { $testbase->setUpTestDatabase($dbPath, $originalDatabaseName); } - Bootstrap::initializeBackendRouter(); $testbase->loadExtensionTables(); $testbase->createDatabaseStructure(); } diff --git a/Classes/Core/Testbase.php b/Classes/Core/Testbase.php index <HASH>..<HASH> 100644 --- a/Classes/Core/Testbase.php +++ b/Classes/Core/Testbase.php @@ -572,7 +572,6 @@ class Testbase // Reset state from a possible previous run GeneralUtility::purgeInstances(); - GeneralUtility::resetApplicationContext(); $classLoader = require __DIR__ . '/../../../../autoload.php'; SystemEnvironmentBuilder::run(0, SystemEnvironmentBuilder::REQUESTTYPE_BE | SystemEnvironmentBuilder::REQUESTTYPE_CLI);
[TASK] Remove unused functionality

Several properties and methods are unused in TYPO3 <I> and do not need to be instantiated anymore, so they can be removed.
TYPO3_testing-framework
train
5582ad0445029672daf0f18fb12a3d940dbc6bb7
diff --git a/testing/python/fixture.py b/testing/python/fixture.py index <HASH>..<HASH> 100644 --- a/testing/python/fixture.py +++ b/testing/python/fixture.py @@ -3790,17 +3790,28 @@ def test_pytest_fixture_setup_and_post_finalizer_hook(testdir): class TestScopeOrdering(object): """Class of tests that ensure fixtures are ordered based on their scopes (#2405)""" - @pytest.mark.parametrize("use_mark", [True, False]) - def test_func_closure_module_auto(self, testdir, use_mark): + @pytest.mark.parametrize("variant", ["mark", "autouse"]) + @pytest.mark.issue(github="#2405") + def test_func_closure_module_auto(self, testdir, variant, monkeypatch): """Semantically identical to the example posted in #2405 when ``use_mark=True``""" + monkeypatch.setenv("FIXTURE_ACTIVATION_VARIANT", variant) testdir.makepyfile( """ + import warnings + import os import pytest + VAR = 'FIXTURE_ACTIVATION_VARIANT' + VALID_VARS = ('autouse', 'mark') + + VARIANT = os.environ.get(VAR) + if VARIANT is None or VARIANT not in VALID_VARS: + warnings.warn(\"{!r}\" is not in {}, assuming autouse".format(VARIANT, VALID_VARS) ) + variant = 'mark' - @pytest.fixture(scope='module', autouse={autouse}) + @pytest.fixture(scope='module', autouse=VARIANT == 'autouse') def m1(): pass - if {use_mark}: + if VARIANT=='mark': pytestmark = pytest.mark.usefixtures('m1') @pytest.fixture(scope='function', autouse=True) @@ -3808,9 +3819,7 @@ class TestScopeOrdering(object): def test_func(m1): pass - """.format( - autouse=not use_mark, use_mark=use_mark - ) + """ ) items, _ = testdir.inline_genitems() request = FixtureRequest(items[0])
remove use of formatting in test_func_closure_module_auto

This makes it apparent that pytester should supply some kind of variable support
pytest-dev_pytest
train
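The pattern introduced above — drive a test's behaviour through an environment variable set with `monkeypatch.setenv` instead of interpolating values into generated source — is reusable on its own. Here is a minimal, generic example of that technique; it is not the pytest test suite itself, and the function and variable names are made up.

```python
import os
import pytest

def make_greeting():
    """Toy function whose behaviour depends on an environment variable."""
    variant = os.environ.get("GREETING_VARIANT", "plain")
    return "HELLO" if variant == "shout" else "hello"

@pytest.mark.parametrize("variant", ["plain", "shout"])
def test_greeting_variants(variant, monkeypatch):
    # monkeypatch restores the environment after each test automatically.
    monkeypatch.setenv("GREETING_VARIANT", variant)
    expected = "HELLO" if variant == "shout" else "hello"
    assert make_greeting() == expected
```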
f9429987eea8a83b8126c450e5ff07c57312f59b
diff --git a/ci/datamgr.py b/ci/datamgr.py index <HASH>..<HASH> 100755 --- a/ci/datamgr.py +++ b/ci/datamgr.py @@ -8,7 +8,6 @@ from pathlib import Path import click import pandas as pd import sqlalchemy as sa -from plumbum import local from toolz import dissoc SCRIPT_DIR = Path(__file__).parent.absolute() @@ -243,19 +242,11 @@ def parquet(tables, data_directory, ignore_missing_dependency, **params): ), ) @click.option( - '-l', - '--psql-path', - type=click.Path(exists=True), - required=os.name == 'nt', - default=None if os.name == 'nt' else '/usr/bin/psql', -) [email protected]( '--plpython/--no-plpython', help='Create PL/Python extension in database', default=True, ) -def postgres(schema, tables, data_directory, psql_path, plpython, **params): - psql = local[psql_path] +def postgres(schema, tables, data_directory, plpython, **params): logger.info('Initializing PostgreSQL...') engine = init_database( 'postgresql', params, schema, isolation_level='AUTOCOMMIT' @@ -270,8 +261,6 @@ def postgres(schema, tables, data_directory, psql_path, plpython, **params): if plpython: engine.execute("CREATE EXTENSION IF NOT EXISTS PLPYTHONU") - query = "COPY {} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')" - database = params['database'] for table in tables: src = data_directory / f'{table}.csv' @@ -298,23 +287,21 @@ def postgres(schema, tables, data_directory, psql_path, plpython, **params): "geo_multipolygon": Geometry("MULTIPOLYGON", srid=srid), }, ) - continue - - load = psql[ - '--host', - params['host'], - '--port', - params['port'], - '--username', - params['user'], - '--dbname', - database, - '--command', - query.format(table), - ] - with local.env(PGPASSWORD=params['password']): - with src.open('r') as f: - load(stdin=f) + else: + # Here we insert rows using COPY table FROM STDIN, by way of + # psycopg2's `copy_expert` API. + # + # We could use DataFrame.to_sql(method=callable), but that incurs + # an unnecessary round trip and requires more code: the `data_iter` + # argument would have to be turned back into a CSV before being + # passed to `copy_expert`. + sql = ( + f"COPY {table} FROM STDIN " + "WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')" + ) + with src.open('r') as file: + with engine.begin() as con, con.connection.cursor() as cur: + cur.copy_expert(sql=sql, file=file) engine.execute('VACUUM FULL ANALYZE')
refactor: remove psql_path from postgres data loading (#<I>)
ibis-project_ibis
train
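The ibis refactor replaces shelling out to `psql` with psycopg2's `copy_expert`, which streams a local CSV through `COPY ... FROM STDIN` over the existing connection. Stripped of the SQLAlchemy plumbing, the core call looks like the sketch below; the DSN, table name, and file path are placeholders, and a reachable PostgreSQL server plus the psycopg2 package are assumed.

```python
import psycopg2

def load_csv(table, csv_path, dsn="dbname=testdb user=postgres"):
    """Bulk-load a CSV file into `table` using COPY ... FROM STDIN."""
    # Note: the table name is interpolated directly and is trusted here.
    sql = (
        f"COPY {table} FROM STDIN "
        "WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')"
    )
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur, open(csv_path) as f:
        # copy_expert streams the file to the server; no client-side parsing
        # and no separate psql binary needed.
        cur.copy_expert(sql=sql, file=f)

# load_csv("functional_alltypes", "functional_alltypes.csv")
```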
2fa8d65d5ced8183794bae581a11eb088b082fa3
diff --git a/gulp/tasks/docs.js b/gulp/tasks/docs.js index <HASH>..<HASH> 100644 --- a/gulp/tasks/docs.js +++ b/gulp/tasks/docs.js @@ -136,7 +136,8 @@ function parseJSONToMarkdown(json) { } function mergeMixinDocs(componentJson) { - if (componentJson[0].mixes == null || componentJson[0].mixes === undefined) { + if (componentJson.length === 0 || + componentJson[0].mixes == null || componentJson[0].mixes === undefined) { return componentJson; }
Cope with class file that has no jsDoc comments.
elix_elix
train
98e69ad3fe0c8b01df329594e4955c1e6d149aaa
diff --git a/src/drakonli/PhpUtils/Spl/File/Info/Collection/Basic/BasicSplFileInfoCollection.php b/src/drakonli/PhpUtils/Spl/File/Info/Collection/Basic/BasicSplFileInfoCollection.php index <HASH>..<HASH> 100644 --- a/src/drakonli/PhpUtils/Spl/File/Info/Collection/Basic/BasicSplFileInfoCollection.php +++ b/src/drakonli/PhpUtils/Spl/File/Info/Collection/Basic/BasicSplFileInfoCollection.php @@ -18,6 +18,6 @@ class BasicSplFileInfoCollection extends AbstractBasicImmutableCollection implem */ public function __construct(array $files) { - $this->elements = $files; + parent::__construct($files); } } \ No newline at end of file
+ fixed spl file info collection based on new fix for immutable parameter bag
drakonli_php-utils
train
c6fc8ef62c4d931991a302a7fa25b06a4695fad6
diff --git a/src/foremast/utils/deep_chain_map.py b/src/foremast/utils/deep_chain_map.py index <HASH>..<HASH> 100644 --- a/src/foremast/utils/deep_chain_map.py +++ b/src/foremast/utils/deep_chain_map.py @@ -48,10 +48,10 @@ class DeepChainMap(collections.ChainMap): for mapping in self.maps: try: value = mapping[key] + map_value = value if isinstance(value, dict): - return dict(DeepChainMap(*list(mapping.get(key, {}) for mapping in self.maps))) - else: - return value + map_value = dict(DeepChainMap(*list(mapping.get(key, {}) for mapping in self.maps))) + return map_value except KeyError: pass return self.__missing__(key)
refactor: Simplify logic and set single return line
foremast_foremast
train
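`DeepChainMap` above extends `collections.ChainMap` so that nested dicts found under the same key are merged across maps rather than the first one simply shadowing the rest. A compact, self-contained illustration of that behaviour is below — it follows the same recipe but is not foremast's class, and the config dicts are invented.

```python
from collections import ChainMap

class DeepChainMap(ChainMap):
    """ChainMap that merges nested dictionaries instead of shadowing them."""

    def __getitem__(self, key):
        values = [m[key] for m in self.maps if key in m]
        if not values:
            return self.__missing__(key)
        first = values[0]
        if isinstance(first, dict):
            # Merge every dict found for this key; earlier maps win on conflicts.
            return dict(DeepChainMap(*[v for v in values if isinstance(v, dict)]))
        return first

env_overrides = {"app": {"timeout": 30}}
defaults = {"app": {"timeout": 10, "retries": 3}, "region": "us-east-1"}

merged = DeepChainMap(env_overrides, defaults)
print(merged["app"])     # timeout 30 from the override, retries 3 kept from defaults
print(merged["region"])  # 'us-east-1'
```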
bc863b2dbc4bd07e53ed647f8c33b652fd099336
diff --git a/commerce-account-web/src/main/java/com/liferay/commerce/account/web/internal/frontend/CommerceAccountClayTable.java b/commerce-account-web/src/main/java/com/liferay/commerce/account/web/internal/frontend/CommerceAccountClayTable.java index <HASH>..<HASH> 100644 --- a/commerce-account-web/src/main/java/com/liferay/commerce/account/web/internal/frontend/CommerceAccountClayTable.java +++ b/commerce-account-web/src/main/java/com/liferay/commerce/account/web/internal/frontend/CommerceAccountClayTable.java @@ -15,6 +15,7 @@ package com.liferay.commerce.account.web.internal.frontend; import com.liferay.commerce.account.constants.CommerceAccountConstants; +import com.liferay.commerce.account.constants.CommerceAccountPortletKeys; import com.liferay.commerce.account.model.CommerceAccount; import com.liferay.commerce.account.service.CommerceAccountService; import com.liferay.commerce.account.web.internal.model.Account; @@ -39,10 +40,13 @@ import com.liferay.portal.kernel.language.LanguageUtil; import com.liferay.portal.kernel.portlet.PortletProvider; import com.liferay.portal.kernel.portlet.PortletProviderUtil; import com.liferay.portal.kernel.portlet.PortletQName; +import com.liferay.portal.kernel.portlet.PortletURLFactory; +import com.liferay.portal.kernel.portlet.PortletURLFactoryUtil; import com.liferay.portal.kernel.search.Sort; import com.liferay.portal.kernel.security.permission.ActionKeys; import com.liferay.portal.kernel.security.permission.resource.ModelResourcePermission; import com.liferay.portal.kernel.theme.ThemeDisplay; +import com.liferay.portal.kernel.util.Constants; import com.liferay.portal.kernel.util.Portal; import com.liferay.portal.kernel.util.WebKeys; import com.liferay.portal.kernel.webserver.WebServerServletTokenUtil; @@ -50,6 +54,8 @@ import com.liferay.portal.kernel.webserver.WebServerServletTokenUtil; import java.util.ArrayList; import java.util.List; +import javax.portlet.ActionRequest; +import javax.portlet.PortletRequest; import javax.portlet.PortletURL; import javax.servlet.http.HttpServletRequest; @@ -97,11 +103,32 @@ public class CommerceAccountClayTable String viewURL = _getAccountViewDetailURL( account.getAccountId(), httpServletRequest); - ClayTableAction clayTableAction = new ClayTableAction( + ClayTableAction clayTableViewAction = new ClayTableAction( viewURL, StringPool.BLANK, LanguageUtil.get(httpServletRequest, "view"), false, false); - clayTableActions.add(clayTableAction); + clayTableActions.add(clayTableViewAction); + } + + if (_modelResourcePermission.contains( + themeDisplay.getPermissionChecker(), account.getAccountId(), + ActionKeys.UPDATE)) { + + String setActiveURL = _getAccountSetActiveURL( + account.getAccountId(), httpServletRequest); + + ClayTableAction clayTableSetActiveAction = new ClayTableAction( + "commerce-button--good", setActiveURL, StringPool.BLANK, + LanguageUtil.get(httpServletRequest, "activate"), false, false); + + if (account.getActive()) { + clayTableSetActiveAction = new ClayTableAction( + "commerce-button--bad", setActiveURL, StringPool.BLANK, + LanguageUtil.get(httpServletRequest, "deactivate"), false, + false); + } + + clayTableActions.add(clayTableSetActiveAction); } return clayTableActions; @@ -143,6 +170,11 @@ public class CommerceAccountClayTable clayTableSchemaBuilder.addField("address", "address"); + ClayTableSchemaField statusField = clayTableSchemaBuilder.addField( + "active", "status"); + + statusField.setContentRenderer("commerceTableCellActive"); + return clayTableSchemaBuilder.build(); } @@ -194,11 
+226,19 @@ public class CommerceAccountClayTable commerceAccount.getLogoId())); } + String statusLabel = "inactive"; + + if (commerceAccount.isActive()) { + statusLabel = "active"; + } + accounts.add( new Account( commerceAccount.getCommerceAccountId(), - commerceAccount.getName(), commerceAccount.getEmail(), + commerceAccount.isActive(), commerceAccount.getName(), + commerceAccount.getEmail(), _getDefaultBillingCommerceAddress(commerceAccount), + LanguageUtil.get(httpServletRequest, statusLabel), thumbnailSB.toString(), _getAccountViewDetailURL( commerceAccount.getCommerceAccountId(), @@ -213,6 +253,27 @@ public class CommerceAccountClayTable return true; } + private String _getAccountSetActiveURL( + long commerceAccountId, HttpServletRequest httpServletRequest) + throws PortalException { + + PortletURL activateURL = _portletURLFactory.create( + httpServletRequest, CommerceAccountPortletKeys.COMMERCE_ACCOUNT, + PortletRequest.ACTION_PHASE); + + activateURL.setParameter( + ActionRequest.ACTION_NAME, "editCommerceAccount"); + activateURL.setParameter(Constants.CMD, "setActive"); + activateURL.setParameter( + "commerceAccountId", String.valueOf(commerceAccountId)); + + String redirect = _portal.getCurrentURL(httpServletRequest); + + activateURL.setParameter("redirect", redirect); + + return activateURL.toString(); + } + private String _getAccountViewDetailURL( long commerceAccountId, HttpServletRequest httpServletRequest) throws PortalException { @@ -278,4 +339,7 @@ public class CommerceAccountClayTable @Reference private Portal _portal; + @Reference + private PortletURLFactory _portletURLFactory; + } \ No newline at end of file
COMMERCE-<I> Refactor Account table to change and show active status of accounts
liferay_com-liferay-commerce
train
a7fe64e43069f30ecce3c20bdd1c02da867845ac
diff --git a/libs/verysimple/Util/VsDateUtil.php b/libs/verysimple/Util/VsDateUtil.php index <HASH>..<HASH> 100644 --- a/libs/verysimple/Util/VsDateUtil.php +++ b/libs/verysimple/Util/VsDateUtil.php @@ -12,6 +12,18 @@ */ class VsDateUtil { + /** @var int one day in milliseconds */ + static $ONE_DAY = 86400000; + + /** @var int one hour in milliseconds */ + static $ONE_HOUR = 3600000; + + /** @var int one minute in milliseconds */ + static $ONE_MINUTE = 6000; + + /** @var int one second in milliseconds */ + static $ONE_SECOND = 1000; + /** * Return current date as string in the specified format * @param string $format
added utility vars for days, hours, minutes, seconds to VsDateUtil
jasonhinkle_phreeze
train
e994d6c706e625dbceece46df3a4b53d9d939634
diff --git a/ext/memory/src/test/java/org/minimalj/repository/memory/InMemoryDbSelfReferenceTest.java b/ext/memory/src/test/java/org/minimalj/repository/memory/InMemoryDbSelfReferenceTest.java index <HASH>..<HASH> 100644 --- a/ext/memory/src/test/java/org/minimalj/repository/memory/InMemoryDbSelfReferenceTest.java +++ b/ext/memory/src/test/java/org/minimalj/repository/memory/InMemoryDbSelfReferenceTest.java @@ -2,7 +2,6 @@ package org.minimalj.repository.memory; import org.junit.Assert; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Test; public class InMemoryDbSelfReferenceTest { @@ -23,7 +22,7 @@ public class InMemoryDbSelfReferenceTest { Assert.assertEquals(e, e.reference); } - @Test @Ignore // not yet solved + @Test public void testCycleWithOneInserts() { TestEntity e1 = new TestEntity(); TestEntity e2 = new TestEntity();
InMemoryDbSelfReferenceTest.testCycleWithOneInserts actually works; no need to ignore it anymore
BrunoEberhard_minimal-j
train