hash     stringlengths   40 .. 40
diff     stringlengths   131 .. 114k
message  stringlengths   7 .. 980
project  stringlengths   5 .. 67
split    stringclasses   1 value
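Each record below is one flat row of those five fields. A minimal sketch of iterating such a dump, assuming a JSON-lines serialization (the field names and the 40-character hash bound come from the schema above; the file name and layout are assumptions):

```python
import json

# One git commit per record: hash, unified diff, commit message,
# source project, and dataset split.
FIELDS = ("hash", "diff", "message", "project", "split")

def iter_records(path):
    """Yield one dict per commit record from a JSON-lines dump."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            record = json.loads(line)
            # Keep only the documented columns, in schema order.
            yield {name: record.get(name, "") for name in FIELDS}

for rec in iter_records("commits.jsonl"):  # hypothetical file name
    assert len(rec["hash"]) == 40  # schema says hashes are exactly 40 chars
    print(rec["project"], "|", rec["message"][:72])
```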
afc5ea6d192be6541374728223772d34652019b1
diff --git a/bin/generator.js b/bin/generator.js index <HASH>..<HASH> 100755 --- a/bin/generator.js +++ b/bin/generator.js @@ -29,6 +29,8 @@ var compositeName = { + (node.size || '') + ((node.sign === true) ? '_s' : '') + ((node.sign === false) ? '_u' : '') + + (node.offset ? (' offset=' + node.offset) : '') + + (node.align ? (' align=' + node.align) : '') `, store: `node.type diff --git a/test/basic.js b/test/basic.js index <HASH>..<HASH> 100644 --- a/test/basic.js +++ b/test/basic.js @@ -170,9 +170,27 @@ describe('basic', function () { it('store offset alight #16', function (done) { checker( -`(i64.store offset=8 align=4 +`(i64.store (get_local $0) - (i64.const 1))`, 2); + (i64.const 1)) +(i64.store offset=123 + (get_local $0) + (i64.const 1)) +(i64.store align=456 + (get_local $0) + (i64.const 1)) +(i64.store offset=123 align=456 + (get_local $0) + (i64.const 1)) +(i64.load + (get_local $0)) +(i64.load offset=123 + (get_local $0)) +(i64.load align=456 + (get_local $0)) +(i64.load offset=123 align=456 + (get_local $0)) +(nop)`, 2); done(); });
fixes #<I>; offset and align not supported on load/store operations
drom_wast-codegen
train
747b7567d7ecd098ad7d1c5bda06638ac348f23e
diff --git a/sanic/response.py b/sanic/response.py index <HASH>..<HASH> 100644 --- a/sanic/response.py +++ b/sanic/response.py @@ -171,8 +171,8 @@ def text(body, status=200, headers=None, def raw(body, status=200, headers=None, content_type="application/octet-stream"): """ - Returns response object with body in text format. - :param body: Response data to be encoded. + Returns response object without encoding the body. + :param body: Response data. :param status: Response code. :param headers: Custom Headers. :param content_type:
Changed docstring for raw response
huge-success_sanic
train
f08c5343a530ad8152abace85ce66f3a13631f1d
diff --git a/classes/Pods.php b/classes/Pods.php index <HASH>..<HASH> 100644 --- a/classes/Pods.php +++ b/classes/Pods.php @@ -1061,7 +1061,8 @@ class Pods implements Iterator { $attachment_id = 0; switch ( $image_field ) { case 'post_thumbnail': - $attachment_id = get_post_thumbnail_id(); + // Pods will auto-get the thumbnail ID if this isn't an attachment. + $attachment_id = get_post_thumbnail_id( $this->id() ); break; case 'image_attachment': if ( isset( $traverse_names[0] ) ) {
Fix getting the post thumb by correct ID
pods-framework_pods
train
ac360170decd11aabdc717091e1405e88dc7f9ae
diff --git a/lib/l10n-en_GB.js b/lib/l10n-en_GB.js index <HASH>..<HASH> 100644 --- a/lib/l10n-en_GB.js +++ b/lib/l10n-en_GB.js @@ -151,13 +151,13 @@ exports.messages = { // structure/display-only , 'structure.display-only.broken-links': 'The document <strong>must not</strong> have any broken internal links or broken links to other resources at <code>w3.org</code>. The document <strong>should not</strong> have any other broken links.' , 'structure.display-only.customised-paragraph': 'The document <strong>must</strong> include at least one customized paragraph. \ - section <strong>should</strong> include the title page date (i.e., the one next to the maturity level at the top of the document). \ -e paragraphs <strong>should</strong> explain the publication context, including rationale and relationships to other work. \ -<a href="http://www.w3.org/2001/06/manual/#Status">examples and more discussion in the Manual of Style</a>.' +This section <strong>should</strong> include the title page date (i.e., the one next to the maturity level at the top of the document). \ +These paragraphs <strong>should</strong> explain the publication context, including rationale and relationships to other work. \ +See <a href="http://www.w3.org/2001/06/manual/#Status">examples and more discussion in the Manual of Style</a>.' , 'structure.display-only.known-disclosures': 'It <strong>must not</strong> indicate the number of known disclosures at the time of publication.' , 'structure.display-only.old-pubrules': 'This alternative pubrules checker is <strong>experimental</strong>, and provided only for early testing. <br> \ -se refer to <a href="http://www.w3.org/2005/07/pubrules?uimode=filter&uri=">Technical Report Publication Policy (Pubrules)</a> \ -extended information about the publication rules.' +Please refer to <a href="http://www.w3.org/2005/07/pubrules?uimode=filter&uri=">Technical Report Publication Policy (Pubrules)</a> \ +for extended information about the publication rules.' // style/sheet , "style.sheet.last": "W3C TR style sheet must be last." , "style.sheet.not-found": "Missing W3C TR style sheet." diff --git a/lib/l10n-es_ES.js b/lib/l10n-es_ES.js index <HASH>..<HASH> 100644 --- a/lib/l10n-es_ES.js +++ b/lib/l10n-es_ES.js @@ -151,13 +151,13 @@ exports.messages = { // structure/display-only , 'structure.display-only.broken-links': 'The document <strong>must not</strong> have any broken internal links or broken links to other resources at <code>w3.org</code>. The document <strong>should not</strong> have any other broken links.' , 'structure.display-only.customised-paragraph': 'The document <strong>must</strong> include at least one customized paragraph. \ - section <strong>should</strong> include the title page date (i.e., the one next to the maturity level at the top of the document). \ -e paragraphs <strong>should</strong> explain the publication context, including rationale and relationships to other work. \ -<a href="http://www.w3.org/2001/06/manual/#Status">examples and more discussion in the Manual of Style</a>.' +This section <strong>should</strong> include the title page date (i.e., the one next to the maturity level at the top of the document). \ +These paragraphs <strong>should</strong> explain the publication context, including rationale and relationships to other work. \ +See <a href="http://www.w3.org/2001/06/manual/#Status">examples and more discussion in the Manual of Style</a>.' , 'structure.display-only.known-disclosures': 'It <strong>must not</strong> indicate the number of known disclosures at the time of publication.' , 'structure.display-only.old-pubrules': 'This alternative pubrules checker is <strong>experimental</strong>, and provided only for early testing. <br> \ -se refer to <a href="http://www.w3.org/2005/07/pubrules?uimode=filter&uri=">Technical Report Publication Policy (Pubrules)</a> \ -extended information about the publication rules.' +Please refer to <a href="http://www.w3.org/2005/07/pubrules?uimode=filter&uri=">Technical Report Publication Policy (Pubrules)</a> \ +for extended information about the publication rules.' // style/sheet , "style.sheet.last": "W3C TR style sheet must be last." , "style.sheet.not-found": "Missing W3C TR style sheet."
Fix copy removed accidentally around PR #<I>.
w3c_specberus
train
1ad907f262359c67c90ccf57db084a339fa9f1bd
diff --git a/openquake/calculators/classical.py b/openquake/calculators/classical.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/classical.py +++ b/openquake/calculators/classical.py @@ -244,8 +244,8 @@ class ClassicalCalculator(base.HazardCalculator): oq = self.oqparam N = len(self.sitecol) trt_sources = self.csm.get_trt_sources(optimize_dupl=True) - maxweight = min(self.csm.get_maxweight( - trt_sources, weight, oq.concurrent_tasks), 1E6) + maxweight = self.csm.get_maxweight( + trt_sources, weight, oq.concurrent_tasks) maxdist = int(max(oq.maximum_distance.values())) if oq.task_duration is None: # inferred # from 1 minute up to 1 day
Removed maxweight limit to 1E6 [skip CI]
gem_oq-engine
train
6e59d4bb06eba465a8828eef4f7d330984620dcc
diff --git a/cxx-squid/src/main/java/org/sonar/cxx/preprocessor/StandardDefinitions.java b/cxx-squid/src/main/java/org/sonar/cxx/preprocessor/StandardDefinitions.java index <HASH>..<HASH> 100644 --- a/cxx-squid/src/main/java/org/sonar/cxx/preprocessor/StandardDefinitions.java +++ b/cxx-squid/src/main/java/org/sonar/cxx/preprocessor/StandardDefinitions.java @@ -55,6 +55,7 @@ public final class StandardDefinitions { .put("dynamic_cast", "__dynamic_cast") .put("explicit", "__explicit") .put("export", "__export") + .put("final", "__final") .put("friend", "__friend") .put("mutable", "__mutable") .put("namespace", "__namespace")
An argument called "final" generates a parsing error #<I>. Add final macro to let C code be parsed by C++ parser.
SonarOpenCommunity_sonar-cxx
train
5a10417347eb01d6b379dace2838180d4a53a80f
diff --git a/bin/publish.py b/bin/publish.py index <HASH>..<HASH> 100755 --- a/bin/publish.py +++ b/bin/publish.py @@ -378,11 +378,11 @@ def check_new_version(new_version): should_continue = input( 'You appear to be releasing a new version, {new_version}, without having ' 'previously run a prerelease.\n(Last version found was {previous_version})\n' - 'Are you sure you know what you\'re doing? (Y/n)'.format( + 'Are you sure you know what you\'re doing? (N/!)'.format( new_version=new_version, previous_version=last_version['__version__'] ) ) - if not should_continue == 'Y': + if not should_continue == '!': raise Exception('Bailing! Run a pre-release before continuing.') return True @@ -424,11 +424,11 @@ def check_for_cruft(autoclean): 'Found potentially crufty directories:\n' ' {found_cruft}\n' '***We strongly recommend releasing from a fresh git clone!***\n' - 'Automatically remove these directories and continue? (y/N)'.format( + 'Automatically remove these directories and continue? (N/!)'.format( found_cruft='\n '.join(found_cruft) ) ) - if wipeout == 'y' or wipeout == 'Y': + if wipeout == '!': for cruft_dir in found_cruft: subprocess.check_output(['rm', '-rfv', cruft_dir]) else: @@ -451,11 +451,11 @@ def check_for_cruft(autoclean): wipeout = input( 'Found {n_files} .pyc files.\n' 'We strongly recommend releasing from a fresh git clone!\n' - 'Automatically remove these files and continue? (y/N)'.format( + 'Automatically remove these files and continue? (N/!)'.format( n_files=len(found_pyc_files) ) ) - if wipeout == 'y' or wipeout == 'Y': + if wipeout == '!': for file_ in found_pyc_files: os.unlink(file_) else:
Make it harder to do potentially foolish things during release Summary: Switch from y/N, Y/n, to N/! -- make em hit shift. Test Plan: N/A Reviewers: nate, alangenfeld Reviewed By: nate Differential Revision: <URL>
dagster-io_dagster
train
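The message above explains the prompt change in the diff: confirmation now requires `!`, a shifted character, so a reflexive lowercase `y` can no longer wave through a destructive release step. A minimal standalone sketch of that pattern (the function name and prompt text are illustrative, not dagster's actual API):

```python
def confirm(prompt):
    # Only '!' (a shifted key) confirms; anything else aborts, so the
    # habitual 'y' or a bare Enter always takes the safe path.
    return input(prompt + " (N/!) ").strip() == "!"

if not confirm("Remove build cruft and continue?"):
    raise SystemExit("Bailing! Clean the working tree before releasing.")
```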
67bd68d10f053f0ea616b0990ebdbecf67417a58
diff --git a/django_q/__init__.py b/django_q/__init__.py index <HASH>..<HASH> 100644 --- a/django_q/__init__.py +++ b/django_q/__init__.py @@ -11,7 +11,8 @@ default_app_config = 'django_q.apps.DjangoQConfig' # root imports will slowly be deprecated. # please import from the relevant sub modules -if get_version().split('.')[1][0] != '9': +split_version = get_version().split('.') +if split_version[1][0] != '9' and split_version[1][:2]!='10': from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size from .models import Task, Schedule, Success, Failure from .cluster import Cluster
Recognize Django <I> versions
Koed00_django-q
train
7e581f488237aa9f926f1fcffa22450a8bab7ee2
diff --git a/microsoft-azure-storage/src/com/microsoft/azure/storage/core/ExecutionEngine.java b/microsoft-azure-storage/src/com/microsoft/azure/storage/core/ExecutionEngine.java index <HASH>..<HASH> 100644 --- a/microsoft-azure-storage/src/com/microsoft/azure/storage/core/ExecutionEngine.java +++ b/microsoft-azure-storage/src/com/microsoft/azure/storage/core/ExecutionEngine.java @@ -214,6 +214,13 @@ public final class ExecutionEngine { if (task.isSent()) { ExecutionEngine.fireRequestCompletedEvent(opContext, request, task.getResult()); } + + try { + request.getInputStream().close(); + } + catch (final IOException ex) { + request.disconnect(); + } } // Evaluate Retry Policy
[Android] Clean up HttpUrlConnection to fix strict mode failure
Azure_azure-storage-android
train
da6c0e80bc602dbf0f30f66a682a676c5b161ab4
diff --git a/src/graph/matching/encounter.js b/src/graph/matching/encounter.js index <HASH>..<HASH> 100644 --- a/src/graph/matching/encounter.js +++ b/src/graph/matching/encounter.js @@ -159,6 +159,23 @@ export default class Encounter { return promise.then(() => { if(pushedData) this.currentData.pop(); + + /* + * If the input token is skippable also evaluate the expression + * with the token skipped. + * + * This is done if: + * 1) The token is skippable + * 2) The current graph supports fuzzy matching + * 3) The token is not the last one + */ + const token = this.token(nextIndex); + if(token && token.skippable + && this.supportsFuzzy + && nextIndex !== this.tokens.length - 1 + ) { + return this.next(nodes, (score || 0), (consumedTokens || 0) + 1, data); + } }); } diff --git a/src/graph/sub.js b/src/graph/sub.js index <HASH>..<HASH> 100644 --- a/src/graph/sub.js +++ b/src/graph/sub.js @@ -70,7 +70,7 @@ export default class SubNode extends Node { } // Set the index we were called at - let previousIndex = this.state.currentIndex; + const previousIndex = this.state.currentIndex; this.state.currentIndex = encounter.currentIndex; const variants = []; @@ -94,7 +94,7 @@ export default class SubNode extends Node { promise = promise.then(() => { return encounter.next( - v.score - encounter.currentScore - PARSER_PENALTY, + v.score - PARSER_PENALTY, v.index - encounter.currentIndex, v.data ); @@ -114,6 +114,7 @@ export default class SubNode extends Node { return branchIntoVariants(cached); } + const baseScore = encounter.currentScore; const onMatch = match => { let result = match.data; if(result !== null && typeof result !== 'undefined') { @@ -126,7 +127,7 @@ export default class SubNode extends Node { variants.push({ index: match.index, - score: encounter.currentScore, + score: encounter.currentScore - baseScore, data: result }); @@ -135,7 +136,7 @@ export default class SubNode extends Node { if(previousNonSkipped !== match.index) { variants.push({ index: previousNonSkipped, - score: encounter.currentScore, + score: encounter.currentScore - baseScore, data: result }); } diff --git a/test/combined.test.js b/test/combined.test.js index <HASH>..<HASH> 100644 --- a/test/combined.test.js +++ b/test/combined.test.js @@ -50,6 +50,23 @@ describe('Intents', function() { }); }); + it('Match (skippable in input): show for orders', function() { + return intents.match('show for orders', { fuzzy: true }) + .then(results => { + expect(results.matches.length).to.equal(1); + expect(results.best.intent).to.equal('orders'); + }); + }); + + it('Match (skippable in expression): orders Test', function() { + // Test that skipping `for` works fine + return intents.match('orders Test') + .then(results => { + expect(results.matches.length).to.equal(1); + expect(results.best.intent).to.equal('customer:orders'); + }); + }); + it('No match: show', function() { return intents.match('show') .then(results => {
fix(graph): Support skipping tokens in input expression
aholstenson_ecolect-js
train
d56deb0208d90537c32b7bfd1fab6c30c96e064f
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "dependencies": { "archy": "1.0.0", "eraro": "0.4.1", - "gate-executor": "1.1.0", + "gate-executor": "1.1.1", "gex": "0.2.2", "jsonic": "0.2.2", "json-stringify-safe": "5.0.1", diff --git a/seneca.js b/seneca.js index <HASH>..<HASH> 100644 --- a/seneca.js +++ b/seneca.js @@ -656,7 +656,14 @@ function make_seneca (initial_options) { var raw_pattern = args.pattern - var action = args.action || function action (msg, done) { + var pattern = self.util.clean(raw_pattern) + + if (!_.keys(pattern)) { + throw internals.error('add_empty_pattern', {args: Common.clean(args)}) + } + + + var action = args.action || function default_action (msg, done) { done.call(this, null, msg.default$ || null) } @@ -688,12 +695,6 @@ function make_seneca (initial_options) { var internal_catchall = (raw_pattern.internal$ && raw_pattern.internal$.catchall !== null) ? !!raw_pattern.internal$.catchall : !!so.internal.catchall - var pattern = self.util.clean(raw_pattern) - - if (!_.keys(pattern)) { - throw internals.error('add_empty_pattern', {args: Common.clean(args)}) - } - var pattern_rules = _.clone(action.validate || {}) _.each(pattern, function (v, k) { if (_.isObject(v)) { @@ -718,6 +719,7 @@ function make_seneca (initial_options) { // Canonical object form of the action pattern. actmeta.msgcanon = Jsonic(actmeta.pattern) + var priormeta = self.find(pattern) if (priormeta) { @@ -790,6 +792,7 @@ function make_seneca (initial_options) { } } + function modify_action (seneca, actmeta) { _.each(private$.action_modifiers, function (actmod) { actmeta = actmod.call(seneca, actmeta) diff --git a/test/close.test.js b/test/close.test.js index <HASH>..<HASH> 100644 --- a/test/close.test.js +++ b/test/close.test.js @@ -44,7 +44,6 @@ describe('close', function () { tmp.sc = 1 }) .close(function () { - console.log('tmp.sc', tmp.sc) expect(1).to.equal(tmp.sc) done() })
gate-executor <I>, resolves #<I>
senecajs_seneca
train
d2c94d44ba134c757aaa4201626bec04ac634368
diff --git a/microcosm_logging/factories.py b/microcosm_logging/factories.py index <HASH>..<HASH> 100644 --- a/microcosm_logging/factories.py +++ b/microcosm_logging/factories.py @@ -29,7 +29,7 @@ from microcosm.api import defaults debug=[], info=["boto", "newrelic"], warn=["bravado_core", "requests", "botocore.vendored.requests", "swagger_spec_validator"], - error=["bravado.requests_client"], + error=["bravado.requests_client", "FuturesSession"], ), override=dict( debug=[],
Add another logging protection for requests-futures' FuturesSession (#<I>) The latest release of requests-futures ([<I>](<URL>)) causes errors when used with the loggly client. Loggly calls the futures handler in such a way that causes it to emit a deprecation warning, which then needs to be logged, which is then handled, which emits the deprecation, and on and on.
globality-corp_microcosm-logging
train
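The message above describes a feedback loop: the Loggly handler makes `FuturesSession` emit a deprecation warning, that warning is logged and handled, which emits it again, and so on. The diff breaks the loop by raising that logger's minimum level to error. A minimal sketch of the same mechanism using only the standard library (the logger name comes from the diff; the handler setup is illustrative):

```python
import logging

logging.basicConfig(level=logging.DEBUG)

# Raise the threshold on the chatty third-party logger so its warnings
# never reach a handler that could re-trigger them.
logging.getLogger("FuturesSession").setLevel(logging.ERROR)

logging.getLogger("FuturesSession").warning("deprecation noise")  # dropped
logging.getLogger("FuturesSession").error("real failure")         # still emitted
```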
f9d9b3e01e20cde9b718ae363b28d7de23d54334
diff --git a/tests/FactoryMuffinTest.php b/tests/FactoryMuffinTest.php index <HASH>..<HASH> 100644 --- a/tests/FactoryMuffinTest.php +++ b/tests/FactoryMuffinTest.php @@ -112,6 +112,12 @@ class FactoryMuffinTest extends AbstractTestCase $this->assertSame('Jack Sparrow', $obj->getName()); } + public function testCanDetectNonPublicSetters() + { + $obj = static::$fm->instance('SetterTestModelWithNonPublicSetter'); + $this->assertSame('Jack Sparrow', $obj->getName()); + } + public function testCamelization() { $var = FactoryMuffin::camelize('foo_bar'); @@ -240,3 +246,27 @@ class SetterTestModelWithSetter return $this->name; } } + +class SetterTestModelWithNonPublicSetter +{ + private $name; + + public function __set($key, $value) + { + if ($key === 'name') { + $this->setName($value); + } else { + $this->$key = $value; + } + } + + private function setName($name) + { + $this->name = $name; + } + + public function getName() + { + return $this->name; + } +} diff --git a/tests/factories/main.php b/tests/factories/main.php index <HASH>..<HASH> 100644 --- a/tests/factories/main.php +++ b/tests/factories/main.php @@ -67,3 +67,7 @@ $fm->define('ModelWithStaticMethodFactory')->setDefinitions([ $fm->define('SetterTestModelWithSetter')->setDefinitions([ 'name' => 'Jack Sparrow', ]); + +$fm->define('SetterTestModelWithNonPublicSetter')->setDefinitions([ + 'name' => 'Jack Sparrow', +]);
Added non-public setter method tests. Closes #<I>.
thephpleague_factory-muffin
train
18eb32d30cf2c64e35249eb22bb5424c60bc45fa
diff --git a/DBS2To3Migration/test/DBSSqlQueries.py b/DBS2To3Migration/test/DBSSqlQueries.py index <HASH>..<HASH> 100644 --- a/DBS2To3Migration/test/DBSSqlQueries.py +++ b/DBS2To3Migration/test/DBSSqlQueries.py @@ -403,7 +403,7 @@ class DBSSqlQueries(object): JOIN {ownerDBS2}.PERSON PS22 ON FS2.LASTMODIFIEDBY=PS22.ID JOIN {ownerDBS2}.BLOCK BL2 ON FS2.BLOCK=BL2.ID JOIN {ownerDBS2}.FILETYPE FT2 ON FT2.ID=FS2.FILETYPE - JOIN {db_owner_dbs2}.FILESTATUS FST ON FST.ID=FS2.FILESTATUS + JOIN {ownerDBS2}.FILESTATUS FST ON FST.ID=FS2.FILESTATUS ) GROUP BY FILE_ID, LOGICAL_FILE_NAME, IS_FILE_VALID, DATASET_ID, DATASET, @@ -792,9 +792,19 @@ class DBSSqlQueries(object): if not binds: binds = {} - result = self.dbFormatter.formatCursor(cursors[0]) - - connection.close() + connection = self.dbi.connection() + try: + cursors = self.dbi.processData(self.sqlDict[query], + binds, + connection, + transaction=False, + returnCursor=True) + except: + raise + else: + result = self.dbFormatter.formatCursor(cursors[0]) + finally: + connection.close() if sort: return sorted(result, key=lambda entry: entry[self.sqlPrimaryKey[query]])
Wrong variable name and close connection when exception is thrown.
dmwm_DBS
train
c209e6d67c3aaab4281d0f0fb68d1bc72e73589b
diff --git a/redis_collections/base.py b/redis_collections/base.py index <HASH>..<HASH> 100644 --- a/redis_collections/base.py +++ b/redis_collections/base.py @@ -10,10 +10,9 @@ from decimal import Decimal from fractions import Fraction import uuid -try: - import cPickle as pickle -except ImportError: - import pickle as pickle # NOQA +# We use pickle instead of cPickle on Python 2 intentionally, see +# http://bugs.python.org/issue5518 +import pickle import redis import six
Switch from cPickle on Python 2
honzajavorek_redis-collections
train
6078b938168e4a01842ca83c914cf40ecb3c3f79
diff --git a/fastlane/lib/fastlane/actions/create_pull_request.rb b/fastlane/lib/fastlane/actions/create_pull_request.rb index <HASH>..<HASH> 100644 --- a/fastlane/lib/fastlane/actions/create_pull_request.rb +++ b/fastlane/lib/fastlane/actions/create_pull_request.rb @@ -43,6 +43,9 @@ module Fastlane # Add reviewers to pull request add_reviewers(params, number) if params[:reviewers] || params[:team_reviewers] + # Add a milestone to pull request + add_milestone(params, number) if params[:milestone] + Actions.lane_context[SharedValues::CREATE_PULL_REQUEST_HTML_URL] = html_url Actions.lane_context[SharedValues::CREATE_PULL_REQUEST_NUMBER] = number return html_url @@ -111,6 +114,27 @@ module Fastlane ) end + def self.add_milestone(params, number) + payload = {} + if params[:milestone] + payload["milestone"] = params[:milestone] + end + + GithubApiAction.run( + server_url: params[:api_url], + api_token: params[:api_token], + http_method: 'PATCH', + path: "repos/#{params[:repo]}/issues/#{number}", + body: payload, + error_handlers: { + '*' => proc do |result| + UI.error("GitHub responded with #{result[:status]}: #{result[:body]}") + return nil + end + } + ) + end + ##################################################### # @!group Documentation ##################################################### @@ -157,6 +181,11 @@ module Fastlane description: "The labels for the pull request", type: Array, optional: true), + FastlaneCore::ConfigItem.new(key: :milestone, + env_name: "GITHUB_PULL_REQUEST_MILESTONE", + description: "The milestone ID (Integer) for the pull request", + type: Numeric, + optional: true), FastlaneCore::ConfigItem.new(key: :head, env_name: "GITHUB_PULL_REQUEST_HEAD", description: "The name of the branch where your changes are implemented (defaults to the current branch name)", diff --git a/fastlane/spec/actions_specs/create_pull_request_spec.rb b/fastlane/spec/actions_specs/create_pull_request_spec.rb index <HASH>..<HASH> 100644 --- a/fastlane/spec/actions_specs/create_pull_request_spec.rb +++ b/fastlane/spec/actions_specs/create_pull_request_spec.rb @@ -65,6 +65,16 @@ describe Fastlane do 'User-Agent' => 'fastlane-github_api' } ).to_return(status: 201, body: "", headers: {}) + + stub_request(:patch, "https://api.github.com/repos/fastlane/fastlane/issues/#{number}"). + with( + body: '{"milestone":42}', + headers: { + 'Authorization' => 'Basic MTIzNDU2Nzg5', + 'Host' => 'api.github.com:443', + 'User-Agent' => 'fastlane-github_api' + } + ).to_return(status: 201, body: "", headers: {}) end it 'correctly submits to github' do @@ -156,6 +166,21 @@ describe Fastlane do expect(result).to eq('https://github.com/fastlane/fastlane/pull/1347') end + + it 'correctly submits to github with a milestone' do + result = Fastlane::FastFile.new.parse(" + lane :test do + create_pull_request( + api_token: '123456789', + title: 'test PR', + repo: 'fastlane/fastlane', + milestone: 42 + ) + end + ").runner.execute(:test) + + expect(result).to eq('https://github.com/fastlane/fastlane/pull/1347') + end end end end
[action] introduce milestone support for create_pull_request action (#<I>) * Add support for adding milestones to PRs * Add specs for milestone * Apply rubocop * Update description
fastlane_fastlane
train
026d0555685087845b74dd87a0417b5a164b1c13
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb +++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb @@ -286,6 +286,10 @@ module ActiveRecord # Inserts the given fixture into the table. Overridden in adapters that require # something beyond a simple insert (eg. Oracle). def insert_fixture(fixture, table_name) + execute fixture_sql(fixture, table_name), 'Fixture Insert' + end + + def fixture_sql(fixture, table_name) columns = schema_cache.columns_hash(table_name) key_list = [] @@ -294,7 +298,7 @@ module ActiveRecord quote(value, columns[name]) end - execute "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})", 'Fixture Insert' + "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})" end def empty_insert_statement_value diff --git a/activerecord/lib/active_record/fixtures.rb b/activerecord/lib/active_record/fixtures.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/fixtures.rb +++ b/activerecord/lib/active_record/fixtures.rb @@ -504,16 +504,8 @@ module ActiveRecord connection.transaction(:requires_new => true) do fixture_sets.each do |fs| conn = fs.model_class.respond_to?(:connection) ? fs.model_class.connection : connection - table_rows = fs.table_rows - - table_rows.keys.each do |table| - conn.delete "DELETE FROM #{conn.quote_table_name(table)}", 'Fixture Delete' - end - - table_rows.each do |fixture_set_name, rows| - rows.each do |row| - conn.insert_fixture(row, fixture_set_name) - end + fs.fixture_sql(conn).each do |stmt| + conn.execute stmt end end @@ -580,6 +572,16 @@ module ActiveRecord fixtures.size end + def fixture_sql(conn) + table_rows = self.table_rows + + table_rows.keys.map { |table| + "DELETE FROM #{conn.quote_table_name(table)}" + }.concat table_rows.flat_map { |fixture_set_name, rows| + rows.map { |row| conn.fixture_sql(row, fixture_set_name) } + } + end + # Return a hash of rows to be inserted. The key is the table, the value is # a list of rows to insert to that table. def table_rows
ask the fixture set for the sql statements
rails_rails
train
3de3697d16ea27930cca6db1fa40a8219e7a3564
diff --git a/parsl/executors/base.py b/parsl/executors/base.py index <HASH>..<HASH> 100644 --- a/parsl/executors/base.py +++ b/parsl/executors/base.py @@ -7,7 +7,11 @@ class ParslExecutor(metaclass=ABCMeta): This is a metaclass that only enforces concrete implementations of functionality by the child classes. - .. note:: Shutdown is currently missing, as it is not yet supported by some of the executors (threads, for example). + In addition to the listed methods, a ParslExecutor instance must always + have a member field: + + label: str - a human readable label for the executor, unique + with respect to other executors. """
Add ParslExecutor docstring requirement to have a label, and remove obsolete comment about shutdown
Parsl_parsl
train
af5497f1f983bd4321759ce1cb555db70af21895
diff --git a/butterknife-compiler/src/test/java/butterknife/internal/DebouncingOnClickListener.java b/butterknife-compiler/src/test/java/butterknife/internal/DebouncingOnClickListener.java index <HASH>..<HASH> 100644 --- a/butterknife-compiler/src/test/java/butterknife/internal/DebouncingOnClickListener.java +++ b/butterknife-compiler/src/test/java/butterknife/internal/DebouncingOnClickListener.java @@ -8,7 +8,7 @@ import android.view.View; */ public abstract class DebouncingOnClickListener implements View.OnClickListener { - private static boolean enabled = true; + static boolean enabled = true; private static final Runnable ENABLE_AGAIN = new Runnable() { @Override public void run() { diff --git a/butterknife/src/main/java/butterknife/internal/DebouncingOnClickListener.java b/butterknife/src/main/java/butterknife/internal/DebouncingOnClickListener.java index <HASH>..<HASH> 100644 --- a/butterknife/src/main/java/butterknife/internal/DebouncingOnClickListener.java +++ b/butterknife/src/main/java/butterknife/internal/DebouncingOnClickListener.java @@ -7,7 +7,7 @@ import android.view.View; * same frame. A click on one button disables all buttons for that frame. */ public abstract class DebouncingOnClickListener implements View.OnClickListener { - private static boolean enabled = true; + static boolean enabled = true; private static final Runnable ENABLE_AGAIN = new Runnable() { @Override public void run() {
Remove creation of synthetic accessor methods.
JakeWharton_butterknife
train
551933209e313ffa5c55ee8281d299ceb380687a
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,5 @@ language: go go: -- tip + - 1.3 + - 1.4 + - tip diff --git a/time.go b/time.go index <HASH>..<HASH> 100644 --- a/time.go +++ b/time.go @@ -2,6 +2,7 @@ package coinbase import( "fmt" + "encoding/json" "time" ) @@ -23,12 +24,18 @@ type Time time.Time func (t *Time) UnmarshalJSON(data []byte) error { var err error var parsedTime time.Time + var stringTime string if string(data) == "null" { *t = Time(time.Time{}) return nil } + if err := json.Unmarshal(data, &stringTime);err != nil { + *t = Time(time.Time{}) + return err + } + layouts := []string{ "2006-01-02 15:04:05+00", "2006-01-02T15:04:05.999999Z", @@ -37,7 +44,7 @@ func (t *Time) UnmarshalJSON(data []byte) error { "2006-01-02T15:04:05Z", "2006-01-02 15:04:05.999999+00" } for _, layout := range layouts { - parsedTime, err = time.Parse(layout, string(data)) + parsedTime, err = time.Parse(layout, stringTime) if err != nil { continue } diff --git a/time_test.go b/time_test.go index <HASH>..<HASH> 100644 --- a/time_test.go +++ b/time_test.go @@ -1,7 +1,9 @@ package coinbase import( + "encoding/json" "errors" + "time" "testing" ) @@ -16,3 +18,21 @@ func TestGetTime(t *testing.T) { t.Error(errors.New("Zero value")) } } + +func TestTimeUnmarshalJSON(t *testing.T) { + c := Time{} + now := time.Now() + + jsonData, err := json.Marshal(now.Format("2006-01-02 15:04:05+00")) + if err != nil { + t.Error(err) + } + + if err = c.UnmarshalJSON(jsonData); err != nil { + t.Error(err) + } + + if now.Equal(c.Time()) { + t.Error(errors.New("Unmarshaled time does not equal original time")) + } +}
Updated travis language tests and fixed time parsing
preichenberger_go-coinbasepro
train
662b0c7e323886e0d7d40760f7d18ef4248fe156
diff --git a/contrib/ruby_event_store-rom/lib/ruby_event_store/rom/event_repository.rb b/contrib/ruby_event_store-rom/lib/ruby_event_store/rom/event_repository.rb index <HASH>..<HASH> 100644 --- a/contrib/ruby_event_store-rom/lib/ruby_event_store/rom/event_repository.rb +++ b/contrib/ruby_event_store-rom/lib/ruby_event_store/rom/event_repository.rb @@ -13,8 +13,8 @@ module RubyEventStore end def append_to_stream(records, stream, expected_version) - serialized_records = Array(records).map { |record| record.serialize(@serializer) } - event_ids = serialized_records.map(&:event_id) + serialized_records = records.map { |record| record.serialize(@serializer) } + event_ids = records.map(&:event_id) @rom.handle_error(:unique_violation) do @rom.unit_of_work do |changesets| @@ -31,7 +31,6 @@ module RubyEventStore end def link_to_stream(event_ids, stream, expected_version) - event_ids = Array(event_ids) validate_event_ids(event_ids) @rom.handle_error(:unique_violation) do
Obsolete defensive code. The responsibility of the layer above the repository is to deliver collections.
RailsEventStore_rails_event_store
train
c63d3baf523ab1097c4dd491fa597ae480e68581
diff --git a/src/Norm/Controller/NormController.php b/src/Norm/Controller/NormController.php index <HASH>..<HASH> 100644 --- a/src/Norm/Controller/NormController.php +++ b/src/Norm/Controller/NormController.php @@ -198,16 +198,6 @@ class NormController extends RestController $this->data['ids'] = $id; } - public function getRedirectUri() - { - $continue = $this->request->get('!continue'); - if (empty($continue)) { - return $this->getBaseUri(); - } else { - return $continue; - } - } - public function schema($schema = null) { if (func_num_args() === 0) {
refactor getRedirectUri to base Controller
xinix-technology_norm
train
02184cb970ee2073d073e10d4c26363783c72696
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ class TestCommand(setuptools.command.test.test): setup( name="rueckenwind", - version="0.4.2", + version="0.5.0", url='https://github.com/FlorianLudwig/rueckenwind', description='tornado based webframework', author='Florian Ludwig',
master is now <I>.x land; breaking changes ahead!
FlorianLudwig_rueckenwind
train
c4523ae23c9a964d0c144eed89e6ad680e6a2659
diff --git a/openquake/calculators/hazard/disagg/core.py b/openquake/calculators/hazard/disagg/core.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/disagg/core.py +++ b/openquake/calculators/hazard/disagg/core.py @@ -144,6 +144,10 @@ def compute_disagg(job_id, sites, lt_rlz_id): # If the hazard curve is all zeros, don't even do the # disagg calculation. if all([x == 0.0 for x in curve.poes]): + logs.LOG.debug( + '* hazard curve contained all 0 probability values; ' + 'skipping' + ) continue for poe in hc.poes_disagg:
calcs/hazard/disagg/core: Added debug log statement to report when hazard curve poes are all <I>.
gem_oq-engine
train
2613536799d4fd75f9d1a1969794a84b75133747
diff --git a/salt/modules/apt.py b/salt/modules/apt.py index <HASH>..<HASH> 100644 --- a/salt/modules/apt.py +++ b/salt/modules/apt.py @@ -114,12 +114,13 @@ def _get_virtual(): Return a dict of virtual package information ''' if 'pkg._get_virtual' not in __context__: - cmd = 'grep-available -F Provides -s Package,Provides -e "^.+$"' - out = __salt__['cmd.run_stdout'](cmd, output_loglevel='debug') - virtpkg_re = re.compile(r'Package: (\S+)\nProvides: ([\S, ]+)') __context__['pkg._get_virtual'] = {} - for realpkg, provides in virtpkg_re.findall(out): - __context__['pkg._get_virtual'][realpkg] = provides.split(', ') + if __salt__['cmd.has_exec']('grep-available'): + cmd = 'grep-available -F Provides -s Package,Provides -e "^.+$"' + out = __salt__['cmd.run_stdout'](cmd, output_loglevel='debug') + virtpkg_re = re.compile(r'Package: (\S+)\nProvides: ([\S, ]+)') + for realpkg, provides in virtpkg_re.findall(out): + __context__['pkg._get_virtual'][realpkg] = provides.split(', ') return __context__['pkg._get_virtual']
Don't assume that grep-available is available
saltstack_salt
train
67805676bffdfd499f1da864c66f8e3fed64767f
diff --git a/src/com/wonderpush/sdk/WonderPush.java b/src/com/wonderpush/sdk/WonderPush.java index <HASH>..<HASH> 100644 --- a/src/com/wonderpush/sdk/WonderPush.java +++ b/src/com/wonderpush/sdk/WonderPush.java @@ -1567,6 +1567,16 @@ public class WonderPush { } protected static void handleDialogButtonAction(WonderPushDialogBuilder dialog, WonderPushDialogBuilder.Button buttonClicked) { + JSONObject eventData = new JSONObject(); + try { + eventData.put("buttonLabel", buttonClicked == null ? null : buttonClicked.label); + eventData.put("reactionTime", dialog.getShownDuration()); + eventData.putOpt("custom", dialog.getInteractionEventCustom()); + } catch (JSONException e) { + Log.e(TAG, "Failed to fill the @NOTIFICATION_ACTION event"); + } + trackInternalEvent("@NOTIFICATION_ACTION", eventData); + if (buttonClicked == null) { logDebug("User cancelled the dialog"); return; diff --git a/src/com/wonderpush/sdk/WonderPushDialogBuilder.java b/src/com/wonderpush/sdk/WonderPushDialogBuilder.java index <HASH>..<HASH> 100644 --- a/src/com/wonderpush/sdk/WonderPushDialogBuilder.java +++ b/src/com/wonderpush/sdk/WonderPushDialogBuilder.java @@ -10,6 +10,7 @@ import org.json.JSONObject; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; +import android.os.SystemClock; import android.util.Log; import android.view.View; @@ -32,6 +33,9 @@ class WonderPushDialogBuilder { final AlertDialog.Builder builder; final OnChoice listener; AlertDialog dialog; + long shownAtElapsedRealtime; + long endedAtElapsedRealtime; + JSONObject interactionEventCustom; String defaultTitle; int defaultIcon; @@ -166,12 +170,14 @@ class WonderPushDialogBuilder { } public void show() { + shownAtElapsedRealtime = SystemClock.elapsedRealtime(); if (dialog == null) { dialog = builder.create(); // On dismiss, handle chosen button, if any DialogInterface.OnDismissListener dismissListener = new DialogInterface.OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { + endedAtElapsedRealtime = SystemClock.elapsedRealtime(); WonderPush.logDebug("Dialog dismissed"); if (listener != null) { listener.onChoice(WonderPushDialogBuilder.this, choice.get()); @@ -187,6 +193,18 @@ class WonderPushDialogBuilder { dialog.dismiss(); } + public long getShownDuration() { + return endedAtElapsedRealtime - shownAtElapsedRealtime; + } + + protected JSONObject getInteractionEventCustom() { + return interactionEventCustom; + } + + protected void setInteractionEventCustom(JSONObject interactionEventCustom) { + this.interactionEventCustom = interactionEventCustom; + } + static class Button { public String label;
New @NOTIFICATION_ACTION event tracking interaction time and clicked button
wonderpush_wonderpush-android-sdk
train
a65bfd94236bdb9836dc3e277cb1af3628dd9ef0
diff --git a/library.js b/library.js index <HASH>..<HASH> 100644 --- a/library.js +++ b/library.js @@ -50,6 +50,10 @@ Mentions.notify = function(postData) { return match && array.indexOf(match) === index && noMentionGroups.indexOf(match) === -1; }); + if (!matches.length) { + return; + } + async.parallel({ userRecipients: function(next) { filter(matches, User.exists, next); @@ -62,6 +66,10 @@ Mentions.notify = function(postData) { return; } + if (!results.userRecipients.length && !results.groupRecipients.length) { + return; + } + async.parallel({ topic: function(next) { Topics.getTopicFields(postData.tid, ['title', 'cid'], next); @@ -76,6 +84,9 @@ Mentions.notify = function(postData) { }, groupsMembers: function(next) { getGroupMemberUids(results.groupRecipients, next); + }, + topicFollowers: function(next) { + Topics.getFollowers(postData.tid, next); } }, function(err, results) { if (err) { @@ -83,7 +94,7 @@ Mentions.notify = function(postData) { } var uids = results.uids.concat(results.groupsMembers).filter(function(uid, index, array) { - return array.indexOf(uid) === index && parseInt(uid, 10) !== parseInt(postData.uid, 10); + return array.indexOf(uid) === index && parseInt(uid, 10) !== parseInt(postData.uid, 10) && results.topicFollowers.indexOf(uid.toString()) === -1; }); if (!uids.length) {
If the user is already following the topic, don't send a mention notification
julianlam_nodebb-plugin-mentions
train
a03cfbc1ff5c031aee9a5637453f9fa3fde7b01b
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup, find_packages import os -version = '0.10' +version = '2.0dev1' here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.rst')).read()
Change version to <I>dev1
elemoine_papyrus
train
2f1616c15c6de157106016692e7153b3480b4cde
diff --git a/zhue/model/bridge.py b/zhue/model/bridge.py index <HASH>..<HASH> 100644 --- a/zhue/model/bridge.py +++ b/zhue/model/bridge.py @@ -232,7 +232,7 @@ class Bridge(object): if d.name == name: return d else: - if d.name.lower().startswith(name.lower()): + if re.match('.*{}.*'.format(name), d.name, re.IGNORECASE): return d elif str(d.hue_id) == str(hue_id): return d
Use regexes for matching on device names
pschmitt_zhue
train
9aa10654658fd7ca314503dd6c3ae275c17fddc1
diff --git a/master/buildbot/db/buildsets.py b/master/buildbot/db/buildsets.py index <HASH>..<HASH> 100644 --- a/master/buildbot/db/buildsets.py +++ b/master/buildbot/db/buildsets.py @@ -151,7 +151,7 @@ class BuildsetsConnectorComponent(base.DBConnectorComponent): whereclause=( (upstreams_tbl.c.schedulerid == schedulerid) & (upstreams_tbl.c.buildsetid == bs_tbl.c.id) & - (upstreams_tbl.c.active)), + (upstreams_tbl.c.active != 0)), distinct=True) return [ (row.id, row.sourcestampid, row.complete, row.results) for row in conn.execute(q).fetchall() ] diff --git a/master/buildbot/test/unit/test_db_buildsets.py b/master/buildbot/test/unit/test_db_buildsets.py index <HASH>..<HASH> 100644 --- a/master/buildbot/test/unit/test_db_buildsets.py +++ b/master/buildbot/test/unit/test_db_buildsets.py @@ -36,11 +36,13 @@ class TestBuildsetsConnectorComponent( # set up the tables we'll need, following links where ForeignKey # constraints are in place. def thd(engine): + self.db.model.patches.create(bind=engine) + self.db.model.sourcestamps.create(bind=engine) self.db.model.buildsets.create(bind=engine) self.db.model.buildset_properties.create(bind=engine) + self.db.model.schedulers.create(bind=engine) self.db.model.scheduler_upstream_buildsets.create(bind=engine) self.db.model.buildrequests.create(bind=engine) - self.db.model.sourcestamps.create(bind=engine) # set up a sourcestamp with id 234 for referential integrity engine.execute(self.db.model.sourcestamps.insert(), dict(id=234)) @@ -158,6 +160,18 @@ class TestBuildsetsConnectorComponent( def test_getSubscribedBuildsets(self): tbl = self.db.model.scheduler_upstream_buildsets def add_data_thd(conn): + conn.execute(self.db.model.schedulers.insert(), [ + dict(schedulerid=92, name='sc', state='', class_name='sch'), + dict(schedulerid=93, name='other', state='', class_name='sch'), + ]) + conn.execute(self.db.model.sourcestamps.insert(), [ + dict(id=120, branch='b', revision='120', + repository='', project=''), + dict(id=130, branch='b', revision='130', + repository='', project=''), + dict(id=140, branch='b', revision='140', + repository='', project=''), + ]) conn.execute(self.db.model.buildsets.insert(), [ dict(id=12, sourcestampid=120, complete=0, results=-1, submitted_at=0), @@ -165,6 +179,8 @@ class TestBuildsetsConnectorComponent( results=-1, submitted_at=0), dict(id=14, sourcestampid=140, complete=1, results=5, submitted_at=0), + dict(id=15, sourcestampid=120, complete=0, + results=-1, submitted_at=0), ]) conn.execute(tbl.insert(), [ dict(schedulerid=92, buildsetid=12, active=1), diff --git a/master/buildbot/test/util/db.py b/master/buildbot/test/util/db.py index <HASH>..<HASH> 100644 --- a/master/buildbot/test/util/db.py +++ b/master/buildbot/test/util/db.py @@ -15,7 +15,7 @@ import os import sqlalchemy -from sqlalchemy.schema import MetaData +from buildbot.db import model from twisted.python import log class RealDatabaseMixin(object): @@ -31,22 +31,10 @@ class RealDatabaseMixin(object): log.msg("cleaning database %s" % self.db_url) engine = sqlalchemy.create_engine(self.db_url) - meta = MetaData() - - # there are some tables for which reflection sometimes fails, but since - # we're just dropping them, we don't need actual schema - a fake - # table will do the trick - for table in [ 'buildrequests', 'builds', - 'buildset_properties', 'buildsets', 'change_properties', - 'change_files', 'change_links', - 'changes', 'patches', 'sourcestamp_changes', 'sourcestamps', - 'scheduler_changes', 'scheduler_upstream_buildsets', - 'schedulers' ]: - sqlalchemy.Table(table, meta, - sqlalchemy.Column('tmp', sqlalchemy.Integer)) - - # load any remaining tables - meta.reflect(bind=engine) + # While it's awful to make tests depend on the code under test, this + # is the best way to ensure that we are deleting all of the tables in + # the model, without using reflection (which does not work very well) + meta = model.Model.metadata # and drop them, if they exist meta.drop_all(bind=engine, checkfirst=True)
test_getSubscribedBuildsets passes, but sqlite is broken. This is enough to get buildbot.test.unit.test_db_buildsets.TestBuildsetsConnectorComponent.test_getSubscribedBuildsets to pass for postgres, but now the sqlite tests are broken.
buildbot_buildbot
train
fa9d0cb8e38ea2beb5622bf46178f852011fd31c
diff --git a/daemon/loadbalancer.go b/daemon/loadbalancer.go index <HASH>..<HASH> 100644 --- a/daemon/loadbalancer.go +++ b/daemon/loadbalancer.go @@ -425,17 +425,23 @@ func (d *Daemon) RevNATDump() ([]loadbalancer.L3n4AddrID, error) { } func restoreServiceIDs() { - svcMap, _, errors := lbmap.DumpServiceMapsToUserspace(false, false) + svcMap, _, errors := lbmap.DumpServiceMapsToUserspace(true, false) for _, err := range errors { log.WithError(err).Warning("Error occured while dumping service table from datapath") } for _, svc := range svcMap { + // Services where the service ID was missing in the BPF map + // cannot be restored + if uint32(svc.FE.ID) == uint32(0) { + continue + } + // The service ID can only be restored when global service IDs // are disabled. Global service IDs require kvstore access but // service load-balancing needs to be enabled before the // kvstore is guaranteed to be connected - if option.Config.LBInterface == "" && uint32(svc.FE.ID) != uint32(0) { + if option.Config.LBInterface == "" { scopedLog := log.WithFields(logrus.Fields{ logfields.ServiceID: svc.FE.ID, logfields.ServiceIP: svc.FE.L3n4Addr.String(), @@ -448,6 +454,12 @@ func restoreServiceIDs() { scopedLog.Info("Restored service ID from datapath") } } + + // Restore the service cache to guarantee backend ordering + // across restarts + if err := lbmap.RestoreService(svc); err != nil { + log.WithError(err).Warning("Unable to restore service in cache") + } } } diff --git a/pkg/maps/lbmap/bpfservice.go b/pkg/maps/lbmap/bpfservice.go index <HASH>..<HASH> 100644 --- a/pkg/maps/lbmap/bpfservice.go +++ b/pkg/maps/lbmap/bpfservice.go @@ -14,6 +14,10 @@ package lbmap +import ( + "github.com/cilium/cilium/pkg/loadbalancer" +) + type serviceValueMap map[string]ServiceValue type bpfBackend struct { @@ -145,6 +149,38 @@ func createBackendsMap(backends []ServiceValue) serviceValueMap { return m } +func (l *lbmapCache) restoreService(svc loadbalancer.LBSVC) error { + frontendID := svc.FE.String() + + serviceKey, serviceValues, err := LBSVC2ServiceKeynValue(svc) + if err != nil { + return err + } + + bpfSvc, ok := l.entries[frontendID] + if !ok { + bpfSvc = newBpfService(serviceKey) + l.entries[frontendID] = bpfSvc + } + + for index, backend := range serviceValues { + b := &bpfBackend{ + id: backend.String(), + bpfValue: backend, + } + if _, ok := bpfSvc.uniqueBackends[backend.String()]; ok { + b.isHole = true + bpfSvc.holes = append(bpfSvc.holes, index+1) + } else { + bpfSvc.uniqueBackends[backend.String()] = backend + } + + bpfSvc.backendsByMapIndex[index+1] = b + } + + return nil +} + func (l *lbmapCache) prepareUpdate(fe ServiceKey, backends []ServiceValue) *bpfService { frontendID := fe.String() diff --git a/pkg/maps/lbmap/lbmap.go b/pkg/maps/lbmap/lbmap.go index <HASH>..<HASH> 100644 --- a/pkg/maps/lbmap/lbmap.go +++ b/pkg/maps/lbmap/lbmap.go @@ -696,3 +696,9 @@ func DumpRevNATMapsToUserspace(skipIPv4 bool) (loadbalancer.RevNATMap, []error) return newRevNATMap, errors } + +// RestoreService restores a single service in the cache. This is required to +// guarantee consistent backend ordering +func RestoreService(svc loadbalancer.LBSVC) error { + return cache.restoreService(svc) +}
service: Restore bpfservice cache on startup. Failing to restore the cache resulted in the backends being rewritten in a different order on the first service update. This resulted in connections getting reset when the agent restarts.
cilium_cilium
train
ebee6fdd01d8d97bcca07542b69c7b3b8964bf04
diff --git a/greenmail-core/src/main/java/com/icegreen/greenmail/imap/ImapServer.java b/greenmail-core/src/main/java/com/icegreen/greenmail/imap/ImapServer.java index <HASH>..<HASH> 100644 --- a/greenmail-core/src/main/java/com/icegreen/greenmail/imap/ImapServer.java +++ b/greenmail-core/src/main/java/com/icegreen/greenmail/imap/ImapServer.java @@ -12,7 +12,6 @@ import com.icegreen.greenmail.util.ServerSetup; import java.io.IOException; import java.net.Socket; -import java.util.Iterator; public final class ImapServer extends AbstractServer { diff --git a/greenmail-core/src/main/java/com/icegreen/greenmail/imap/commands/CommandParser.java b/greenmail-core/src/main/java/com/icegreen/greenmail/imap/commands/CommandParser.java index <HASH>..<HASH> 100644 --- a/greenmail-core/src/main/java/com/icegreen/greenmail/imap/commands/CommandParser.java +++ b/greenmail-core/src/main/java/com/icegreen/greenmail/imap/commands/CommandParser.java @@ -331,10 +331,6 @@ public class CommandParser { return (chr >= 0x01 && chr <= 0x7f); } - private boolean isCHAR8(char chr) { - return (chr >= 0x01 && chr <= 0xff); - } - protected boolean isListWildcard(char chr) { return (chr == '*' || chr == '%'); } diff --git a/greenmail-core/src/main/java/com/icegreen/greenmail/pop3/Pop3Server.java b/greenmail-core/src/main/java/com/icegreen/greenmail/pop3/Pop3Server.java index <HASH>..<HASH> 100644 --- a/greenmail-core/src/main/java/com/icegreen/greenmail/pop3/Pop3Server.java +++ b/greenmail-core/src/main/java/com/icegreen/greenmail/pop3/Pop3Server.java @@ -11,7 +11,6 @@ import com.icegreen.greenmail.util.ServerSetup; import com.icegreen.greenmail.pop3.commands.Pop3CommandRegistry; import java.io.IOException; -import java.util.Iterator; import java.net.Socket; public class Pop3Server extends AbstractServer {
Cleanup: Removed unused imports etc.
greenmail-mail-test_greenmail
train
4f4caf7a04a396e2be26fa7f68ee8d73ff8a0fcf
diff --git a/src/main/java/com/github/funthomas424242/rades/annotations/processors/RadesBuilderProcessor.java b/src/main/java/com/github/funthomas424242/rades/annotations/processors/RadesBuilderProcessor.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/github/funthomas424242/rades/annotations/processors/RadesBuilderProcessor.java +++ b/src/main/java/com/github/funthomas424242/rades/annotations/processors/RadesBuilderProcessor.java @@ -71,10 +71,10 @@ public class RadesBuilderProcessor extends AbstractProcessor { } protected String getFullQualifiedClassName(final TypeMirror typeMirror){ - String typeName=null; - if(typeMirror instanceof DeclaredType){ - final DeclaredType type = (DeclaredType) typeMirror; - } +// String typeName=null; +// if(typeMirror instanceof DeclaredType){ +// final DeclaredType type = (DeclaredType) typeMirror; +// } return typeMirror.toString(); }
[Codacy] Issues fixed
FunThomas424242_rades-annotations
train
164cbafd71494f8cb080017b74fc3df225acecaf
diff --git a/src/js/handlers.js b/src/js/handlers.js index <HASH>..<HASH> 100644 --- a/src/js/handlers.js +++ b/src/js/handlers.js @@ -96,6 +96,7 @@ event.preventDefault(); dragStartEvent = $.Event(EVENT_DRAG_START, { + originalEvent: originalEvent, dragType: dragType }); @@ -150,6 +151,7 @@ event.preventDefault(); dragMoveEvent = $.Event(EVENT_DRAG_MOVE, { + originalEvent: originalEvent, dragType: dragType }); @@ -178,6 +180,7 @@ event.preventDefault(); dragEndEvent = $.Event(EVENT_DRAG_END, { + originalEvent: event.originalEvent, dragType: dragType });
Added originalEvent for drag events
fengyuanchen_cropper
train
66ae504c4bf8cc1f005c8822623d2b3c3d3e4aa3
diff --git a/sqlparse/sql.py b/sqlparse/sql.py index <HASH>..<HASH> 100644 --- a/sqlparse/sql.py +++ b/sqlparse/sql.py @@ -257,37 +257,25 @@ class TokenList(Token): """Returns the previous token relative to *idx*. If *skip_ws* is ``True`` (the default) whitespace tokens are ignored. + If *skip_cm* is ``True`` comments are ignored. ``None`` is returned if there's no previous token. """ - if idx is None: - return None, None - idx += 1 # alot of code usage current pre-compensates for this - funcs = lambda tk: not ((skip_ws and tk.is_whitespace()) or - (skip_cm and imt(tk, t=T.Comment, i=Comment))) - return self._token_matching(funcs, idx, reverse=True) + return self.token_next(idx, skip_ws, skip_cm, _reverse=True) - # TODO: May need to implement skip_cm for upstream changes. # TODO: May need to re-add default value to idx - def token_next(self, idx, skip_ws=True, skip_cm=False): + def token_next(self, idx, skip_ws=True, skip_cm=False, _reverse=False): """Returns the next token relative to *idx*. If *skip_ws* is ``True`` (the default) whitespace tokens are ignored. + If *skip_cm* is ``True`` comments are ignored. ``None`` is returned if there's no next token. """ if idx is None: return None, None idx += 1 # alot of code usage current pre-compensates for this - try: - if not skip_ws: - return idx, self.tokens[idx] - else: - while True: - token = self.tokens[idx] - if not token.is_whitespace(): - return idx, token - idx += 1 - except IndexError: - return None, None + funcs = lambda tk: not ((skip_ws and tk.is_whitespace()) or + (skip_cm and imt(tk, t=T.Comment, i=Comment))) + return self._token_matching(funcs, idx, reverse=_reverse) def token_index(self, token, start=0): """Return list index of token.""" diff --git a/tests/test_regressions.py b/tests/test_regressions.py index <HASH>..<HASH> 100644 --- a/tests/test_regressions.py +++ b/tests/test_regressions.py @@ -312,3 +312,9 @@ def test_issue207_runaway_format(): " 2 as two,", " 3", " from dual) t0"]) + + +def token_next_doesnt_ignore_skip_cm(): + sql = '--comment\nselect 1' + tok = sqlparse.parse(sql)[0].token_next(-1, skip_cm=True)[1] + assert tok.value == 'select' \ No newline at end of file
token_next shouldn't ignore skip_cm
andialbrecht_sqlparse
train
dba45751e0a860d37dfe7b5986c58bede39a2ffd
diff --git a/pycbc/mchirp_area_improved.py b/pycbc/mchirp_area_improved.py index <HASH>..<HASH> 100644 --- a/pycbc/mchirp_area_improved.py +++ b/pycbc/mchirp_area_improved.py @@ -226,8 +226,12 @@ def calc_probabilities(mchirp, snr, eff_distance, src_args): mass_gap = src_args['mass_gap'] # If the mchirp is greater than the mchirp corresponding to two masses - # equal to the maximum mass, the probability for BBH is 100% + # equal to the maximum mass, the probability for BBH is 100%. + # If it is less than the mchirp corresponding to two masses equal to the + # minimum mass, the probability for BNS is 100%. mc_max = mass_limits['max_m1'] / (2 ** 0.2) + mc_min = mass_limits['min_m2'] / (2 ** 0.2) + if trig_mc_det['central'] > mc_max * (1 + z['central']): if mass_gap: probabilities = {"BNS": 0.0, "GNS": 0.0, "NSBH": 0.0, "GG": 0.0, @@ -235,8 +239,18 @@ def calc_probabilities(mchirp, snr, eff_distance, src_args): else: probabilities = {"BNS": 0.0, "NSBH": 0.0, "BBH": 1.0, "Mass Gap": 0.0} + + elif trig_mc_det['central'] < mc_min * (1 + z['central']): + if mass_gap: + probabilities = {"BNS": 1.0, "GNS": 0.0, "NSBH": 0.0, "GG": 0.0, + "BHG": 0.0, "BBH": 0.0} + else: + probabilities = {"BNS": 1.0, "NSBH": 0.0, "BBH": 0.0, + "Mass Gap": 0.0} + else: areas = calc_areas(trig_mc_det, mass_limits, mass_bdary, z, mass_gap) total_area = sum(areas.values()) probabilities = {key: areas[key]/total_area for key in areas} + return probabilities
Add condition for mchirp less than min_mchirp (#<I>) * Add condition for mchirp less than min_mchirp * bump to restart checks
gwastro_pycbc
train
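The `(2 ** 0.2)` divisor in the diff above is the equal-mass chirp-mass factor. From the standard chirp-mass definition, setting both component masses to a boundary mass m gives:

```latex
\mathcal{M} = \frac{(m_1 m_2)^{3/5}}{(m_1 + m_2)^{1/5}}
\quad\xrightarrow{\;m_1 = m_2 = m\;}\quad
\frac{m^{6/5}}{(2m)^{1/5}} = \frac{m}{2^{1/5}} = \frac{m}{2^{0.2}}
```

Since the chirp mass grows with either component mass, `mc_min = min_m2 / 2**0.2` is the smallest chirp mass any in-range binary can have, which is why anything below it (after the `(1 + z)` redshift scaling) is classified as BNS with probability 1.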
ba625022e32ecab1dceb8f80550ed4298582a8f6
diff --git a/txdarn/test/test_compat.py b/txdarn/test/test_compat.py index <HASH>..<HASH> 100644 --- a/txdarn/test/test_compat.py +++ b/txdarn/test/test_compat.py @@ -13,7 +13,7 @@ def skipIfVersion(test): return _decorator -class TestCompat(unittest.SynchronousTestCase): +class CompatTestCase(unittest.SynchronousTestCase): def test_asJSON(self): self.assertEqual(C.asJSON({'a': [1]}), diff --git a/txdarn/test/test_protocol.py b/txdarn/test/test_protocol.py index <HASH>..<HASH> 100644 --- a/txdarn/test/test_protocol.py +++ b/txdarn/test/test_protocol.py @@ -19,7 +19,7 @@ class sockJSJSONTestCase(unittest.SynchronousTestCase): b'[3000,"Go away!"]') -class HeartbeatClock(unittest.TestCase): +class HeartbeatClockTestCase(unittest.TestCase): def setUp(self): self.clock = Clock()
rename some inconsistently named tests
markrwilliams_txdarn
train
1aec03b32e0b30546c39ee8bec599a4a653b9383
diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py index <HASH>..<HASH> 100644 --- a/tests/utils/test_contextvars.py +++ b/tests/utils/test_contextvars.py @@ -1,7 +1,7 @@ -import pytest import random import time +import pytest import gevent @@ -20,7 +20,13 @@ def test_gevent_is_not_patched(): @pytest.mark.parametrize("with_gevent", [True, False]) def test_leaks(with_gevent): if with_gevent: - gevent.monkey.patch_all() + try: + gevent.monkey.patch_all() + except Exception as e: + if "_RLock__owner" in str(e): + pytest.skip(reason="https://github.com/gevent/gevent/issues/1380") + else: + raise import threading
fix: Fix tests under pypy
getsentry_sentry-python
train
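The fix is an instance of a general pattern: run the fragile setup, skip the test when the failure message matches a known upstream bug, and re-raise anything else. A self-contained pytest sketch, with the failing call and marker string as stand-ins for gevent's actual behaviour:

# Generic "skip on known upstream bug, re-raise otherwise" pattern; the
# stand-in failure below only imitates the gevent issue referenced above.
import pytest

def fragile_setup():
    raise RuntimeError("_RLock__owner is not settable")  # stand-in failure

def test_with_optional_dependency():
    try:
        fragile_setup()
    except Exception as e:
        if "_RLock__owner" in str(e):
            pytest.skip("known upstream issue, see gevent#1380")
        raise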
936c1c305557ad1d3309d98ebcf51554357ec51e
diff --git a/formats/fql.py b/formats/fql.py index <HASH>..<HASH> 100644 --- a/formats/fql.py +++ b/formats/fql.py @@ -918,6 +918,14 @@ class Correction(object): #AS CORRECTION/SUGGESTION expression... inheritchildren = [] if focus and not self.bare: #copy all data within inheritchildren = list(focus.copychildren(query.doc, True)) + if action.action == "EDIT" and 'respan' in action.extra: + #delete all word references from the copy first, we will add new ones + inheritchildren = [ c for c in inheritchildren if not isinstance(c, folia.WordReference) ] + if not isinstance(focus, folia.AbstractSpanAnnotation): raise QueryError("Can only perform RESPAN on span annotation elements!") + contextselector = target if target else query.doc + spanset = next(action.extra['respan'](query, contextselector, True, debug)) #there can be only one + for w in spanset: + inheritchildren.append(w) if actionassignments: kwargs['new'] = action.focus.Class(query.doc,*inheritchildren, **actionassignments)
FQL: RESPAN now works in AS CORRECTION as well
proycon_pynlpl
train
21bb08e9100efef3d4d7a2eca40ecbd708bdf185
diff --git a/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedMetaProperty.java b/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedMetaProperty.java index <HASH>..<HASH> 100644 --- a/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedMetaProperty.java +++ b/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedMetaProperty.java @@ -50,6 +50,8 @@ public class DetachedMetaProperty<V> extends DetachedElement<Property<V>> implem if (hiddenProperties != null) hiddenProperties.entrySet().iterator().forEachRemaining(kv -> this.properties.put(Graph.Key.hide(kv.getKey()), new ArrayList(Arrays.asList(new DetachedProperty(kv.getKey(), kv.getValue(), this))))); } + // todo: straighten out all these constructors and their scopes - what do we really need here? + private DetachedMetaProperty(final MetaProperty property) { super(property); if (null == property) throw Graph.Exceptions.argumentCanNotBeNull("property"); diff --git a/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedProperty.java b/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedProperty.java index <HASH>..<HASH> 100644 --- a/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedProperty.java +++ b/gremlin-core/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedProperty.java @@ -39,6 +39,8 @@ public class DetachedProperty<V> implements Property, Serializable, Attachable<P this.hashCode = super.hashCode(); } + // todo: straighten out all these constructors and their scopes - what do we really need here? + private DetachedProperty(final Property property) { if (null == property) throw Graph.Exceptions.argumentCanNotBeNull("property");
Add some todos for later.
apache_tinkerpop
train
947170370cfac6624e3d54eadf9f011664fc4f25
diff --git a/src/jQuery-image-upload.js b/src/jQuery-image-upload.js index <HASH>..<HASH> 100644 --- a/src/jQuery-image-upload.js +++ b/src/jQuery-image-upload.js @@ -91,9 +91,11 @@ // create the file input element var $fileInput = $("<input>") - .attr("type", "file") + .attr({ + "type": "file", + "name": settings.inputFileName + }) .addClass(settings.inputFileClass) - .attr("name", settings.inputFileName); // create the upload button var $uploadButton = $("<button>") @@ -141,18 +143,22 @@ // create the upload iframe var $uploadIframe = $("<iframe>") - .attr("id", iframeId) - .attr("name", iframeId) + .attr({ + "id": iframeId, + "name": iframeId + }) .hide(); // create the upload form var $uploadForm = $("<form>") .addClass(settings.formClass) - .attr("target", $uploadIframe.attr("id")) - .attr("enctype", "multipart/form-data") - .attr("method", "post") - .attr("action", settings.formAction); + .attr({ + "target": $uploadIframe.attr("id"), + "enctype": "multipart/form-data", + "method": "post", + "action": settings.formAction + }); // append controls to form
Use single attr calls passing objects
jillix_jQuery-image-upload
train
d4650400da47b9483c461e6ecf82e813bafaf936
diff --git a/src/artoo.helpers.js b/src/artoo.helpers.js index <HASH>..<HASH> 100644 --- a/src/artoo.helpers.js +++ b/src/artoo.helpers.js @@ -18,6 +18,15 @@ Object.setPrototypeOf(artoo, Object.getPrototypeOf(ee)); + // Legacy support + // TODO: drop this asap + artoo.hooks = { + trigger: function(name) { + artoo.emit(name); + } + }; + + /** * Generic Helpers * ----------------
Legacy support for hooks.trigger
medialab_artoo
train
3718bbc24708990a40eff51a120915d98bb08e47
diff --git a/faker/providers/internet.py b/faker/providers/internet.py index <HASH>..<HASH> 100644 --- a/faker/providers/internet.py +++ b/faker/providers/internet.py @@ -38,6 +38,12 @@ class Provider(BaseProvider): '{{url}}{{uri_path}}/{{uri_page}}/', '{{url}}{{uri_path}}/{{uri_page}}{{uri_extension}}', ) + image_placeholder_services = ( + 'http://placekitten.com/{width}/{height}', + 'http://placehold.it/{width}x{height}', + 'http://www.lorempixum.com/{width}/{height}', + 'http://dummyimage.com/{width}x{height}', + ) def email(self): pattern = self.random_element(self.email_formats) @@ -121,3 +127,15 @@ class Provider(BaseProvider): value = unicodedata.normalize('NFKD', value or Lorem.text(20)).encode('ascii', 'ignore').decode('ascii') value = re.sub('[^\w\s-]', '', value).strip().lower() return re.sub('[-\s]+', '-', value) + + @classmethod + def image_url(cls, width=None, height=None): + """ + Returns URL to placeholder image + Example: http://placehold.it/640x480 + """ + width_ = width or cls.random_int(max=1024) + height_ = height or cls.random_int(max=1024) + placeholder_url = cls.random_element(cls.image_placeholder_services) + return placeholder_url.format(width=width_, height=height_) +
Image Placeholder Added new method image_url that returns a URL to a placeholder image: fake.image_url() # <URL>
joke2k_faker
train
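A self-contained sketch of the image_url idea, using two of the service templates from the diff and simplified random sizing; faker's real provider hangs this off its BaseProvider helpers instead:

# Self-contained sketch of image_url; service templates copied from the diff,
# sizing and randomness simplified relative to faker's provider.
import random

SERVICES = (
    'http://placekitten.com/{width}/{height}',
    'http://placehold.it/{width}x{height}',
)

def image_url(width=None, height=None):
    width = width or random.randint(1, 1024)
    height = height or random.randint(1, 1024)
    return random.choice(SERVICES).format(width=width, height=height)

print(image_url(640, 480))  # e.g. http://placehold.it/640x480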
fd7d7bc26a7a6ad343e3782048ef20b2e9894ffe
diff --git a/test/secret-handshake.js b/test/secret-handshake.js index <HASH>..<HASH> 100644 --- a/test/secret-handshake.js +++ b/test/secret-handshake.js @@ -35,10 +35,11 @@ tape('test handshake', function (t) { }) + var r = Math.random() var bobHS = shs.server(bob, function (public, cb) { t.deepEqual(public, alice.publicKey) - if(deepEqual(public, alice.publicKey)) cb(null) + if(deepEqual(public, alice.publicKey)) cb(null, {okay: true, random: r}) else cb(new Error('unauthorized')) @@ -47,6 +48,7 @@ tape('test handshake', function (t) { if(err) throw err + t.deepEqual(stream.auth, {okay: true, random: r}) pull(stream, pull.through(function (data) { console.log('echo:', data.toString()) }), stream) //ECHO
test that authorize can attach metadata to server stream
auditdrivencrypto_secret-handshake
train
9a0de15ffb92337d35bdccc050bc035ca3b8e5ad
diff --git a/src/Http/Controllers/Traits/HCAdminListHeaders.php b/src/Http/Controllers/Traits/HCAdminListHeaders.php index <HASH>..<HASH> 100644 --- a/src/Http/Controllers/Traits/HCAdminListHeaders.php +++ b/src/Http/Controllers/Traits/HCAdminListHeaders.php @@ -58,4 +58,20 @@ trait HCAdminListHeaders 'label' => $label, ]; } + + /** + * @param string $label + * @param int $width + * @param int $height + * @return array + */ + protected function headerImage(string $label, int $width = 100, int $height = 100): array + { + return [ + 'type' => 'image', + 'label' => $label, + 'width' => $width, + 'height' => $height, + ]; + } }
Added lite cell - Image
honey-comb_core
train
539988cda688ab95b3be6159d65d444a7f129270
diff --git a/apiserver/common/networkingcommon/networkconfigapi.go b/apiserver/common/networkingcommon/networkconfigapi.go index <HASH>..<HASH> 100644 --- a/apiserver/common/networkingcommon/networkconfigapi.go +++ b/apiserver/common/networkingcommon/networkconfigapi.go @@ -49,18 +49,21 @@ func (api *NetworkConfigAPI) SetObservedNetworkConfig(args params.SetMachineNetw return nil } - providerConfig, err := api.getOneMachineProviderNetworkConfig(m) - if errors.IsNotProvisioned(err) { - logger.Infof("not updating machine %q network config: %v", m.Id(), err) - return nil - } - if err != nil { - return errors.Trace(err) - } + // Do not ask the provider about containers in machines. mergedConfig := observedConfig - if len(providerConfig) != 0 { - mergedConfig = MergeProviderAndObservedNetworkConfigs(providerConfig, observedConfig) - logger.Tracef("merged observed and provider network config for machine %q: %+v", m.Id(), mergedConfig) + if !m.IsContainer() { + providerConfig, err := api.getOneMachineProviderNetworkConfig(m) + if errors.IsNotProvisioned(err) { + logger.Infof("not updating machine %q network config: %v", m.Id(), err) + return nil + } + if err != nil { + return errors.Trace(err) + } + if len(providerConfig) != 0 { + mergedConfig = MergeProviderAndObservedNetworkConfigs(providerConfig, observedConfig) + logger.Tracef("merged observed and provider network config for machine %q: %+v", m.Id(), mergedConfig) + } } mergedConfig, err = api.fixUpFanSubnets(mergedConfig) @@ -122,6 +125,7 @@ func (api *NetworkConfigAPI) SetProviderNetworkConfig(args params.Entities) (par } if m.IsContainer() { + logger.Debugf("not updating network config for container %q", m.Id()) continue } @@ -163,10 +167,6 @@ func (api *NetworkConfigAPI) getMachineForSettingNetworkConfig(machineTag string return nil, errors.Trace(err) } - if m.IsContainer() { - logger.Debugf("not updating network config for container %q", m.Id()) - } - return m, nil }
Ensures that when updating observed network configuration, we do not attempt to merge provider network config if the machine is a container.
juju_juju
train
d3dcfee247b1ea7e6e59108527ccc2153fd27228
diff --git a/spring-credhub-core/src/main/java/org/springframework/credhub/support/ssh/SshCredential.java b/spring-credhub-core/src/main/java/org/springframework/credhub/support/ssh/SshCredential.java
index <HASH>..<HASH> 100644
--- a/spring-credhub-core/src/main/java/org/springframework/credhub/support/ssh/SshCredential.java
+++ b/spring-credhub-core/src/main/java/org/springframework/credhub/support/ssh/SshCredential.java
@@ -21,14 +21,17 @@ import org.springframework.credhub.support.KeyPairCredential;
 /**
  * An SSH credential consists of a public and/or private key. At least one of these key values must be provided.
  *
- * @author Scott Frederick 
+ * @author Scott Frederick
 */
 public class SshCredential extends KeyPairCredential {
+    private final String publicKeyFingerprint;
+
    /**
     * Create an empty {@link SshCredential}. Intended to be used internally for deserialization of responses.
     */
    private SshCredential() {
        super();
+        publicKeyFingerprint = null;
    }

    /**
@@ -40,5 +43,16 @@ public class SshCredential extends KeyPairCredential {
     */
    public SshCredential(String publicKey, String privateKey) {
        super(publicKey, privateKey);
+        publicKeyFingerprint = null;
+    }
+
+    /**
+     * Get the fingerprint of the public key associated with the credential. This value can not be provided
+     * when creating an {@code SshCredential}, but may be provided by CredHub when retrieving one.
+     *
+     * @return the public key fingerprint value
+     */
+    public String getPublicKeyFingerprint() {
+        return publicKeyFingerprint;
+    }
 }
diff --git a/spring-credhub-core/src/test/java/org/springframework/credhub/core/CredHubTemplateDetailSshUnitTests.java b/spring-credhub-core/src/test/java/org/springframework/credhub/core/CredHubTemplateDetailSshUnitTests.java
index <HASH>..<HASH> 100644
--- a/spring-credhub-core/src/test/java/org/springframework/credhub/core/CredHubTemplateDetailSshUnitTests.java
+++ b/spring-credhub-core/src/test/java/org/springframework/credhub/core/CredHubTemplateDetailSshUnitTests.java
@@ -44,12 +44,12 @@ public class CredHubTemplateDetailSshUnitTests

    @DataPoints("detail-responses")
    public static List<ResponseEntity<CredentialDetails<SshCredential>>> buildDetailResponses() {
-        return buildDetailResponses(CredentialType.RSA, CREDENTIAL);
+        return buildDetailResponses(CredentialType.SSH, CREDENTIAL);
    }

    @DataPoints("data-responses")
    public static List<ResponseEntity<CredentialDetailsData<SshCredential>>> buildDataResponses() {
-        return buildDataResponses(CredentialType.RSA, CREDENTIAL);
+        return buildDataResponses(CredentialType.SSH, CREDENTIAL);
    }

    @Override
diff --git a/spring-credhub-core/src/test/java/org/springframework/credhub/support/ssh/SshCredentialDetailsUnitTests.java b/spring-credhub-core/src/test/java/org/springframework/credhub/support/ssh/SshCredentialDetailsUnitTests.java
index <HASH>..<HASH> 100644
--- a/spring-credhub-core/src/test/java/org/springframework/credhub/support/ssh/SshCredentialDetailsUnitTests.java
+++ b/spring-credhub-core/src/test/java/org/springframework/credhub/support/ssh/SshCredentialDetailsUnitTests.java
@@ -31,14 +31,15 @@ public class SshCredentialDetailsUnitTests extends JsonParsingUnitTestsBase {
            "  \"type\": \"ssh\"," +
            "  \"value\": {" +
            "    \"private_key\": \"private-key\"," +
-            "    \"public_key\": \"public-key\"" +
+            "    \"public_key\": \"public-key\"," +
+            "    \"public_key_fingerprint\": \"fingerprint\"" +
            "  }";

    @Test
    public void deserializeDetailsWithPublicAndPrivateKeys() throws Exception {
        CredentialDetails<SshCredential> data = parseDetails(SSH_CREDENTIALS);

-        assertDetails(data, "public-key", "private-key");
+        assertDetails(data, "public-key", "private-key", "fingerprint");
    }

    @Test
@@ -50,7 +51,7 @@ public class SshCredentialDetailsUnitTests extends JsonParsingUnitTestsBase {
            "  }";

        CredentialDetails<SshCredential> data = parseDetails(credentials);
-        assertDetails(data, "public-key", null);
+        assertDetails(data, "public-key", null, null);
    }

    @Test
@@ -62,7 +63,7 @@ public class SshCredentialDetailsUnitTests extends JsonParsingUnitTestsBase {
            "  }";

        CredentialDetails<SshCredential> data = parseDetails(credentials);
-        assertDetails(data, null, "private-key");
+        assertDetails(data, null, "private-key", null);
    }

    @Test
@@ -73,14 +74,16 @@ public class SshCredentialDetailsUnitTests extends JsonParsingUnitTestsBase {

        CredentialDetails<SshCredential> data = response.getData().get(0);

-        assertDetails(data, "public-key", "private-key");
+        assertDetails(data, "public-key", "private-key", "fingerprint");
    }

-    private void assertDetails(CredentialDetails<SshCredential> data, String publicKey, String privateKey) {
+    private void assertDetails(CredentialDetails<SshCredential> data,
+            String publicKey, String privateKey, String publicKeyFingerprint) {
        assertCommonDetails(data);
        assertThat(data.getCredentialType(), equalTo(CredentialType.SSH));
        assertThat(data.getValue().getPublicKey(), equalTo(publicKey));
        assertThat(data.getValue().getPrivateKey(), equalTo(privateKey));
+        assertThat(data.getValue().getPublicKeyFingerprint(), equalTo(publicKeyFingerprint));
    }
 }
Add publicKeyFingerprint field to get SSH response Fixes #<I>
spring-projects_spring-credhub
train
863a8ddfffd7e56ed8cb34220d8342b1cff53c43
diff --git a/framework/core/js/src/forum/components/PostMeta.js b/framework/core/js/src/forum/components/PostMeta.js index <HASH>..<HASH> 100644 --- a/framework/core/js/src/forum/components/PostMeta.js +++ b/framework/core/js/src/forum/components/PostMeta.js @@ -55,6 +55,6 @@ export default class PostMeta extends Component { * @returns {string} */ getPermalink(post) { - return app.forum.attribute('baseUrl') + app.route.post(post); + return app.forum.attribute('baseOrigin') + app.route.post(post); } } diff --git a/framework/core/src/Api/Serializer/ForumSerializer.php b/framework/core/src/Api/Serializer/ForumSerializer.php index <HASH>..<HASH> 100644 --- a/framework/core/src/Api/Serializer/ForumSerializer.php +++ b/framework/core/src/Api/Serializer/ForumSerializer.php @@ -75,7 +75,8 @@ class ForumSerializer extends AbstractSerializer 'description' => $this->settings->get('forum_description'), 'showLanguageSelector' => (bool) $this->settings->get('show_language_selector', true), 'baseUrl' => $url = $this->url->to('forum')->base(), - 'basePath' => parse_url($url, PHP_URL_PATH) ?: '', + 'basePath' => $path = parse_url($url, PHP_URL_PATH) ?: '', + 'baseOrigin' => substr($url, 0, strlen($url) - strlen($path)), 'debug' => $this->config->inDebugMode(), 'apiUrl' => $this->url->to('api')->base(), 'welcomeTitle' => $this->settings->get('welcome_title'),
fix: don't duplicate path in URL to post on subdirectory installs. (#<I>) `baseUrl` includes the path portion of the url, as does `basePath`, which is included in `app.route.x` function outputs. The `baseOrigin` (with no path component at all) should be used instead.
flarum_core
train
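The new baseOrigin is just baseUrl with its path component chopped off, which the serializer above does with substr arithmetic. The same split rendered in Python for comparison (a sketch, not Flarum code):

# Equivalent origin/path split, mirroring the substr arithmetic in the
# serializer above; the URL is an illustrative example.
from urllib.parse import urlparse

def split_base(url):
    path = urlparse(url).path or ''
    origin = url[:len(url) - len(path)] if path else url
    return origin, path

origin, path = split_base('https://forum.example.com/community')
assert origin == 'https://forum.example.com'
assert path == '/community'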
6c88b3a3a81c3674030b64d8a4c298ec515323dc
diff --git a/molgenis-data-rest/src/test/java/org/molgenis/data/rest/RestControllerIT.java b/molgenis-data-rest/src/test/java/org/molgenis/data/rest/RestControllerIT.java index <HASH>..<HASH> 100644 --- a/molgenis-data-rest/src/test/java/org/molgenis/data/rest/RestControllerIT.java +++ b/molgenis-data-rest/src/test/java/org/molgenis/data/rest/RestControllerIT.java @@ -244,9 +244,9 @@ public class RestControllerIT @Test public void testRetrieveEntityCollection() { + String responseBody = "\"name\"\n" + "\"python\"\n" + "\"R\"\n" + "\"JavaScript (Magma)\"\n" + "\"JavaScript\"\n"; given().log().all().header(X_MOLGENIS_TOKEN, this.testUserToken).contentType(TEXT_CSV).when() - .get(PATH + "csv/sys_scr_ScriptType").then().log().all().statusCode(200).body(equalTo( - "\"name\"\n" + "\"python\"\n" + "\"R\"\n" + "\"JavaScript (Magma)\"\n" + "\"JavaScript\"\n")); + .get(PATH + "csv/sys_scr_ScriptType").then().log().all().statusCode(200).body(equalTo(responseBody)); } private void noPermissionRequest(String requestedEntity)
Move responseBody to a variable for retrieveEntityCollection
molgenis_molgenis
train
2240e6d05be3288cbde4819a5ea7930d2922cc08
diff --git a/worldengine/generation.py b/worldengine/generation.py index <HASH>..<HASH> 100644 --- a/worldengine/generation.py +++ b/worldengine/generation.py @@ -158,18 +158,16 @@ def sea_depth(world, sea_level): height, width = ocean.shape for dist in range(max_radius): - for y in range(height): - for x in range(width): - if next_land[y,x] == -1: + indices = numpy.transpose(numpy.where(next_land==dist)) + for y, x in indices: + for dy in range(-1, 2): + ny = y + dy + if 0 <= ny < height: for dx in range(-1, 2): nx = x + dx if 0 <= nx < width: - for dy in range(-1, 2): - ny = y + dy - if 0 <= ny < height and (dx != 0 or dy != 0): - if next_land[ny,nx] == dist: - next_land[y,x] = dist+1 - + if next_land[ny,nx] == -1: + next_land[ny,nx] = dist + 1 return next_land # We want to multiply the raw sea_depth by one of these factors
transpose next_land_dynamic for greater speed and clarity (#<I>)
Mindwerks_worldengine
train
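The rewrite turns a full-grid scan per distance into frontier expansion: numpy.where picks out the cells at the current distance, and only their unvisited neighbours are stamped with dist + 1. A minimal runnable version of that breadth-first distance transform, with the neighbourhood handling simplified relative to worldengine's:

# Minimal BFS distance-from-land transform using a numpy frontier, in the
# same spirit as the patched loop; boundary handling kept deliberately plain.
import numpy

def sea_distance(next_land, max_radius):
    height, width = next_land.shape
    for dist in range(max_radius):
        for y, x in numpy.transpose(numpy.where(next_land == dist)):
            for dy in (-1, 0, 1):
                ny = y + dy
                if not 0 <= ny < height:
                    continue
                for dx in (-1, 0, 1):
                    nx = x + dx
                    if 0 <= nx < width and next_land[ny, nx] == -1:
                        next_land[ny, nx] = dist + 1
    return next_land

grid = numpy.full((3, 3), -1)
grid[1, 1] = 0  # a single land cell
print(sea_distance(grid, 2))  # every sea cell ends up at distance 1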
e67a06d3fb79634bc5f4d47d52c1a54eee27f03a
diff --git a/Form/Extension/DateExtension.php b/Form/Extension/DateExtension.php index <HASH>..<HASH> 100644 --- a/Form/Extension/DateExtension.php +++ b/Form/Extension/DateExtension.php @@ -69,11 +69,9 @@ class DateExtension extends AbstractTypeExtension 'leading_zero', )); - $resolver->setAllowedTypes(array( - 'placeholder' => array('string'), - 'language' => array('string'), - 'leading_zero' => array('bool'), - )); + $resolver->setAllowedTypes('placeholder', 'string'); + $resolver->setAllowedTypes('language', 'string'); + $resolver->setAllowedTypes('leading_zero', 'bool'); } /** diff --git a/Form/Type/ObjectToIdentifierType.php b/Form/Type/ObjectToIdentifierType.php index <HASH>..<HASH> 100644 --- a/Form/Type/ObjectToIdentifierType.php +++ b/Form/Type/ObjectToIdentifierType.php @@ -63,9 +63,7 @@ class ObjectToIdentifierType extends AbstractType ->setDefaults(array( 'identifier' => 'id', )) - ->setAllowedTypes(array( - 'identifier' => array('string'), - )) + ->setAllowedTypes('identifier', 'string') ; } diff --git a/spec/Form/Extension/DateExtensionSpec.php b/spec/Form/Extension/DateExtensionSpec.php index <HASH>..<HASH> 100644 --- a/spec/Form/Extension/DateExtensionSpec.php +++ b/spec/Form/Extension/DateExtensionSpec.php @@ -50,11 +50,9 @@ class DateExtensionSpec extends ObjectBehavior 'leading_zero', ))->shouldBeCalled(); - $resolver->setAllowedTypes(array( - 'placeholder' => array('string'), - 'language' => array('string'), - 'leading_zero' => array('bool'), - ))->shouldBeCalled(); + $resolver->setAllowedTypes('placeholder', 'string')->shouldBeCalled(); + $resolver->setAllowedTypes('language', 'string')->shouldBeCalled(); + $resolver->setAllowedTypes('leading_zero', 'bool')->shouldBeCalled(); $this->configureOptions($resolver); } diff --git a/spec/Form/Type/ObjectToIdentifierTypeSpec.php b/spec/Form/Type/ObjectToIdentifierTypeSpec.php index <HASH>..<HASH> 100644 --- a/spec/Form/Type/ObjectToIdentifierTypeSpec.php +++ b/spec/Form/Type/ObjectToIdentifierTypeSpec.php @@ -50,9 +50,7 @@ class ObjectToIdentifierTypeSpec extends ObjectBehavior 'identifier' => 'id' ))->willReturn($resolver); - $resolver->setAllowedTypes(array( - 'identifier' => array('string') - ))->willReturn($resolver); + $resolver->setAllowedTypes('identifier', 'string')->willReturn($resolver); $this->configureOptions($resolver); }
Refactored OptionsResolver::setAllowedTypes() to non-deprecated form
Sylius_SyliusResourceBundle
train
e1b9c820a543143fc8bcb4b4c9b354d13bc9a309
diff --git a/src/Caouecs/Sirtrevorjs/SirTrevorJsConverter.php b/src/Caouecs/Sirtrevorjs/SirTrevorJsConverter.php index <HASH>..<HASH> 100644 --- a/src/Caouecs/Sirtrevorjs/SirTrevorJsConverter.php +++ b/src/Caouecs/Sirtrevorjs/SirTrevorJsConverter.php @@ -162,7 +162,7 @@ class SirTrevorJsConverter $this->view = 'sirtrevorjs::fb'; } - $this->output = 'fbarticles'; + $this->output = 'fb'; return $this->convert($json); }
fix: converter - fb
caouecs_Laravel-SirTrevorJS
train
900c23e9a5ef18295ce33676a3e9498d871cc5e0
diff --git a/src/com/google/javascript/jscomp/Es6ToEs3ClassSideInheritance.java b/src/com/google/javascript/jscomp/Es6ToEs3ClassSideInheritance.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/Es6ToEs3ClassSideInheritance.java +++ b/src/com/google/javascript/jscomp/Es6ToEs3ClassSideInheritance.java @@ -20,7 +20,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; -import com.google.javascript.jscomp.NodeTraversal.AbstractPreOrderCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSDocInfoBuilder; @@ -80,12 +79,8 @@ public final class Es6ToEs3ClassSideInheritance extends AbstractPostOrderCallbac @Override public void process(Node externs, Node root) { - FindCopyProp findCopyProp = new FindCopyProp(); - NodeTraversal.traverse(compiler, root, findCopyProp); - if (findCopyProp.found) { - NodeTraversal.traverse(compiler, root, new FindStaticMembers()); - NodeTraversal.traverse(compiler, root, this); - } + NodeTraversal.traverse(compiler, root, new FindStaticMembers()); + NodeTraversal.traverse(compiler, root, this); } @Override @@ -134,21 +129,6 @@ public final class Es6ToEs3ClassSideInheritance extends AbstractPostOrderCallbac staticMembers.put(subclassNameNode.getQualifiedName(), assign); } - private class FindCopyProp extends AbstractPreOrderCallback { - - private boolean found = false; - - @Override - public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { - if (found || n.isCall() - && n.getFirstChild().matchesQualifiedName(Es6ToEs3Converter.COPY_PROP)) { - found = true; - return false; - } - return true; - } - } - private class FindStaticMembers extends NodeTraversal.AbstractPostOrderCallback { private final Set<String> classNames = new HashSet<>();
Remove superfluous pass from Es6ToEs3ClassSideInheritance ------------- Created by MOE: <URL>
google_closure-compiler
train
f7479c374448beb11d25830184a1c7e759893f29
diff --git a/src/visualizers/widgets/HFSMViz/HFSMVizWidget.js b/src/visualizers/widgets/HFSMViz/HFSMVizWidget.js index <HASH>..<HASH> 100644 --- a/src/visualizers/widgets/HFSMViz/HFSMVizWidget.js +++ b/src/visualizers/widgets/HFSMViz/HFSMVizWidget.js @@ -105,6 +105,7 @@ define([ 'edges': {} }; this.waitingNodes = {}; + this.droppedChild = {}; // LAYOUT RELATED DATA this._handle = this._el.find('#hfsmVizHandle'); @@ -635,6 +636,13 @@ define([ n.position( parentPos ); } + if (self.droppedChild && self.droppedChild.id && self.droppedChild.position) { + if (self.droppedChild.id == desc.id || self.droppedChild.id == desc.parentId) { + n.renderedPosition( self.droppedChild.position ); + self._clearDroppedChild(); + } + } + self.nodes[desc.id] = desc; self.updateDependencies(); self.debouncedReLayout(); @@ -889,7 +897,22 @@ define([ return canCreate; }; - HFSMVizWidget.prototype._createChild = function( nodeId, parentId ) { + HFSMVizWidget.prototype._clearDroppedChild = function() { + var self = this; + self.droppedChild = {}; + }; + + HFSMVizWidget.prototype._updateDroppedChild = function( nodeId, event ) { + var self = this; + var pos = self._getContainerPosFromEvent(event); + pos.x -= $(self._left).width(); + self.droppedChild = { + id: nodeId, + position: pos + }; + }; + + HFSMVizWidget.prototype._createChild = function( nodeId, parentId, event ) { var self = this, client = self._client, node = client.getNode(nodeId); @@ -906,12 +929,14 @@ define([ baseId: nodeId, }; self.forceShowChildren( cyNode.id() ); - client.createChild(childCreationParams, 'Creating new child'); + var newId = client.createChild(childCreationParams, 'Creating new child'); + self._updateDroppedChild( newId, event ); } else { self.forceShowChildren( cyNode.id() ); var params = {parentId: parentId}; params[nodeId] = {}; + self._updateDroppedChild( parentId, event ); client.startTransaction(); client.copyMoreNodes(params); client.completeTransaction(); @@ -971,8 +996,11 @@ define([ }, drop: function (event, dragInfo) { if (self._isValidDrop(event, dragInfo)) { - self._createChild( dragInfo[DROP_CONSTANTS.DRAG_ITEMS][0], - self._hoveredNodeId ); + self._createChild( + dragInfo[DROP_CONSTANTS.DRAG_ITEMS][0], + self._hoveredNodeId, + event + ); } self._isDropping = false; self._dropId = null;
Drop creation / copying of nodes now sets the position to the drop location. closes #<I>.
finger563_webgme-hfsm
train
562972c9b0d0e6302d122fb7a1ba6efbb567cadb
diff --git a/engine/test_polygon.py b/engine/test_polygon.py index <HASH>..<HASH> 100644 --- a/engine/test_polygon.py +++ b/engine/test_polygon.py @@ -1664,9 +1664,11 @@ class Test_Polygon(unittest.TestCase): p4 = [122.22472918310231, -8.6218197001101728] line0 = [p1, p2] line1 = [p3, p4] - Vector(geometry=[line0, line1], - geometry_type='line').write_to_file('impossible_state.shp') + #Vector(geometry=[line0, line1], + # geometry_type='line').write_to_file('impossible_state.shp') status, value = intersection(line0, line1) + assert status == 0 + assert value is None def test_clip_line_by_polygon_simple(self): """Simple lines are clipped and classified by polygon
Removed generated file from polygon test
inasafe_inasafe
train
27d0ea5e51f9dbf2d09d7c277cd5a1139f31af24
diff --git a/src/server/pfs/server/local_block_api_server.go b/src/server/pfs/server/local_block_api_server.go index <HASH>..<HASH> 100644 --- a/src/server/pfs/server/local_block_api_server.go +++ b/src/server/pfs/server/local_block_api_server.go @@ -173,6 +173,14 @@ func (s *localBlockAPIServer) InspectObject(ctx context.Context, request *pfscli }, nil } +func (s *localBlockAPIServer) ListObjects(request *pfsclient.ListObjectsRequest, listObjectsServer pfsclient.ObjectAPI_ListObjectsServer) (retErr error) { + return nil +} + +func (s *localBlockAPIServer) ListObjectsTaggedWithPrefix(request *pfsclient.ListObjectsTaggedWithPrefixRequest, server pfsclient.ObjectAPI_ListObjectsTaggedWithPrefixServer) (retErr error) { + return nil +} + func (s *localBlockAPIServer) GetTag(request *pfsclient.Tag, getTagServer pfsclient.ObjectAPI_GetTagServer) (retErr error) { func() { s.Log(request, nil, nil, 0) }() defer func(start time.Time) { s.Log(request, nil, retErr, time.Since(start)) }(time.Now()) diff --git a/src/server/pfs/server/obj_block_api_server.go b/src/server/pfs/server/obj_block_api_server.go index <HASH>..<HASH> 100644 --- a/src/server/pfs/server/obj_block_api_server.go +++ b/src/server/pfs/server/obj_block_api_server.go @@ -321,7 +321,7 @@ func (s *objBlockAPIServer) ListObjectsTaggedWithPrefix(request *pfsclient.ListO if err := s.readProto(hash, tagObjectIndex); err != nil { return err } - for tag, object := range tagObjectIndex.Tags { + for _, object := range tagObjectIndex.Tags { server.Send(object) } return nil
Add no-op implementation of the new APIs to the local block server
pachyderm_pachyderm
train
54ab2b48902be0082584b8f209fed0e71a52acff
diff --git a/src/Api/DataTransformer/Sales/Order/Shipment.php b/src/Api/DataTransformer/Sales/Order/Shipment.php index <HASH>..<HASH> 100644 --- a/src/Api/DataTransformer/Sales/Order/Shipment.php +++ b/src/Api/DataTransformer/Sales/Order/Shipment.php @@ -33,8 +33,15 @@ class Shipment extends DataTransformerAbstract * @param string $trackUrl * @param string $status */ - public function __construct($orderId, array $items, $trackCode, $trackCarrier, $trackMethod, $trackUrl, $status = null) - { + public function __construct( + $orderId, + array $items, + $trackCode, + $trackCarrier, + $trackMethod, + $trackUrl, + $status = null + ) { $shipment = [ 'shipment' => [ 'code' => $orderId,
Fixing the count of characters in the line.
bittools_skyhub-php
train
efe83dd07dde3ab0d6733e36eb35f50af18dfa0e
diff --git a/src/wheel/wheel.js b/src/wheel/wheel.js index <HASH>..<HASH> 100644 --- a/src/wheel/wheel.js +++ b/src/wheel/wheel.js @@ -85,6 +85,9 @@ newX = this.x + Math.round(this.hasHorizontalScroll ? wheelDeltaX : 0); newY = this.y + Math.round(this.hasVerticalScroll ? wheelDeltaY : 0); + this.directionX = wheelDeltaX > 0 ? -1 : wheelDeltaX < 0 ? 1 : 0; + this.directionY = wheelDeltaY > 0 ? -1 : wheelDeltaY < 0 ? 1 : 0; + if ( newX > 0 ) { newX = 0; } else if ( newX < this.maxScrollX ) {
fix directionX,Y when scrolling with mouse ref #<I>, #<I>
cubiq_iscroll
train
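The added lines derive a direction sign from the wheel delta: a positive delta moves the content toward the origin, hence -1; negative gives 1; zero stays 0. The same ternary chain, sketched in Python:

# Sign convention from the patch: positive wheel delta moves toward the
# origin, hence direction -1; zero delta yields 0.
def wheel_direction(delta):
    return -1 if delta > 0 else 1 if delta < 0 else 0

assert [wheel_direction(d) for d in (5, -3, 0)] == [-1, 1, 0]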
b4819d0c062b7eb6a9b015abcf3f90e138841fe1
diff --git a/lib/motion/project/cocoapods.rb b/lib/motion/project/cocoapods.rb index <HASH>..<HASH> 100644 --- a/lib/motion/project/cocoapods.rb +++ b/lib/motion/project/cocoapods.rb @@ -368,7 +368,7 @@ end namespace :pod do task :update_spec_repos do - $stderr.puts '[!] If you need to update CocoaPods repogitory to install newer libraries, please run "pod repo update" command before.' + $stderr.puts '[!] If you need to update CocoaPods repository to install newer libraries, please run "pod repo update" command before.' end desc "Download and integrate newly added pods"
Fix 'repogitory' to 'repository' This seems like a typo
HipByte_motion-cocoapods
train
7b49d566b305f5417bdf392e66103c5eadeb2593
diff --git a/lib/exportResume.js b/lib/exportResume.js index <HASH>..<HASH> 100644 --- a/lib/exportResume.js +++ b/lib/exportResume.js @@ -6,6 +6,7 @@ var resumeToPDF = require('resume-to-pdf'); var resumeToMarkdown = require('resume-to-markdown'); var fs = require('fs'); var read = require('read'); +var registryServer = process.env.REGISTRY_SERVER || 'http://registry.jsonresume.org'; var Spinner = require('cli-spinner').Spinner; var spinner = new Spinner('downloading...'); spinner.setSpinnerString('/-\\'); @@ -61,7 +62,7 @@ function exportResume(resumeJson, fileName, callback) { spinner.start(); request // .post('http://localhost:5000/resume') - .post('http://registry.jsonresume.org/resume') + .post(registryServer + '/resume') .send({ resume: resumeJson, email: null, @@ -83,20 +84,10 @@ function exportResume(resumeJson, fileName, callback) { console.log(error); console.log('There has been an error publishing your resume.'.red); } else { - - - download(res.body.url + '.pdf', fileName, function(err, data) { spinner.stop(); console.log('\nDone! Find your generated .pdf resume at:'.green, process.cwd() + '/' + fileName); - - // console.log(err, data); - // only returns undefined undefined - // do nothing? - }) - - - + }); } }); return;
if exporting as pdf, convert and download from the server
jsonresume_resume-cli
train
9c8cc15f75a659883cd942f61f15382f1c0c8c50
diff --git a/txaws/server/resource.py b/txaws/server/resource.py index <HASH>..<HASH> 100644 --- a/txaws/server/resource.py +++ b/txaws/server/resource.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from uuid import uuid4 -from pytz import UTC +from dateutil.tz import tzutc from twisted.python import log from twisted.python.reflect import safe_str @@ -159,7 +159,7 @@ class QueryAPI(Resource): def get_utc_time(self): """Return a C{datetime} object with the current time in UTC.""" - return datetime.now(UTC) + return datetime.now(tzutc()) def _validate(self, request): """Validate an L{HTTPRequest} before executing it. diff --git a/txaws/server/schema.py b/txaws/server/schema.py index <HASH>..<HASH> 100644 --- a/txaws/server/schema.py +++ b/txaws/server/schema.py @@ -1,7 +1,7 @@ from datetime import datetime from operator import itemgetter -from pytz import UTC +from dateutil.tz import tzutc from zope.datetime import parse, SyntaxError @@ -243,7 +243,7 @@ class Date(Parameter): def parse(self, value): try: - return datetime(*parse(value, local=False)[:6], tzinfo=UTC) + return datetime(*parse(value, local=False)[:6], tzinfo=tzutc()) except (TypeError, SyntaxError): raise ValueError() diff --git a/txaws/server/tests/test_resource.py b/txaws/server/tests/test_resource.py index <HASH>..<HASH> 100644 --- a/txaws/server/tests/test_resource.py +++ b/txaws/server/tests/test_resource.py @@ -1,7 +1,7 @@ from json import dumps, loads -from pytz import UTC from cStringIO import StringIO from datetime import datetime +from dateutil.tz import tzutc from twisted.trial.unittest import TestCase from twisted.python.reflect import safe_str @@ -542,7 +542,7 @@ class QueryAPITest(TestCase): self.assertEqual("data", request.response) self.assertEqual(200, request.code) - now = datetime(2009, 12, 31, tzinfo=UTC) + now = datetime(2009, 12, 31, tzinfo=tzutc()) self.api.get_utc_time = lambda: now self.api.principal = TestPrincipal(creds) return self.api.handle(request).addCallback(check) @@ -567,7 +567,7 @@ class QueryAPITest(TestCase): " 2010-01-01T12:00:00Z", request.response) self.assertEqual(400, request.code) - now = datetime(2010, 1, 1, 12, 0, 1, tzinfo=UTC) + now = datetime(2010, 1, 1, 12, 0, 1, tzinfo=tzutc()) self.api.get_utc_time = lambda: now return self.api.handle(request).addCallback(check) diff --git a/txaws/server/tests/test_schema.py b/txaws/server/tests/test_schema.py index <HASH>..<HASH> 100644 --- a/txaws/server/tests/test_schema.py +++ b/txaws/server/tests/test_schema.py @@ -2,7 +2,7 @@ from datetime import datetime -from pytz import UTC, FixedOffset +from dateutil.tz import tzutc, tzoffset from twisted.trial.unittest import TestCase @@ -318,7 +318,7 @@ class DateTest(TestCase): def test_parse(self): """L{Date.parse checks that the given raw C{value} is a date/time.""" parameter = Date("Test") - date = datetime(2010, 9, 15, 23, 59, 59, tzinfo=UTC) + date = datetime(2010, 9, 15, 23, 59, 59, tzinfo=tzutc()) self.assertEqual(date, parameter.parse("2010-09-15T23:59:59Z")) def test_format(self): @@ -328,7 +328,7 @@ class DateTest(TestCase): """ parameter = Date("Test") date = datetime(2010, 9, 15, 23, 59, 59, - tzinfo=FixedOffset(120)) + tzinfo=tzoffset('UTC', 120*60)) self.assertEqual("2010-09-15T21:59:59Z", parameter.format(date))
drop pytz for dateutil
twisted_txaws
train
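The swap assumes dateutil offers drop-in equivalents: tzutc() for pytz.UTC, and tzoffset(name, seconds) for FixedOffset(minutes) — note the unit change, which is why the test now passes 120*60. A quick check of those semantics with illustrative values:

# Quick check of the tzoffset semantics relied on above; tzoffset takes an
# offset in seconds where pytz's FixedOffset took minutes.
from datetime import datetime
from dateutil.tz import tzutc, tzoffset

d = datetime(2010, 9, 15, 23, 59, 59, tzinfo=tzoffset(None, 2 * 3600))
assert d.astimezone(tzutc()).hour == 21  # 23:59:59+02:00 == 21:59:59Z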
b6c4251a42c841b457002f2088f721751df2046e
diff --git a/tests/test_languages.py b/tests/test_languages.py index <HASH>..<HASH> 100644 --- a/tests/test_languages.py +++ b/tests/test_languages.py @@ -64,6 +64,7 @@ class TestBundledLanguages(BaseTestCase): param('en', "2014-12-12T12:33:39-08:00", "2014-12-12 12:33:39-08:00"), param('en', "2014-10-15T16:12:20+00:00", "2014-10-15 16:12:20+00:00"), param('en', "28 Oct 2014 16:39:01 +0000", "28 october 2014 16:39:01 +0000"), + param('es', "13 Febrero 2015 a las 23:00", "13 february 2015 23:00") ]) def test_translation(self, shortname, datetime_string, expected_translation): self.given_bundled_language(shortname)
Added a more descriptive test for 'a las' phrase
scrapinghub_dateparser
train
3766670cce1ab2e12aca7f3a987989441c368e62
diff --git a/src/Middleware/SocialAuthMiddleware.php b/src/Middleware/SocialAuthMiddleware.php index <HASH>..<HASH> 100644 --- a/src/Middleware/SocialAuthMiddleware.php +++ b/src/Middleware/SocialAuthMiddleware.php @@ -184,7 +184,7 @@ class SocialAuthMiddleware } if (!$user) { - $user = $this->_getUser($profile); + $user = $this->_getUserEntity($profile); } $profile->user_id = $user->id; @@ -266,7 +266,7 @@ class SocialAuthMiddleware * * @return \Cake\Datasource\EntityInterface User entity. */ - protected function _getUser(EntityInterface $profile) + protected function _getUserEntity(EntityInterface $profile) { $callbackMethod = $this->config('getUserCallback');
fix: properly name userentity method
ADmad_cakephp-social-auth
train
ade94bb0070a9f1daf100ca521425978c3652592
diff --git a/gulp/makeCss.js b/gulp/makeCss.js index <HASH>..<HASH> 100644 --- a/gulp/makeCss.js +++ b/gulp/makeCss.js @@ -17,7 +17,7 @@ gulp.task('makeCss', function() { .on("error", notify.onError(function(error) { return error.message; })) - .pipe(concat(paths.bundleName + '.css')) + .pipe(concat(paths.bundleFileName + '.css')) .pipe(gulp.dest(paths.bundleDir)) .pipe(minifyCSS({ //the minifyer does not work well with lines including a comment. e.g. @@ -26,7 +26,7 @@ gulp.task('makeCss', function() { //So, disable the 'advantaced' feature. This only makes the minified file 100 bytes larger noAdvanced: true, })) - .pipe(rename(paths.bundleName + '.min.css')) + .pipe(rename(paths.bundleFileName + '.min.css')) .pipe(gulp.dest(paths.bundleDir)) .pipe(connect.reload()); }) diff --git a/gulp/makeJs.js b/gulp/makeJs.js index <HASH>..<HASH> 100644 --- a/gulp/makeJs.js +++ b/gulp/makeJs.js @@ -23,10 +23,10 @@ gulp.task('browserify', function() { bundler.exclude(modName); } bundler.bundle() - .pipe(exorcist(paths.bundleDir + '/' + paths.bundleName + '.js.map')) - .pipe(source(paths.bundleName + '.js')) + .pipe(exorcist(paths.bundleDir + '/' + paths.bundleFileName + '.js.map')) + .pipe(source(paths.bundleFileName + '.js')) .pipe(gulp.dest(paths.bundleDir)) - .pipe(rename(paths.bundleName + '.min.js')) + .pipe(rename(paths.bundleFileName + '.min.js')) .pipe(buffer()) .pipe(sourcemaps.init({ loadMaps: true, @@ -50,10 +50,10 @@ gulp.task('browserifyWithDeps', function() { return bundler .bundle() - .pipe(exorcist(paths.bundleDir + '/' + paths.bundleName + '.bundled.js.map')) - .pipe(source(paths.bundleName + '.bundled.js')) + .pipe(exorcist(paths.bundleDir + '/' + paths.bundleFileName + '.bundled.js.map')) + .pipe(source(paths.bundleFileName + '.bundled.js')) .pipe(gulp.dest(paths.bundleDir)) - .pipe(rename(paths.bundleName + '.bundled.min.js')) + .pipe(rename(paths.bundleFileName + '.bundled.min.js')) .pipe(buffer()) .pipe(sourcemaps.init({ loadMaps: true, @@ -83,7 +83,7 @@ gulp.task('browserifyForDebug', function() { .on("error", notify.onError(function(error) { return error.message; })) - .pipe(source(paths.bundleName + '.bundled.min.js')) + .pipe(source(paths.bundleFileName + '.bundled.min.js')) .pipe(embedlr()) .pipe(gulp.dest(paths.bundleDir)) .pipe(connect.reload()); diff --git a/gulp/paths.js b/gulp/paths.js index <HASH>..<HASH> 100644 --- a/gulp/paths.js +++ b/gulp/paths.js @@ -1,5 +1,5 @@ module.exports = { style: ['src/scss/scoped.scss', 'src/scss/global.scss'], bundleDir: "dist", - bundleName: "yasr" + bundleFileName: "yasr" }; \ No newline at end of file
improved varnames, small refactoring
OpenTriply_YASGUI.YASR
train
799b4aea8bf2eb9a6ef2b914f8ca989f57030438
diff --git a/composer.json b/composer.json index <HASH>..<HASH> 100644 --- a/composer.json +++ b/composer.json @@ -17,7 +17,7 @@ }, "require": { "php": ">=7.1", - "laravie/codex": "^4.0" + "laravie/codex": "^4.0.1" }, "require-dev": { "mockery/mockery": "^1.1", diff --git a/src/Client.php b/src/Client.php index <HASH>..<HASH> 100644 --- a/src/Client.php +++ b/src/Client.php @@ -5,8 +5,10 @@ namespace Laravie\Webhook; use Laravie\Codex\Discovery; use Laravie\Codex\Support\HttpClient; use Http\Client\Common\HttpMethodsClient; +use Laravie\Codex\Contracts\Client as ClientContract; +use Laravie\Codex\Contracts\Request as RequestContract; -class Client +class Client implements ClientContract { use HttpClient; @@ -28,6 +30,20 @@ class Client } /** + * Handle uses using via. + * + * @param \Laravie\Webhook\Request $request + * + * @return \Laravie\Webhook\Request + */ + public function via(RequestContract $request): RequestContract + { + $request->setClient($this); + + return $request; + } + + /** * Set Content-Type value for webhook request. * * @param string $contentType diff --git a/src/Request.php b/src/Request.php index <HASH>..<HASH> 100644 --- a/src/Request.php +++ b/src/Request.php @@ -2,11 +2,15 @@ namespace Laravie\Webhook; +use Laravie\Codex\Support\Responsable; use Laravie\Codex\Request as BaseRequest; +use Laravie\Codex\Contracts\Request as RequestContract; use Laravie\Codex\Contracts\Response as ResponseContract; -abstract class Request extends BaseRequest +abstract class Request implements RequestContract { + use Responsable; + /** * Send Webhook request. *
Bump to codex <I>
laravie_webhook
train
79b6d31dfe8bba5c3f27c5f3a011e5a658d87400
diff --git a/trezor_agent/gpg/agent.py b/trezor_agent/gpg/agent.py index <HASH>..<HASH> 100644 --- a/trezor_agent/gpg/agent.py +++ b/trezor_agent/gpg/agent.py @@ -37,6 +37,11 @@ def sig_encode(r, s): return '(7:sig-val(5:ecdsa(1:r32:{})(1:s32:{})))'.format(r, s) +def _verify_keygrip(expected, actual): + if expected != actual: + raise KeyError('Keygrip mismatch: {!r} != {!r}', expected, actual) + + def pksign(keygrip, digest, algo): """Sign a message digest using a private EC key.""" assert algo == '8', 'Unsupported hash algorithm ID {}'.format(algo) @@ -46,7 +51,7 @@ def pksign(keygrip, digest, algo): use_custom=True, ecdh=False) pubkey, conn = encode.load_from_public_key(pubkey_dict=pubkey_dict) with contextlib.closing(conn): - assert pubkey.keygrip == binascii.unhexlify(keygrip) + _verify_keygrip(pubkey.keygrip, binascii.unhexlify(keygrip)) r, s = conn.sign(binascii.unhexlify(digest)) result = sig_encode(r, s) log.debug('result: %r', result) @@ -91,7 +96,7 @@ def pkdecrypt(keygrip, conn): use_custom=True, ecdh=True) pubkey, conn = encode.load_from_public_key(pubkey_dict=local_pubkey) with contextlib.closing(conn): - assert pubkey.keygrip == binascii.unhexlify(keygrip) + _verify_keygrip(pubkey.keygrip, binascii.unhexlify(keygrip)) return _serialize_point(conn.ecdh(remote_pubkey))
gpg: raise proper exception when keygrip mismatch is detected
romanz_trezor-agent
train
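Swapping assert for an explicit raise keeps the guard alive under python -O and yields a typed error. The same helper sketched standalone, with the mismatch message formatted eagerly before raising:

# Same guard as _verify_keygrip above, sketched standalone; the message is
# built with str.format before the KeyError is raised.
import binascii

def verify_keygrip(expected: bytes, actual: bytes) -> None:
    if expected != actual:
        raise KeyError('Keygrip mismatch: {!r} != {!r}'.format(expected, actual))

verify_keygrip(b'\x01\x02', binascii.unhexlify('0102'))  # passes silently
try:
    verify_keygrip(b'\x01', b'\x02')
except KeyError as e:
    print(e)  # Keygrip mismatch: b'\x01' != b'\x02'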
4741bd712e5d5f35a93f6b5883a01b2456d7fd29
diff --git a/openquake/baselib/tests/parallel_test.py b/openquake/baselib/tests/parallel_test.py index <HASH>..<HASH> 100644 --- a/openquake/baselib/tests/parallel_test.py +++ b/openquake/baselib/tests/parallel_test.py @@ -138,7 +138,9 @@ class StarmapTestCase(unittest.TestCase): self.assertEqual(num[b'waiting'], 4) self.assertEqual(num[b'total supertask'], 4) # tasks self.assertEqual(num[b'total get_length'], 17) # subtasks - self.assertGreater(len(h5['task_info']), 0) + info = h5['task_info'][()] + dic = dict(general.fast_agg3(info, 'taskname', ['received'])) + self.assertEqual(dic, {b'get_length': 357, b'supertask': 58}) shutil.rmtree(tmpdir) def test_countletters(self):
Added test on received bytes Former-commit-id: <I>e<I>e8cdc<I>bea<I>e5ff<I>eaf<I>
gem_oq-engine
train
43783db4f199de4283f39a4af8bc1bbb4a4a9b27
diff --git a/zinnia/templatetags/zinnia_tags.py b/zinnia/templatetags/zinnia_tags.py index <HASH>..<HASH> 100644 --- a/zinnia/templatetags/zinnia_tags.py +++ b/zinnia/templatetags/zinnia_tags.py @@ -157,6 +157,7 @@ def get_calendar_entries(context, year=None, month=None, """Return an HTML calendar of entries""" if not year or not month: date_month = context.get('month') or context.get('day') or \ + getattr(context.get('object'), 'creation_date', None) or \ datetime.today() year, month = date_month.timetuple()[:2]
updating calendar when an entry is viewed
Fantomas42_django-blog-zinnia
train
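The added clause relies on getattr's default surviving a missing object: getattr(None, 'creation_date', None) is simply None, so the or-chain falls through cleanly. The fallback in isolation, with Entry as a stand-in for Zinnia's model:

# The or-chain fallback used above, in isolation; Entry is a stand-in for
# Zinnia's entry model.
from datetime import datetime

class Entry:
    creation_date = datetime(2012, 6, 1)

for context in ({'object': Entry()}, {}):
    date_month = (context.get('month') or context.get('day') or
                  getattr(context.get('object'), 'creation_date', None) or
                  datetime.today())
    print(date_month.timetuple()[:2])  # (2012, 6), then today's (year, month)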
a4291da7565045be28fd46d857636a2bf59e1341
diff --git a/core/src/main/java/com/google/errorprone/bugpatterns/DoubleBraceInitialization.java b/core/src/main/java/com/google/errorprone/bugpatterns/DoubleBraceInitialization.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/google/errorprone/bugpatterns/DoubleBraceInitialization.java +++ b/core/src/main/java/com/google/errorprone/bugpatterns/DoubleBraceInitialization.java @@ -17,7 +17,7 @@ package com.google.errorprone.bugpatterns; import static com.google.common.collect.ImmutableList.toImmutableList; -import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; +import static com.google.errorprone.BugPattern.SeverityLevel.ERROR; import static com.google.errorprone.matchers.Description.NO_MATCH; import static com.google.errorprone.matchers.Matchers.expressionStatement; import static com.google.errorprone.matchers.method.MethodMatchers.constructor; @@ -66,7 +66,7 @@ import javax.lang.model.element.Modifier; summary = "Prefer collection factory methods or builders to the double-brace initialization" + " pattern.", - severity = WARNING) + severity = ERROR) public class DoubleBraceInitialization extends BugChecker implements NewClassTreeMatcher { @SuppressWarnings("ImmutableEnumChecker") // Matcher is immutable in practice diff --git a/core/src/main/java/com/google/errorprone/scanner/BuiltInCheckerSuppliers.java b/core/src/main/java/com/google/errorprone/scanner/BuiltInCheckerSuppliers.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/google/errorprone/scanner/BuiltInCheckerSuppliers.java +++ b/core/src/main/java/com/google/errorprone/scanner/BuiltInCheckerSuppliers.java @@ -623,6 +623,7 @@ public class BuiltInCheckerSuppliers { DiscardedPostfixExpression.class, DoNotCallChecker.class, DoNotMockChecker.class, + DoubleBraceInitialization.class, DuplicateMapKeys.class, DurationFrom.class, DurationGetTemporalUnit.class, @@ -807,7 +808,6 @@ public class BuiltInCheckerSuppliers { DoNotCallSuggester.class, DoNotClaimAnnotations.class, DoNotMockAutoValue.class, - DoubleBraceInitialization.class, DoubleCheckedLocking.class, EmptyBlockTag.class, EmptyCatch.class,
Give DoubleBraceInitialization a promotion to ERROR, but disable in build_defs for a release. PiperOrigin-RevId: <I>
google_error-prone
train
7a069dc6f8438b6f635d33dabf3e303c5a113243
diff --git a/activejob/test/support/integration/adapters/sidekiq.rb b/activejob/test/support/integration/adapters/sidekiq.rb index <HASH>..<HASH> 100644 --- a/activejob/test/support/integration/adapters/sidekiq.rb +++ b/activejob/test/support/integration/adapters/sidekiq.rb @@ -53,11 +53,20 @@ module SidekiqJobsManager require "sidekiq/cli" require "sidekiq/launcher" - config = Sidekiq - config[:queues] = ["integration_tests"] - config[:environment] = "test" - config[:concurrency] = 1 - config[:timeout] = 1 + if Sidekiq.respond_to?(:[]=) + config = Sidekiq + config[:queues] = ["integration_tests"] + config[:environment] = "test" + config[:concurrency] = 1 + config[:timeout] = 1 + else + config = { + queues: ["integration_tests"], + environment: "test", + concurrency: 1, + timeout: 1 + } + end sidekiq = Sidekiq::Launcher.new(config) Sidekiq.average_scheduled_poll_interval = 0.5 Sidekiq.options[:poll_interval_average] = 1
Work around Sidekiq <I> and <I> API difference
rails_rails
train
6cc280c15a6b28bf8f632f9cc3a2a6e55decad9d
diff --git a/system_tests/language.py b/system_tests/language.py index <HASH>..<HASH> 100644 --- a/system_tests/language.py +++ b/system_tests/language.py @@ -88,7 +88,7 @@ class TestLanguage(unittest.TestCase): self.assertEqual(entity2.metadata, {}) # Verify entity 3. self.assertEqual(entity3.name, self.NAME3) - self.assertEqual(entity3.entity_type, EntityType.EVENT) + self.assertEqual(entity3.entity_type, EntityType.WORK_OF_ART) self.assertGreater(entity3.salience, 0.0) self.assertEqual(entity3.mentions, [entity3.name]) wiki_url = ('http://en.wikipedia.org/wiki/'
Fixing system test value in Natural Language. The backend is finally recognizing "The Calling of Saint Matthew" as a work of art, rather than an event.
googleapis_google-cloud-python
train
3f5455c1bea54b8bc976ce8eb66417d42a676d77
diff --git a/lib/gsl.rb b/lib/gsl.rb index <HASH>..<HASH> 100644 --- a/lib/gsl.rb +++ b/lib/gsl.rb @@ -5,7 +5,8 @@ end begin require "gsl/#{RUBY_VERSION[/\d+.\d+/]}/gsl_native" -rescue LoadError +rescue LoadError => err + raise if err.respond_to?(:path) && !err.path require 'gsl/gsl_native' end
lib/gsl.rb: Reraise extension LoadError other than "cannot load such file".
SciRuby_rb-gsl
train
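The rescue now distinguishes "file genuinely missing, try the fallback require" from "extension found but failed to load, surface the error". A rough Python analogue of that discrimination, using ModuleNotFoundError versus other ImportErrors:

# Rough Python analogue of the rescue above: only a plain missing-module
# error triggers the fallback import; any other load failure propagates.
import importlib

def load_native(preferred, fallback):
    try:
        return importlib.import_module(preferred)
    except ModuleNotFoundError:
        return importlib.import_module(fallback)

mod = load_native('no_such_native_build', 'json')  # falls back to json
print(mod.__name__)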
0b8674829eda03ce1f8fcae0386d45e9ab2ed98b
diff --git a/cwltool/workflow.py b/cwltool/workflow.py index <HASH>..<HASH> 100644 --- a/cwltool/workflow.py +++ b/cwltool/workflow.py @@ -82,27 +82,29 @@ def match_types(sinktype, src, iid, inputobj, linkMerge, valueFrom): else: raise WorkflowException(u"Unrecognized linkMerge enum '%s'" % linkMerge) return True - elif valueFrom is not None or are_same_type(src.parameter["type"], sinktype) or sinktype == "Any": + elif valueFrom is not None or can_assign_src_to_sink(src.parameter["type"], sinktype) or sinktype == "Any": # simply assign the value from state to input inputobj[iid] = copy.deepcopy(src.value) return True return False -def are_same_type(src, sink): # type: (Any, Any) -> bool +def can_assign_src_to_sink(src, sink): # type: (Any, Any) -> bool """Check for identical type specifications, ignoring extra keys like inputBinding. """ if isinstance(src, dict) and isinstance(sink, dict): if src["type"] == "array" and sink["type"] == "array": - if 'null' in sink["items"]: - return are_same_type([src["items"]], [it for it in sink["items"] if it != 'null']) - return are_same_type(src["items"], sink["items"]) - elif src["type"] == sink["type"]: - return True - else: - return False + return can_assign_src_to_sink(src["items"], sink["items"]) + elif isinstance(src, list): + for t in src: + if can_assign_src_to_sink(t, sink): + return True + elif isinstance(sink, list): + for t in sink: + if can_assign_src_to_sink(src, t): + return True else: return src == sink - + return False def object_from_state(state, parms, frag_only, supportsMultipleInput, sourceField): # type: (Dict[unicode, WorkflowStateItem], List[Dict[unicode, Any]], bool, bool, unicode) -> Dict[unicode, Any] diff --git a/tests/test_examples.py b/tests/test_examples.py index <HASH>..<HASH> 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -4,7 +4,7 @@ import cwltool.draft2tool as tool import cwltool.expression as expr import cwltool.factory import cwltool.process - +import cwltool.workflow class TestParamMatching(unittest.TestCase): @@ -150,6 +150,24 @@ class TestScanDeps(unittest.TestCase): set(("$include", "$schemas", "path")), loadref), indent=4) +class TestTypeCompare(unittest.TestCase): + def test_typecompare(self): + self.assertTrue(cwltool.workflow.can_assign_src_to_sink( + {'items': ['string', 'null'], 'type': 'array'}, + {'items': ['string', 'null'], 'type': 'array'})) + + self.assertTrue(cwltool.workflow.can_assign_src_to_sink( + {'items': ['string'], 'type': 'array'}, + {'items': ['string', 'null'], 'type': 'array'})) + + self.assertTrue(cwltool.workflow.can_assign_src_to_sink( + {'items': ['string', 'null'], 'type': 'array'}, + {'items': ['string'], 'type': 'array'})) + + self.assertFalse(cwltool.workflow.can_assign_src_to_sink( + {'items': ['string'], 'type': 'array'}, + {'items': ['int'], 'type': 'array'})) + if __name__ == '__main__': unittest.main()
Fix type checking that a source port is assignable to a sink port, now properly accommodates unions. alternate solution for #<I>
common-workflow-language_cwltool
train
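The corrected check treats a union source as assignable when any of its members is, and a union sink as accepting any matching member. A bare-bones rendering of the recursion, with record and enum cases trimmed:

# Bare-bones version of the union-aware assignability check; record/enum
# handling from cwltool is omitted.
def assignable(src, sink):
    if isinstance(src, dict) and isinstance(sink, dict):
        if src['type'] == 'array' and sink['type'] == 'array':
            return assignable(src['items'], sink['items'])
    elif isinstance(src, list):
        return any(assignable(t, sink) for t in src)
    elif isinstance(sink, list):
        return any(assignable(src, t) for t in sink)
    else:
        return src == sink
    return False

assert assignable(['string'], ['string', 'null'])
assert not assignable({'type': 'array', 'items': ['string']},
                      {'type': 'array', 'items': ['int']})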
536c78f83e71f67b32ea4eb3079658dc49087914
diff --git a/blockmanager.go b/blockmanager.go index <HASH>..<HASH> 100644 --- a/blockmanager.go +++ b/blockmanager.go @@ -6,6 +6,7 @@ import ( "bytes" "container/list" "fmt" + "math" "math/big" "sync" "sync/atomic" @@ -402,6 +403,31 @@ func (b *blockManager) cfHandler() { b.wg.Done() }() + var ( + // allCFCheckpoints is a map from our peers to the list of + // filter checkpoints they respond to us with. We'll attempt to + // get filter checkpoints immediately up to the latest block + // checkpoint we've got stored to avoid doing unnecessary + // fetches as the block headers are catching up. + allCFCheckpoints map[string][]*chainhash.Hash + + // lastCp will point to the latest block checkpoint we have for + // the active chain, if any. + lastCp chaincfg.Checkpoint + + // blockCheckpoints is the list of block checkpoints for the + // active chain. + blockCheckpoints = b.server.chainParams.Checkpoints + ) + + // Set the variable to the latest block checkpoint if we have any for + // this chain. Otherwise this block checkpoint will just stay at height + // 0, which will prompt us to look at the block headers to fetch + // checkpoints below. + if len(blockCheckpoints) > 0 { + lastCp = blockCheckpoints[len(blockCheckpoints)-1] + } + waitForHeaders: // We'll wait until the main header sync is either finished or the // filter headers are lagging at least a checkpoint interval behind the @@ -463,20 +489,55 @@ waitForHeaders: default: } - // Try to get all checkpoints from current peers. - allCheckpoints := b.getCheckpts(&lastHash, fType) - if len(allCheckpoints) == 0 { - log.Warnf("Unable to fetch set of " + - "candidate checkpoints, trying again...") + // If the height now exceeds the height at which we fetched the + // checkpoints last time, we must query our peers again. + if minCheckpointHeight(allCFCheckpoints) < lastHeight { + // Start by getting the filter checkpoints up to the + // latest block checkpoint we have for this chain. We + // do this so we don't have to fetch all filter + // checkpoints each time our block header chain + // advances. If our block header chain has already + // advanced past the last block checkpoint, we must + // fetch filter checkpoints to our last header hash. + // TODO(halseth): fetch filter checkpoints up to the + // best block of the connected peers. + bestHeight := uint32(lastCp.Height) + bestHash := *lastCp.Hash + if bestHeight < lastHeight { + bestHeight = lastHeight + bestHash = lastHash + } + + log.Debugf("Getting filter checkpoints up to "+ + "height=%v, hash=%v", bestHeight, bestHash) + allCFCheckpoints = b.getCheckpts(&bestHash, fType) + if len(allCFCheckpoints) == 0 { + log.Warnf("Unable to fetch set of " + + "candidate checkpoints, trying again...") - time.Sleep(QueryTimeout) - continue + time.Sleep(QueryTimeout) + continue + } + } + + // Cap the received checkpoints at the current height, as we + // can only verify checkpoints up to the height we have block + // headers for. + checkpoints := make(map[string][]*chainhash.Hash) + for p, cps := range allCFCheckpoints { + for i, cp := range cps { + height := uint32(i+1) * wire.CFCheckptInterval + if height > lastHeight { + break + } + checkpoints[p] = append(checkpoints[p], cp) + } } // See if we can detect which checkpoint list is correct. If // not, we will cycle again. 
goodCheckpoints, err = b.resolveConflict( - allCheckpoints, store, fType, + checkpoints, store, fType, ) if err != nil { log.Debugf("got error attempting to determine correct "+ @@ -1014,6 +1075,25 @@ func (b *blockManager) writeCFHeadersMsg(msg *wire.MsgCFHeaders, return &lastHeader, nil } +// minCheckpointHeight returns the height of the last filter checkpoint for the +// shortest checkpoint list among the given lists. +func minCheckpointHeight(checkpoints map[string][]*chainhash.Hash) uint32 { + // If the map is empty, return 0 immediately. + if len(checkpoints) == 0 { + return 0 + } + + // Otherwise return the length of the shortest one. + minHeight := uint32(math.MaxUint32) + for _, cps := range checkpoints { + height := uint32(len(cps) * wire.CFCheckptInterval) + if height < minHeight { + minHeight = height + } + } + return minHeight +} + // verifyHeaderCheckpoint verifies that a CFHeaders message matches the passed // checkpoint. It assumes everything else has been checked, including filter // type and stop hash matches, and returns true if matching and false if not.
blockmanager: fetch filter checkpoints up to chain checkpoints This commit makes the fetching of filter checkpoints go up to the hash of the latest chain checkpoints in case the block headers haven't caught up that far. This helps avoid doing many unnecessary filter checkpoint fetches at small heights while the block headers are catching up.
lightninglabs_neutrino
train
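The core of the diff above is capping each peer's filter checkpoints at the height covered by our verified block headers, so we never keep a checkpoint we cannot yet check. Below is a minimal Go sketch of just that step; it is an illustration, not code from the neutrino repo: the interval constant stands in for wire.CFCheckptInterval, plain strings stand in for chainhash.Hash values, and capCheckpoints is a hypothetical helper name.

package main

import "fmt"

const checkptInterval = 1000 // stand-in for wire.CFCheckptInterval

// capCheckpoints keeps, per peer, only the checkpoints whose implied
// height (index+1 intervals) does not exceed our best header height.
func capCheckpoints(perPeer map[string][]string, lastHeight uint32) map[string][]string {
	capped := make(map[string][]string)
	for peer, cps := range perPeer {
		for i, cp := range cps {
			height := uint32(i+1) * checkptInterval
			if height > lastHeight {
				break // later checkpoints are unverifiable for now
			}
			capped[peer] = append(capped[peer], cp)
		}
	}
	return capped
}

func main() {
	perPeer := map[string][]string{
		"peerA": {"cp1", "cp2", "cp3"}, // implied heights 1000, 2000, 3000
		"peerB": {"cp1", "cp2"},
	}
	// With headers verified up to height 2500, peerA's cp3 is dropped.
	fmt.Println(capCheckpoints(perPeer, 2500))
}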
fc2d4f95de4d063ec74cc140412f698f4f11e78f
diff --git a/jsonschema/tests/test_validators.py b/jsonschema/tests/test_validators.py index <HASH>..<HASH> 100644 --- a/jsonschema/tests/test_validators.py +++ b/jsonschema/tests/test_validators.py @@ -165,22 +165,6 @@ class TestLegacyTypeCheckCreation(SynchronousTestCase): self.assertEqual(set(Validator.DEFAULT_TYPES), expected_types) - @unittest.skip("This logic is actually incorrect.") - def test_default_types_update_type_checker(self): - Validator = validators.create( - meta_schema=self.meta_schema, - validators=self.validators, - default_types={u"array": list} - ) - - self.assertEqual(set(Validator.DEFAULT_TYPES), {u"array"}) - Extended = validators.extend( - Validator, - type_checker=Validator.TYPE_CHECKER.remove(u"array") - ) - - self.assertEqual(set(Extended.DEFAULT_TYPES), {}) - def test_types_redefines_the_validators_type_checker(self): schema = {"type": "string"} self.assertFalse(validators.Draft7Validator(schema).is_valid(12)) @@ -243,6 +227,29 @@ class TestLegacyTypeCheckingDeprecation(SynchronousTestCase): ) self.assertFalse(self.flushWarnings()) + def test_extending_a_legacy_validator_with_a_type_checker_errors(self): + Validator = validators.create( + meta_schema={}, + validators={}, + default_types={u"array": list} + ) + with self.assertRaises(TypeError) as e: + validators.extend( + Validator, + validators={}, + type_checker=TypeChecker(), + ) + + self.assertIn( + ( + "Cannot extend a validator created with default_types " + "with a type_checker. Update the validator to use a " + "type_checker when created." + ), + str(e.exception), + ) + self.flushWarnings() + def test_extending_a_legacy_validator_does_not_rewarn(self): Validator = validators.create(meta_schema={}, default_types={}) self.assertTrue(self.flushWarnings()) diff --git a/jsonschema/validators.py b/jsonschema/validators.py index <HASH>..<HASH> 100644 --- a/jsonschema/validators.py +++ b/jsonschema/validators.py @@ -200,6 +200,7 @@ def create( raise TypeError( "Do not specify default_types when providing a type checker.", ) + _created_with_default_types = True warn( ( "The default_types argument is deprecated. " @@ -213,6 +214,7 @@ def create( ) else: default_types = _DEPRECATED_DEFAULT_TYPES + _created_with_default_types = False if type_checker is None: type_checker = _types.TypeChecker() @@ -226,6 +228,7 @@ def create( DEFAULT_TYPES = property(_DEFAULT_TYPES) _DEFAULT_TYPES = dict(default_types) + _CREATED_WITH_DEFAULT_TYPES = _created_with_default_types def __init__( self, @@ -393,8 +396,14 @@ def extend(validator, validators=(), version=None, type_checker=None): all_validators = dict(validator.VALIDATORS) all_validators.update(validators) - if not type_checker: + if type_checker is None: type_checker = validator.TYPE_CHECKER + elif validator._CREATED_WITH_DEFAULT_TYPES: + raise TypeError( + "Cannot extend a validator created with default_types " + "with a type_checker. Update the validator to use a " + "type_checker when created." + ) # Set the default_types to None during class creation to avoid # overwriting the type checker (and triggering the deprecation warning).
Intentionally break mixing the new and old type interfaces.
Julian_jsonschema
train
41538a2f7da9dad3abc698ad646902966eeb74fa
diff --git a/eZ/Publish/Core/REST/Client/Input/Parser/ObjectState.php b/eZ/Publish/Core/REST/Client/Input/Parser/ObjectState.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/Client/Input/Parser/ObjectState.php +++ b/eZ/Publish/Core/REST/Client/Input/Parser/ObjectState.php @@ -52,7 +52,7 @@ class ObjectState extends BaseParser 'id' => $data['_href'], 'identifier' => $data['identifier'], 'priority' => (int)$data['priority'], - 'defaultLanguageCode' => $data['defaultLanguageCode'], + 'mainLanguageCode' => $data['defaultLanguageCode'], 'languageCodes' => explode(',', $data['languageCodes']), 'names' => $names, 'descriptions' => $descriptions, diff --git a/eZ/Publish/Core/REST/Client/Input/Parser/ObjectStateGroup.php b/eZ/Publish/Core/REST/Client/Input/Parser/ObjectStateGroup.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/Client/Input/Parser/ObjectStateGroup.php +++ b/eZ/Publish/Core/REST/Client/Input/Parser/ObjectStateGroup.php @@ -51,7 +51,7 @@ class ObjectStateGroup extends BaseParser array( 'id' => $data['_href'], 'identifier' => $data['identifier'], - 'defaultLanguageCode' => $data['defaultLanguageCode'], + 'mainLanguageCode' => $data['defaultLanguageCode'], 'languageCodes' => explode(',', $data['languageCodes']), 'names' => $names, 'descriptions' => $descriptions,
EZP-<I>: Aligned REST Core Client with ObjectState defaultLanguageCode refactoring
ezsystems_ezpublish-kernel
train
aaae1ff085441532cd4b1bc0cd276812fdc5cb92
diff --git a/src/PHPRenderer.php b/src/PHPRenderer.php index <HASH>..<HASH> 100644 --- a/src/PHPRenderer.php +++ b/src/PHPRenderer.php @@ -6,16 +6,14 @@ * @copyright Copyright (c) 2011-2015 Josh Lockhart * @license https://github.com/slimphp/PHP-View/blob/master/LICENSE.md (MIT License) */ - -namespace Geggleto\Renderer; +namespace Slim\Views; use Psr\Http\Message\ResponseInterface; /** - * Class SlimRenderer - * PSR-7 compatible PHP Renderer + * Php View * - * @package Geggleto\Renderer + * PSR-7 compatible PHP renderer */ class PHPRenderer { @@ -68,4 +66,4 @@ class PHPRenderer return $response->getBody()->write($output); } -} \ No newline at end of file +}
Update docblocks and namespace
slimphp_PHP-View
train
e46cd0ccfb24d28ea10947de89977d30dd764d08
diff --git a/salt/modules/parted.py b/salt/modules/parted.py index <HASH>..<HASH> 100644 --- a/salt/modules/parted.py +++ b/salt/modules/parted.py @@ -58,9 +58,13 @@ def __virtual__(): return __virtualname__ -def probe(device=''): +def probe(*devices, **kwargs): ''' - Ask the kernel to update its local partition data + Ask the kernel to update its local partition data. When no args are + specified all block devices are tried. + + Caution: Generally only works on devices with no mounted partitions and + may take a long time to return if specified devices are in use. CLI Examples: @@ -68,14 +72,15 @@ def probe(device=''): salt '*' partition.probe salt '*' partition.probe /dev/sda - ''' - if device: - dev = device.replace('/dev/', '') - if dev not in os.listdir('/dev'): - raise CommandExecutionError( - 'Invalid device passed to partition.probe' - ) - cmd = 'partprobe {0}'.format(device) + salt '*' partition.probe /dev/sda /dev/sdb + ''' + salt.utils.kwargs_warn_until(kwargs, 'Beryllium') + if 'device' in kwargs: + devices = tuple([kwargs['device']] + list(devices)) + del(kwargs['device']) + if kwargs: + raise(TypeError, "probe() takes no keyword arguments") + cmd = 'partprobe -- {0}'.format(" ".join(devices)) out = __salt__['cmd.run'](cmd).splitlines() return out @@ -92,6 +97,11 @@ def part_list(device, unit=None): salt '*' partition.part_list /dev/sda unit=s salt '*' partition.part_list /dev/sda unit=kB ''' + salt.utils.warn_until( + 'Beryllium', + '''The \'part_list\' function has been deprecated in favor of + \'list_\'. Please update your code and configs to reflect this.''') + return list_(device, unit)
Some salt.modules.parted housekeeping * cleaned up call signature of probe to support multiple devices * deprecated device keyword to probe function and added appropriate warnings * added deprecation warning to part_list
saltstack_salt
train
142a86a1b3c16c1828f34348a504045e43fd3af2
diff --git a/rubocop-todo.yml b/rubocop-todo.yml index <HASH>..<HASH> 100644 --- a/rubocop-todo.yml +++ b/rubocop-todo.yml @@ -150,11 +150,6 @@ SpaceInsideBlockBraces: StringLiterals: Enabled: false -# Offense count: 2 -# Cop supports --auto-correct. -TrailingBlankLines: - Enabled: false - # Offense count: 27 # Configuration parameters: EnforcedStyleForMultiline, SupportedStyles. TrailingComma: diff --git a/spec/nist_example_spec.rb b/spec/nist_example_spec.rb index <HASH>..<HASH> 100644 --- a/spec/nist_example_spec.rb +++ b/spec/nist_example_spec.rb @@ -175,5 +175,3 @@ describe 'NIST P-521 examples' do it_behaves_like 'NIST example' end - - diff --git a/spec/verify_spec.rb b/spec/verify_spec.rb index <HASH>..<HASH> 100644 --- a/spec/verify_spec.rb +++ b/spec/verify_spec.rb @@ -58,4 +58,3 @@ describe ECDSA do end end -
rubocop: Removed trailing blank lines.
DavidEGrayson_ruby_ecdsa
train
462bcc3f65fcf02e10a4d874b030a3c952ac8a65
diff --git a/ipyvolume/headless.py b/ipyvolume/headless.py index <HASH>..<HASH> 100644 --- a/ipyvolume/headless.py +++ b/ipyvolume/headless.py @@ -6,11 +6,11 @@ Assuming osx, define the following aliases for convenience, and start in headles $ chrome --remote-debugging-port=9222 --headless Make sure you have `PyChromeDevTools` installed:: - + $ pip install PyChromeDevTools Now run the following snippet (doesn't have to be from the Jupyter notebook) :: - + import ipyvolume as ipv ipv.examples.klein_bottle() ipv.view(10,30) @@ -20,10 +20,14 @@ Now run the following snippet (doesn't have to be from the Jupyter notebook) :: """ import os +import time import subprocess +import numpy as np import PyChromeDevTools -from . import pylab + +from ipyvolume import pylab + def _get_browser(): options = [] @@ -48,9 +52,6 @@ def _screenshot_data(html_filename, timeout_seconds=10, output_widget=None, form chrome.Network.enable() chrome.Page.enable() chrome.Page.navigate(url=html_filename) - import time - #time.sleep(2) - # loadEventFired chrome.wait_event("Page.frameStoppedLoading", timeout=60) chrome.wait_event("Page.loadEventFired", timeout=60) time.sleep(0.5) @@ -69,11 +70,10 @@ def _screenshot_data(html_filename, timeout_seconds=10, output_widget=None, form raise def _main(): - import numpy as np print(_get_browser()) pylab.figure() pylab.scatter(*np.random.random((3,100))) pylab.savefig('test.png', headless=True) if __name__ == "__main__": - _main() \ No newline at end of file + _main()
PEP8 import structure in headless.py
maartenbreddels_ipyvolume
train
1c759000ff21dce6c34f3f619894ae94e252969d
diff --git a/src/com/google/javascript/jscomp/lint/CheckNullableReturn.java b/src/com/google/javascript/jscomp/lint/CheckNullableReturn.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/lint/CheckNullableReturn.java +++ b/src/com/google/javascript/jscomp/lint/CheckNullableReturn.java @@ -69,13 +69,18 @@ public class CheckNullableReturn implements HotSwapCompilerPass, NodeTraversal.C this.compiler = compiler; } + public static boolean hasReturnDeclaredNullable(Node n) { + return n.isBlock() && n.hasChildren() && isReturnTypeNullable(n.getParent()) + && !hasSingleThrow(n); + } + @Override public void visit(NodeTraversal t, Node n, Node parent) { // Do the checks when 'n' is the block node and 'parent' is the function // node, so that getControlFlowGraph will return the graph inside // the function, rather than the graph of the enclosing scope. - if (n.isBlock() && n.hasChildren() && isReturnTypeNullable(parent) - && !hasSingleThrow(n) && !canReturnNull(t.getControlFlowGraph())) { + if (hasReturnDeclaredNullable(n) + && !canReturnNull(t.getControlFlowGraph())) { String fnName = NodeUtil.getNearestFunctionName(parent); if (fnName != null && !fnName.isEmpty()) { compiler.report(t.makeError(parent, NULLABLE_RETURN_WITH_NAME, fnName)); @@ -122,7 +127,7 @@ public class CheckNullableReturn implements HotSwapCompilerPass, NodeTraversal.C /** * @return True if the given ControlFlowGraph could return null. */ - private static boolean canReturnNull(ControlFlowGraph<Node> graph) { + public static boolean canReturnNull(ControlFlowGraph<Node> graph) { CheckPathsBetweenNodes<Node, ControlFlowGraph.Branch> test = new CheckPathsBetweenNodes<Node, ControlFlowGraph.Branch>( graph,
Allow CheckNullableReturn check to be run from outside the pass. ------------- Created by MOE: <URL>
google_closure-compiler
train
8e1d2f0c1471b40769c078ce052161f82527f9d9
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -60,7 +60,7 @@ function core(type, pattern, token, callback) { return; } - if (typeof token !== 'string') { + if (token !== true && typeof token !== 'string') { errs.type('expect `token` be string', callback); return; } @@ -70,7 +70,7 @@ function core(type, pattern, token, callback) { return; } - var opts = { + var opts = token === true ? undefined : { headers: { 'Authorization': 'token ' + token }
hack for token logic in tests/travis
tunnckoCore_online-branch-exist
train
2e171b97f719bf7218ca9e04a6c34b693dac297f
diff --git a/shared/chat/conversation/list.desktop.js b/shared/chat/conversation/list.desktop.js index <HASH>..<HASH> 100644 --- a/shared/chat/conversation/list.desktop.js +++ b/shared/chat/conversation/list.desktop.js @@ -24,6 +24,7 @@ type State = { class ConversationList extends Component<void, Props, State> { _cellCache: any; _cellMeasurer: any; + _list: any; state: State; constructor (props: Props) { @@ -47,7 +48,7 @@ class ConversationList extends Component<void, Props, State> { // minus one because loader message is there const messageIndex = index - 1 const message = this.state.messages.get(messageIndex) - const id = message && message.messageID + const id = message && (message.outboxID || message.messageID) if (id == null) { console.warn('id is null for index:', messageIndex) } @@ -74,14 +75,29 @@ class ConversationList extends Component<void, Props, State> { componentWillReceiveProps (nextProps: Props) { // If we're not scrolling let's update our internal messages if (!this.state.isScrolling) { + this._invalidateChangedMessages() this.setState({ messages: nextProps.messages, }) } } + _toRemeasure = [] + + _invalidateChangedMessages () { + this.state.messages.forEach((item, index) => { + if (item.messageID !== this.props.messages.get(index, {}).messageID) { + console.warn('resetting ' + index, item, this.props.messages.get(index, {})) + // this._cellMeasurer.() // ForRow(index + 1) + // this._list.recomputeRowHeights(index + 1) + this._toRemeasure.push(index + 1) + } + }) + } + _onScrollSettled = _.debounce(() => { // If we've stopped scrolling let's update our internal messages + this._invalidateChangedMessages() this.setState({ isScrolling: false, ...(this.state.messages !== this.props.messages ? {messages: this.props.messages} : null), @@ -120,10 +136,15 @@ class ConversationList extends Component<void, Props, State> { } render () { - const countWithLoading = this.state.messages.size + 1 // Loading row on top always + const countWithLoading = this.state.messages.count() + 1 // Loading row on top always let scrollToIndex = this.state.isLockedToBottom ? countWithLoading - 1 : undefined let scrollTop = scrollToIndex ? undefined : this.state.scrollTop + this._toRemeasure.forEach(index => this._cellMeasurer.resetMeasurementForRow(index)) + this._toRemeasure = [] + + console.warn('last') + console.warn(this.state.messages.last() && this.state.messages.last().messageState) return ( <Box style={{...globalStyles.flexBoxColumn, flex: 1}}> <AutoSizer> @@ -137,6 +158,7 @@ class ConversationList extends Component<void, Props, State> { {({getRowHeight}) => ( <List height={height} + ref={r => { this._list = r }} width={width} onScroll={this._onScroll} scrollTop={scrollTop}
attempts at fixing chat pending->state not re-rendering
keybase_client
train
5917a784be944903c12f2ce42d57665dbc608904
diff --git a/parser/lexer.go b/parser/lexer.go index <HASH>..<HASH> 100644 --- a/parser/lexer.go +++ b/parser/lexer.go @@ -516,8 +516,14 @@ func startWithNumber(s *Scanner) (tok int, pos Pos, lit string) { func startWithDot(s *Scanner) (tok int, pos Pos, lit string) { pos = s.r.pos() s.r.inc() + save := s.r.pos() if isDigit(s.r.peek()) { - return s.scanFloat(&pos) + tok, _, lit = s.scanFloat(&pos) + if s.r.eof() || unicode.IsSpace(s.r.peek()) { + return + } + // Fail to parse a float, reset to dot. + s.r.p = save } tok, lit = int('.'), "." return diff --git a/parser/lexer_test.go b/parser/lexer_test.go index <HASH>..<HASH> 100644 --- a/parser/lexer_test.go +++ b/parser/lexer_test.go @@ -107,6 +107,7 @@ func (s *testLexerSuite) TestLiteral(c *C) { {fmt.Sprintf("%c", 0), invalid}, {fmt.Sprintf("t1%c", 0), identifier}, {".*", int('.')}, + {".1_t_1_x", int('.')}, } runTest(c, table) } diff --git a/parser/parser_test.go b/parser/parser_test.go index <HASH>..<HASH> 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -372,6 +372,7 @@ func (s *testParserSuite) TestDBAStmt(c *C) { {`SHOW GRANTS`, true}, {`SHOW GRANTS FOR 'test'@'localhost'`, true}, {`SHOW COLUMNS FROM City;`, true}, + {`SHOW COLUMNS FROM tv189.1_t_1_x;`, true}, {`SHOW FIELDS FROM City;`, true}, {`SHOW TRIGGERS LIKE 't'`, true}, {`SHOW DATABASES LIKE 'test2'`, true},
parser: fix <I>_t_1_x parsed as <I> _t_1_x (#<I>). It should not be parsed as a <I> (float) and _t_1_x (identifier); instead, it should be . (dot) and 1_t_1_x (identifier). A sketch of the save-and-backtrack pattern follows this record.
pingcap_tidb
train
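The fix in the record above is a classic lexer backtrack: remember the reader position after consuming '.', attempt to scan a float, and restore the saved position when what follows is not actually a number. Here is a self-contained Go sketch of that pattern, simplified from the real scanner; the end-of-token test is reduced to end-of-input or a space, and all names are illustrative, not from the tidb codebase.

package main

import "fmt"

// scanAfterDot is called with pos just past a consumed '.'. It tries to
// read digits as a float; if the token does not terminate cleanly, it
// backtracks to the saved position and yields a plain '.' token instead.
func scanAfterDot(input string, pos int) (token string, next int) {
	save := pos
	end := pos
	for end < len(input) && input[end] >= '0' && input[end] <= '9' {
		end++
	}
	sawDigits := end > pos
	terminated := end == len(input) || input[end] == ' '
	if sawDigits && terminated {
		return "float(." + input[pos:end] + ")", end
	}
	// Backtrack: ".1_t_1_x" lexes as '.' followed by the identifier 1_t_1_x.
	return ".", save
}

func main() {
	fmt.Println(scanAfterDot(".1_t_1_x", 1)) // . 1
	fmt.Println(scanAfterDot(".25", 1))      // float(.25) 3
}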
0f3357bb07e801ea62327872d19913969001dd7a
diff --git a/urivalidate.go b/urivalidate.go index <HASH>..<HASH> 100644 --- a/urivalidate.go +++ b/urivalidate.go @@ -48,7 +48,7 @@ func ParseUrls(baseUrl, redirectUrl string) (retBaseUrl, retRedirectUrl *url.URL // resolve references to base url retBaseUrl = (&url.URL{Scheme: base.Scheme, Host: base.Host, Path: "/"}).ResolveReference(&url.URL{Path: base.Path}) - retRedirectUrl = (&url.URL{Scheme: base.Scheme, Host: base.Host, Path: "/"}).ResolveReference(&url.URL{Path: redirect.Path}) + retRedirectUrl = (&url.URL{Scheme: base.Scheme, Host: base.Host, Path: "/"}).ResolveReference(&url.URL{Path: redirect.Path, RawQuery: redirect.RawQuery}) return } diff --git a/urivalidate_test.go b/urivalidate_test.go index <HASH>..<HASH> 100644 --- a/urivalidate_test.go +++ b/urivalidate_test.go @@ -60,6 +60,12 @@ func TestURIValidate(t *testing.T) { "https://mysafewebsite.com/secure/redirect/\\..\\../\\../evil", "https://mysafewebsite.com/secure/redirect/%5C..%5C../%5C../evil", }, + { + // Query string must be kept + "http://www.google.com/myapp/redir", + "http://www.google.com/myapp/redir?a=1&b=2", + "http://www.google.com/myapp/redir?a=1&b=2", + }, } for _, v := range valid { if realRedirectUri, err := ValidateUri(v[0], v[1]); err != nil {
Per the OAuth2 spec, query strings in redirect URLs must be kept; see the sketch after this record for the net/url subtlety involved.
openshift_osin
train
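The one-line fix in the record above rests on a net/url subtlety worth spelling out: when ResolveReference is given a reference built from Path alone, the result's query comes from that reference, so it is empty and ?a=1&b=2 is silently dropped; copying RawQuery across preserves it. A small runnable Go illustration of the before/after behavior (parse errors ignored for brevity):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	base, _ := url.Parse("http://www.google.com/myapp/redir")
	redirect, _ := url.Parse("http://www.google.com/myapp/redir?a=1&b=2")

	// Before the fix: rebuilding the reference from Path alone drops the query.
	lossy := (&url.URL{Scheme: base.Scheme, Host: base.Host, Path: "/"}).
		ResolveReference(&url.URL{Path: redirect.Path})

	// After the fix: carrying RawQuery over keeps it intact.
	kept := (&url.URL{Scheme: base.Scheme, Host: base.Host, Path: "/"}).
		ResolveReference(&url.URL{Path: redirect.Path, RawQuery: redirect.RawQuery})

	fmt.Println(lossy) // http://www.google.com/myapp/redir
	fmt.Println(kept)  // http://www.google.com/myapp/redir?a=1&b=2
}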
166fb929842401bbd4301e74981873d41442e0f7
diff --git a/engo_empty.go b/engo_empty.go index <HASH>..<HASH> 100644 --- a/engo_empty.go +++ b/engo_empty.go @@ -75,8 +75,6 @@ func runLoop(defaultScene Scene, headless bool) { case <-resetLoopTicker: ticker.Stop() ticker = time.NewTicker(time.Duration(int(time.Second) / opts.FPSLimit)) - case <-c: - fallthrough case <-closeGame: ticker.Stop() closeEvent()
Removed fallthrough case since it caused build errors (#<I>); fallthrough is not legal inside a select statement (see the sketch after this record).
EngoEngine_engo
train
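For context on the record above: in Go, fallthrough is only legal inside an expression switch, so a fallthrough between select cases fails to compile, which is the build error the commit removes. When two channels must share shutdown logic, one common shape is to call a helper from each case; the following is a hypothetical sketch of that shape, and none of these names come from engo.

package main

import "fmt"

func main() {
	sig := make(chan struct{})
	closeGame := make(chan struct{})
	shutdown := func(reason string) { fmt.Println("stopping:", reason) }

	// Signal one of the two shutdown paths.
	close(closeGame)

	// A select case cannot use fallthrough; share logic via the closure.
	select {
	case <-sig:
		shutdown("signal")
	case <-closeGame:
		shutdown("close requested")
	}
}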
868c48a1898709017c91d55ba689a39ab3c94989
diff --git a/tests_django/test_settings.py b/tests_django/test_settings.py index <HASH>..<HASH> 100644 --- a/tests_django/test_settings.py +++ b/tests_django/test_settings.py @@ -43,4 +43,9 @@ DATABASES = { } } +# Using the MD5 password hasher improves test performance +PASSWORD_HASHERS = ( + 'django.contrib.auth.hashers.MD5PasswordHasher', +) + USE_TZ = True
Use MD5 password hasher during Django tests
gunthercox_ChatterBot
train
40df07d7ce3bfb97f29943c21d216f3062b04843
diff --git a/pycdlib/headervd.py b/pycdlib/headervd.py index <HASH>..<HASH> 100644 --- a/pycdlib/headervd.py +++ b/pycdlib/headervd.py @@ -352,7 +352,9 @@ class HeaderVolumeDescriptor(object): corresponds to that identifier. Parameters: + depth - The depth of this directory record in the hierarchy. ident - The identifier to look up in the path table record. + parent_dir_num - The parent_directory number to use. Returns: The PathTableRecord object corresponding to the identifier. ''' @@ -364,6 +366,7 @@ class HeaderVolumeDescriptor(object): index = bisect.bisect_left(self.path_table_records, fake_ptr) if index != len(self.path_table_records) and self.path_table_records[index] == fake_ptr: return self.path_table_records[index] + raise pycdlibexception.PyCdlibInvalidISO("Could not match directory to PTR") def extent_location(self): ''' diff --git a/pycdlib/pycdlib.py b/pycdlib/pycdlib.py index <HASH>..<HASH> 100644 --- a/pycdlib/pycdlib.py +++ b/pycdlib/pycdlib.py @@ -853,7 +853,7 @@ class PyCdlib(object): new_record.parent.ptr.directory_num) ptr.set_dirrecord(new_record) new_record.set_ptr(ptr) - except KeyError: + except pycdlibexception.PyCdlibInvalidISO: # There are some very broken ISOs in the wild # (Windows 98 SE is one of them) that have # directory records for directories without a
Clean things up so we have proper errors.
clalancette_pycdlib
train
bb7dc1f0aad2aa71b74d2054b84e2e30af7df95d
diff --git a/backbone.layoutmanager.js b/backbone.layoutmanager.js index <HASH>..<HASH> 100644 --- a/backbone.layoutmanager.js +++ b/backbone.layoutmanager.js @@ -52,6 +52,11 @@ var LayoutManager = Backbone.View.extend({ Backbone.View.call(this, options); }, + // By default return the View as context. + //serialize: function() { + // return this; + //}, + // This method is used within specific methods to indicate that they should // be treated as asynchronous. This method should only be used within the // render chain, otherwise unexpected behavior may occur. @@ -214,7 +219,7 @@ var LayoutManager = Backbone.View.extend({ if (!insert) { // If the View we are adding has already been rendered, simply inject it // into the parent. - if (manager.hasRendered) { + if (view.hasRendered) { // Apply the partial. options.partial(root.$el, view.$el, root.__manager__, manager); } @@ -290,7 +295,7 @@ var LayoutManager = Backbone.View.extend({ root.delegateEvents(); // Set this View as successfully rendered. - manager.hasRendered = true; + root.hasRendered = true; // Only process the queue if it exists. if (next = manager.queue.shift()) { @@ -579,7 +584,7 @@ var LayoutManager = Backbone.View.extend({ // Iterate over all of the nested View's and remove. root.getViews().each(function(view) { // Force doesn't care about if a View has rendered or not. - if (view.__manager__.hasRendered || force) { + if (view.hasRendered || force) { LayoutManager._removeView(view, force); } }); @@ -765,7 +770,7 @@ var LayoutManager = Backbone.View.extend({ var def = options.deferred(); // Ensure all nested Views are properly scrubbed if re-rendering. - if (manager.hasRendered) { + if (view.hasRendered) { view._removeViews(); }
experimental code for default view template context, and put hasRendered on the instance
tbranyen_backbone.layoutmanager
train
da29457b32dcdf670727d4dc0f2fd322c0c3bfb1
diff --git a/spec/controllers/spree/adyen_redirect_controller_spec.rb b/spec/controllers/spree/adyen_redirect_controller_spec.rb index <HASH>..<HASH> 100644 --- a/spec/controllers/spree/adyen_redirect_controller_spec.rb +++ b/spec/controllers/spree/adyen_redirect_controller_spec.rb @@ -4,6 +4,7 @@ module Spree describe AdyenRedirectController do let(:order) { create(:order_with_line_items, state: "payment") } + context "Adyen HPP Gateway" do def params { "merchantReference"=>"R183301255", @@ -15,6 +16,8 @@ module Spree "merchantSig"=>"erewrwerewrewrwer" } end + subject { spree_get :confirm, params } + let(:payment_method) { Gateway::AdyenHPP.create(name: "Adyen") } before do @@ -23,14 +26,13 @@ module Spree expect(controller).to receive(:payment_method).and_return payment_method end - it "create payment" do - expect { - spree_get :confirm, params - }.to change { Payment.count }.by(1) + it "creates payment" do + expect{ subject }.to change { Payment.count }.by(1) end it "sets payment attributes properly" do - spree_get :confirm, params + subject + payment = Payment.last expect(payment.amount.to_f).to eq order.total.to_f @@ -39,8 +41,7 @@ module Spree end it "redirects to order complete page" do - spree_get :confirm, params - expect(response).to redirect_to spree.order_path(order, :token => order.guest_token) + expect(subject).to redirect_to spree.order_path(order, :token => order.guest_token) end pending "test check signature filter"
Replace reused statements with subject blocks.
StemboltHQ_solidus-adyen
train
9b67195a5cf232f32b8ec4083e6c3e1cc35c294a
diff --git a/README.md b/README.md index <HASH>..<HASH> 100755 --- a/README.md +++ b/README.md @@ -1,7 +1,6 @@ <!-- START doctoc generated TOC please keep comment here to allow auto update --> <!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE --> - - [Column sorting for Laravel 5.*](#column-sorting-for-laravel-5) - [Setup](#setup) - [Composer](#composer) @@ -25,13 +24,13 @@ <!-- END doctoc generated TOC please keep comment here to allow auto update --> -# Column sorting for Laravel 5.* +# Column sorting for Laravel 5.5 [![Latest Version](https://img.shields.io/github/release/Kyslik/column-sortable.svg?style=flat-square)](https://github.com/Kyslik/column-sortable/releases) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square)](LICENSE.md) [![Total Downloads](https://img.shields.io/packagist/dt/Kyslik/column-sortable.svg?style=flat-square)](https://packagist.org/packages/Kyslik/column-sortable) -[![Build Status](https://travis-ci.org/Kyslik/column-sortable.svg?branch=master)](https://travis-ci.org/Kyslik/column-sortable) +[![Build Status](https://travis-ci.org/Kyslik/column-sortable.svg?branch=L5.5)](https://travis-ci.org/Kyslik/column-sortable) -Package for handling column sorting in Laravel 5.[1-4]. +Package for handling column sorting in Laravel 5.[1-5]. Simply put: [this hack](http://hack.swic.name/laravel-column-sorting-made-easy/) in package with blade extension and Font Awesome icon support. @@ -55,6 +54,12 @@ Pull this package in through Composer (development/latest version `dev-master`) >**Note**: Major and minor versions should match with Laravel, build versions are separate from Laravel versioning scheme. Example: If you are using Laravel 5.2, column-sortable version should be `5.2.*`. +### Laravel's 5.5 auto discovery + +Currently WIP. + +### Manual installation + Add the package to your application service providers in `config/app.php` ``` @@ -68,6 +73,7 @@ Add the package to your application service providers in `config/app.php` Kyslik\ColumnSortable\ColumnSortableServiceProvider::class, ], ``` + ## Publish configuration Publish the package configuration file to your application. @@ -107,7 +113,7 @@ You're set to go. ## Blade Extension -There is one blade extension for you to use **@sortablelink()** +There is a blade extension for you to use **@sortablelink()** ``` @sortablelink('column', 'Title', ['parameter' => 'smile']) @@ -160,6 +166,8 @@ Install [Font-Awesome](https://github.com/FortAwesome/Font-Awesome) for visual [ ## Full Example +>**Note**: you may find useful [working example repository](https://github.com/Kyslik/column-sortable-example) + ### Routes ``` diff --git a/src/ColumnSortable/Sortable.php b/src/ColumnSortable/Sortable.php index <HASH>..<HASH> 100755 --- a/src/ColumnSortable/Sortable.php +++ b/src/ColumnSortable/Sortable.php @@ -24,7 +24,7 @@ trait Sortable */ public function scopeSortable($query, $defaultSortParameters = null) { - if (Request::has('sort') && Request::has('order')) { + if (Request::filled('sort') && Request::filled('order')) { return $this->queryOrderBuilder($query, Request::only(['sort', 'order'])); } elseif ( ! 
is_null($defaultSortParameters)) { $defaultSortArray = $this->formatToSortParameters($defaultSortParameters); diff --git a/tests/ColumnSortableTraitTest.php b/tests/ColumnSortableTraitTest.php index <HASH>..<HASH> 100644 --- a/tests/ColumnSortableTraitTest.php +++ b/tests/ColumnSortableTraitTest.php @@ -55,9 +55,6 @@ class ColumnSortableTraitTest extends \Orchestra\Testbench\TestCase } - - - public function testSortableWithRequestParameters() { $usersTable = $this->user->getTable(); @@ -125,11 +122,12 @@ class ColumnSortableTraitTest extends \Orchestra\Testbench\TestCase $query = $this->comment->newQuery()->with(['parent']); $relation = $query->getRelation('parent'); $resultQuery = $this->invokeMethod($this->comment, 'queryJoinBuilder', [$query, $relation]); - $expectedQuery = - $this->comment->newQuery()->from('comments as parent_comments')->select('parent_comments.*')->join('comments', 'parent_comments.parent_id', '=', 'comments.id'); + $expectedQuery = $this->comment->newQuery()->from('comments as parent_comments')->select('parent_comments.*') + ->join('comments', 'parent_comments.parent_id', '=', 'comments.id'); $this->assertEquals($expectedQuery->toSql(), $resultQuery->toSql()); } - + + /** * Call protected/private method of a class. * @@ -370,6 +368,7 @@ class Profile extends Model */ class Comment extends Model { + use \Kyslik\ColumnSortable\Sortable; /** @@ -378,7 +377,7 @@ class Comment extends Model public $sortable = [ 'body', 'created_at', - 'updated_at' + 'updated_at', ];
Update package for Laravel <I>
Kyslik_column-sortable
train
dc9a192ed7cdb93de88d7b45befddaa29a8719b0
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,8 +9,105 @@ setup( url="http://github.com/daviesjamie/spout", license="MIT", description="A simple framework that makes it easy to work with data streams in Python.", - long_description=open('README.md').read(), install_requires=[ "twython >= 3.1.2" ], + long_description=""" +Spout +==== + +Spout is a small and simple framework that makes it easy to work with data +streams in Python. In particular, Spout was designed with the processing and +consumption of live data sources in mind. + + +How it works +------------ + +At the heart of Spout is the concept of a Stream (which is defined in an +abstract `Stream` class). This defines the basic operations that can be +performed upon a data stream: + +Mapping + The items in one stream can me "mapped" to another stream. This is done by + applying a supplied `Function` to each item in the input stream, to produce + another output stream. + + stream.map(Function) + +Filtering + The items in a stream can be "filtered", so that the resultant stream only + contains items that match a given criteria. This is done by using a supplied + `Predicate` to test each item in the input stream, and copies it to the output + stream if it passes the test criteria. + + stream.filter(Predicate) + +Processing (Consuming) + The items in a stream are used in some calculations or functionality that + provides no further output to the stream. This is done by applying the supplied + Operation to each item in the stream. + + stream.for_each(Operation) + + +Usage +----- + +To use Spout, you first need to create an input data stream. A data stream is simply an +instantiation of a Stream or any of its children (which can be found in the +streams.py file). The Stream class has been specifically designed so that it +is easy to extend and wrap around currently existing data sources that you might +have, such as files or databases. + +Some existing examples of stream data sources can by found in sources.py. + +For example, to create a Stream out of the lines in a plain text file: + + from spout.sources import FileInputStream + s = FileInputStream("test.txt") + +Now that you have your data in a stream, you simply have to process it! This can +be done by creating and using your own Functions, Predicates or Operations +(see above). + +For example, to print out all the lines in a text file that start with a digit, +but with the digit stripped, we can create our own Predicate and Function +and pass these to the .filter() and .map() functions: + + from spout.sources import FileInputStream + from spout.structs import Function, Predicate + from spout.utils import PrintOperation + + + class StartsWithDigit(Predicate): + def test(self, obj): + return obj[0].is_digit() + + + class StripFirstChar(Function): + def apply(self, input): + return input[1:] + + + s = FileInputStream("test.txt") + s \\ + .filter(StartsWithDigit()) \\ + .map(StripFirstChar()) \\ + .for_each(PrintOperation()) + + +Installation +------------ + +Spout is available in the Python Package Index (PyPI), and so the easiest way to +install it is through `pip`: + + $ pip install spout + +However, it is also possible to install the repository from the source, through +the `setup.py` utility: + + $ python setup.py install +""", )
Manually copied README into long description.
daviesjamie_spout
train
d6ec104829d30c611ae42c12c6efda5cea7c76c2
diff --git a/examples/create_invite_link.py b/examples/create_invite_link.py index <HASH>..<HASH> 100644 --- a/examples/create_invite_link.py +++ b/examples/create_invite_link.py @@ -1,7 +1,6 @@ import telebot from time import sleep, time -from telebot.types import InlineKeyboardMarkup as ikm #Only for creating Inline Buttons, not necessary for creating Invite Links -from telebot.types import InlineKeyboardButton as ikb #Only for creating Inline Buttons, not necessary for creating Invite Links +from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton #Only for creating Inline Buttons, not necessary for creating Invite Links Token = "api_token" #Your Bot Access Token Group_ID = -1234567890 #Group ID for which invite link is to be created @@ -19,8 +18,8 @@ def newmember(msg): #Create an invite link class that contains info about the created invite link using create_chat_invite_link() with parameters invite = bot.create_chat_invite_link(Group_ID, member_limit=1, expire_date=int(time())+45) #Here, the link will auto-expire in 45 seconds InviteLink = invite.invite_link #Get the actual invite link from 'invite' class - mrkplink = ikm() #Created Inline Markup Keyboard - mrkplink.add(ikb("Join our group 🚀", url=InviteLink)) #Added Invite Link to Inline Markup Keyboard + mrkplink = InlineKeyboardMarkup() #Created Inline Keyboard Markup + mrkplink.add(InlineKeyboardButton("Join our group 🚀", url=InviteLink)) #Added Invite Link to Inline Keyboard bot.send_message(msg.chat.id, f"Hey there {msg.from_user.first_name}, Click the link below to join our Official Group." reply_markup=mrkplink) #This will send a message with the newly-created invite link as markup button.
Update create_invite_link.py. Imported both inline keyboards in the same line :|
eternnoir_pyTelegramBotAPI
train
8b46dd6e544947d11bb83a757c4c54803e8c2f30
diff --git a/dev/com.ibm.ws.org.apache.myfaces.2.3/src/org/apache/myfaces/context/servlet/ServletExternalContextImpl.java b/dev/com.ibm.ws.org.apache.myfaces.2.3/src/org/apache/myfaces/context/servlet/ServletExternalContextImpl.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.ws.org.apache.myfaces.2.3/src/org/apache/myfaces/context/servlet/ServletExternalContextImpl.java +++ b/dev/com.ibm.ws.org.apache.myfaces.2.3/src/org/apache/myfaces/context/servlet/ServletExternalContextImpl.java @@ -83,7 +83,7 @@ public final class ServletExternalContextImpl extends ServletExternalContextImpl * Indicates the port used for websocket connections. */ @JSFWebConfigParam(since = "2.3") - public static final java.lang.String WEBSOCKET_PORT = "javax.faces.WEBSOCKET_PORT"; + public static final java.lang.String WEBSOCKET_ENDPOINT_PORT = "javax.faces.WEBSOCKET_ENDPOINT_PORT"; private ServletRequest _servletRequest; private ServletResponse _servletResponse; @@ -462,7 +462,7 @@ public final class ServletExternalContextImpl extends ServletExternalContextImpl checkNull(url, "url"); FacesContext facesContext = getCurrentFacesContext(); Integer port = WebConfigParamUtils.getIntegerInitParameter( - getCurrentFacesContext().getExternalContext(), WEBSOCKET_PORT); + getCurrentFacesContext().getExternalContext(), WEBSOCKET_ENDPOINT_PORT); port = (port == 0) ? null : port; if (port != null && !port.equals(facesContext.getExternalContext().getRequestServerPort()))
Issue #<I>: update websocket port param name
OpenLiberty_open-liberty
train
ded1ee715c4e139677ef3985701846a24fe148c4
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -205,7 +205,12 @@ Engines.prototype.decorate = function(engine) {
   }
 
   helpers = mergeHelpers(engine, opts);
-  var data = utils.merge({}, locals, helpers);
+  var data = {};
+  if (opts && typeof opts.mergeFn === 'function') {
+    data = opts.mergeFn(helpers, locals);
+  } else {
+    data = utils.extend({}, locals, helpers);
+  }
 
   if (typeof cb !== 'function') {
     return renderSync(str, data);
diff --git a/utils.js b/utils.js
index <HASH>..<HASH> 100644
--- a/utils.js
+++ b/utils.js
@@ -1,4 +1,5 @@
 var lazy = require('lazy-cache')(require);
+lazy('extend-shallow', 'extend');
 lazy('mixin-deep', 'merge');
 lazy('async-helpers', 'AsyncHelpers');
 lazy('helper-cache', 'Helpers');
fixes recursion bug from deep merging
jonschlinkert_engine-cache
train
326cc0ef32cedb0a070877eb6da966bf27af1f08
diff --git a/src/request_handlers/webelement_request_handler.js b/src/request_handlers/webelement_request_handler.js index <HASH>..<HASH> 100644 --- a/src/request_handlers/webelement_request_handler.js +++ b/src/request_handlers/webelement_request_handler.js @@ -99,19 +99,35 @@ ghostdriver.WebElementReqHand = function(id, session) { _getNameCommand = function(req, res) { var result = _session.getCurrentWindow().evaluate( - require("./webdriver_atoms.js").get("execute_script"), - "return arguments[0].tagName;", - [_getJSON()]); - // N.B. must convert value to a lowercase string as per WebDriver JSONWireProtocol spec - if(result.status === 0) result.value = result.value.toLowerCase(); + require("./webdriver_atoms.js").get("execute_script"), + "return arguments[0].tagName;", + [_getJSON()]); + + // Convert value to a lowercase string as per WebDriver JSONWireProtocol spec + // @see http://code.google.com/p/selenium/wiki/JsonWireProtocol#/session/:sessionId/element/:id/name + if(result.status === 0) { + result.value = result.value.toLowerCase(); + } + res.respondBasedOnResult(_session, req, result); }, _getAttributeCommand = function(req, res) { - var attributeValueAtom = require("./webdriver_atoms.js").get("get_attribute_value"); - var attributeName = req.urlParsed.file; - var response = _session.getCurrentWindow().evaluate(attributeValueAtom, _getJSON(), attributeName); - res.respondBasedOnResult(_session, req, response); + var attributeValueAtom = require("./webdriver_atoms.js").get("get_attribute_value"), + result; + + if (typeof(req.urlParsed.file) !== "undefined") { + // Read the attribute + result = _session.getCurrentWindow().evaluate( + attributeValueAtom, // < Atom to read an attribute + _getJSON(), // < Element to read from + req.urlParsed.file); // < Attribute to read + + res.respondBasedOnResult(_session, req, result); + return; + } + + throw _errors.createInvalidReqMissingCommandParameterEH(req); }, _submitCommand = function(req, res) {
Straightened coding style and added error handling for "element/:id/attribute/:name".
detro_ghostdriver
train
ce6ab847a8f6971c5a1c353a0430a9899c799405
diff --git a/Source/Persistence/DbRepositoryBase.php b/Source/Persistence/DbRepositoryBase.php index <HASH>..<HASH> 100644 --- a/Source/Persistence/DbRepositoryBase.php +++ b/Source/Persistence/DbRepositoryBase.php @@ -156,6 +156,8 @@ abstract class DbRepositoryBase implements IObjectSetWithPartialLoadSupport */ public function loadPartial(IPartialLoadCriteria $criteria) { + $criteria->verifyOfClass($this->getObjectType()); + $aliasPropertyNameMap = $criteria->getAliasNestedPropertyNameMap(); $readModelMapper = new ArrayReadModelMapper( $this->mapper,
Add check for class type on load partial criteria
dms-org_core
train
89a3827a11df639a1c079f11a7f70d153e39adc9
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,10 @@ installed to use this. Try `gem install haml`. If you use ## Options +### haml path +Specify where the haml executable is. Defaults to just `haml` if not provided. +`{hamlPath: '/path/to/haml'}` + ### style Output style. Can be indented (default) or ugly. `{style: ugly}` diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -32,7 +32,13 @@ module.exports = function(opt) { options.outExtension = options.outExtension || '.html'; var fileContents = file.contents.toString('utf8'); - var args = ['haml', '-s']; // read from stdin + var args = []; + if (options.hamlPath) { + args.push(options.hamlPath); + } else { + args.push('haml'); + } + args.push('-s'); // read from stdin if (options.trace) { args.push('--trace'); } @@ -92,7 +98,10 @@ module.exports = function(opt) { cp.on('error', function(err) { var message = err; if (err.code === 'ENOENT') { - var isHaml = err.syscall === 'spawn haml' || err.path === 'haml'; + var isHaml = + err.path === 'haml' && typeof options.hamlPath === 'undefined' || + err.path === options.hamlPath && + typeof options.hamlPath === 'string'; if (isHaml) { message = noHamlError; } diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -121,7 +121,7 @@ describe('Haml conversion without haml installed', function() { it('says to install haml', function(done) { var gotError = false; gulp.src(inPath). - pipe(haml().on('error', function(e) { + pipe(haml({hamlPath: './no-haml-here'}).on('error', function(e) { assert(e.message.indexOf(expected) > -1); gotError = true; done();
Add hamlPath option, tests pass
cheshire137_gulp-ruby-haml
train
999408b3f356cf49c831db98aaaae68c7c2e0942
diff --git a/src/test/java/com/mangofactory/swagger/spring/DocumentationRootTest.java b/src/test/java/com/mangofactory/swagger/spring/DocumentationRootTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/mangofactory/swagger/spring/DocumentationRootTest.java +++ b/src/test/java/com/mangofactory/swagger/spring/DocumentationRootTest.java @@ -2,6 +2,7 @@ package com.mangofactory.swagger.spring; import com.mangofactory.swagger.spring.controller.DocumentationController; import com.mangofactory.swagger.spring.sample.configuration.ServicesTestConfiguration; +import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -20,6 +21,7 @@ import static org.hamcrest.core.IsEqual.*; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.*; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; /** * Tests that exercise the documentation @@ -80,12 +82,11 @@ public class DocumentationRootTest { } @Test - @Ignore public void shouldHaveCorrectPathForBusinessServiceController() throws Exception { - mockMvc.perform(builder) - .andExpect(jsonPath("$.apiVersion").exists()) - .andExpect(jsonPath("$.apis[0].path").value(equalTo("/api-docs/business-service"))) - .andExpect(jsonPath("$.apis[0].description").value(equalTo(BUSINESS_ENTITY_SERVICES))); + String apiDescriptionSelector = "$.apis[?(@.path=='/api-docs/business-service')]"; + mockMvc.perform(builder) + .andExpect(jsonPath("$.apiVersion").exists()) + .andExpect(jsonPath(apiDescriptionSelector + ".description", Matchers.hasSize(1))); } @Test
Removing Ignore and making test deterministic
springfox_springfox
train
16ea1b54117a2c7247a3c14ae74a75f9f3809dcd
diff --git a/lib/sidekiq-unique-jobs.rb b/lib/sidekiq-unique-jobs.rb index <HASH>..<HASH> 100644 --- a/lib/sidekiq-unique-jobs.rb +++ b/lib/sidekiq-unique-jobs.rb @@ -4,6 +4,8 @@ require 'sidekiq_unique_jobs/middleware' require 'sidekiq_unique_jobs/version' require 'sidekiq_unique_jobs/config' require 'sidekiq_unique_jobs/payload_helper' +require 'sidekiq_unique_jobs/sidekiq_unique_ext' + require 'ostruct' module SidekiqUniqueJobs
Require sidekiq unique extensions and sidekiq/api
mhenrixon_sidekiq-unique-jobs
train