hash: string (length 40 to 40)
diff: string (length 131 to 114k)
message: string (length 7 to 980)
project: string (length 5 to 67)
split: string (1 class value)
0b2dca96ee5a4947264062ac5793690b87600731
diff --git a/cellbase-lib/src/main/java/org/opencb/cellbase/lib/builders/RefSeqGeneBuilder.java b/cellbase-lib/src/main/java/org/opencb/cellbase/lib/builders/RefSeqGeneBuilder.java index <HASH>..<HASH> 100644 --- a/cellbase-lib/src/main/java/org/opencb/cellbase/lib/builders/RefSeqGeneBuilder.java +++ b/cellbase-lib/src/main/java/org/opencb/cellbase/lib/builders/RefSeqGeneBuilder.java @@ -47,6 +47,8 @@ public class RefSeqGeneBuilder extends CellBaseBuilder { private Transcript transcript = null; private Set<Xref> exonDbxrefs = new HashSet<>(); private Set<Xref> geneDbxrefs = new HashSet<>(); + // sometimes there are two stop codons (eg NM_018159.4). Only parse the first one, skip the second + private boolean seenStopCodon = false; public RefSeqGeneBuilder(Path refSeqDirectoryPath, SpeciesConfiguration speciesConfiguration, CellBaseSerializer serializer) { super(serializer); @@ -115,10 +117,13 @@ public class RefSeqGeneBuilder extends CellBaseBuilder { parseCDS(gtf, indexer); break; case "start_codon": - //parseStartCodon(gtf); + seenStopCodon = false; break; case "stop_codon": - parseStopCodon(gtf); + if (!seenStopCodon) { + parseStopCodon(gtf); + seenStopCodon = true; + } break; default: throw new RuntimeException("Unexpected feature type: " + gtf.getFeature()); @@ -361,10 +366,6 @@ public class RefSeqGeneBuilder extends CellBaseBuilder { Transcript transcript = transcriptDict.get(gtf.getAttributes().get("transcript_id")); String exonId = transcript.getId() + "_" + exonNumber; Exon exon = exonDict.get(exonId); - if (exon == null) { - // exon hasn't been seen. it's a mistake - return; - } if (gtf.getStrand().equals("+")) { // In the positive strand, genomicCodingEnd for the last exon should be the "STOP CODON end" @@ -377,6 +378,24 @@ public class RefSeqGeneBuilder extends CellBaseBuilder { transcript.setCdnaCodingEnd(exon.getCdnaCodingEnd()); transcript.setCdsLength(transcript.getCdnaCodingEnd() - transcript.getCdnaCodingStart()); + // For NM_212554.4, the stop codon is split across the last intron with the first two bases of the codon in exon six + // and the third base of the stop codon in exon seven + if (gtf.getEnd() - gtf.getStart() == 1) { + String nextExonId = transcript.getId() + "_" + (exon.getExonNumber() + 1); + Exon nextExon = exonDict.get(nextExonId); + + nextExon.setGenomicCodingStart(nextExon.getStart()); + nextExon.setGenomicCodingEnd(nextExon.getStart()); + nextExon.setCdnaCodingStart(exon.getCdnaCodingEnd() + 1); + nextExon.setCdnaCodingEnd(exon.getCdnaCodingEnd() + 1); + nextExon.setCdsStart(exon.getCdsEnd() + 1); + nextExon.setCdsEnd(exon.getCdsEnd() + 1); + + transcript.setGenomicCodingEnd(nextExon.getStart()); + transcript.setCdnaCodingEnd(transcript.getCdnaCodingEnd() + 1); + transcript.setCdsLength(transcript.getCdnaCodingEnd() - transcript.getCdnaCodingStart()); + } + } else { // In the negative strand, genomicCodingStart for the first exon should be the "STOP CODON start". 
exon.setGenomicCodingStart(gtf.getStart()); @@ -387,6 +406,23 @@ public class RefSeqGeneBuilder extends CellBaseBuilder { transcript.setCdnaCodingEnd(exon.getCdnaCodingEnd()); transcript.setCdsLength(transcript.getCdnaCodingEnd() - transcript.getCdnaCodingStart()); + // For NM_212554.4, the stop codon is split across the last intron with the first two bases of the codon in exon six + // and the third base of the stop codon in exon seven + if (gtf.getEnd() - gtf.getStart() == 1) { + String nextExonId = transcript.getId() + "_" + (exon.getExonNumber() + 1); + Exon nextExon = exonDict.get(nextExonId); + + nextExon.setGenomicCodingStart(nextExon.getStart()); + nextExon.setGenomicCodingEnd(nextExon.getStart()); + nextExon.setCdnaCodingStart(exon.getCdnaCodingEnd() + 1); + nextExon.setCdnaCodingEnd(exon.getCdnaCodingEnd() + 1); + nextExon.setCdsStart(exon.getCdsEnd() + 1); + nextExon.setCdsEnd(exon.getCdsEnd() + 1); + + transcript.setGenomicCodingStart(nextExon.getEnd()); + transcript.setCdnaCodingEnd(transcript.getCdnaCodingEnd() + 1); + transcript.setCdsLength(transcript.getCdnaCodingEnd() - transcript.getCdnaCodingStart()); + } } }
parse stop codons that span introns
opencb_cellbase
train
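The commit above guards against GTF files that list two stop codons for one transcript (e.g. NM_018159.4). Below is a minimal Python sketch of that guard; the parser loop, feature tuples, and record values are hypothetical stand-ins for the Java RefSeqGeneBuilder, not the CellBase API.

```python
# Sketch: parse only the first stop_codon per coding region, skipping any
# duplicate, and reset the flag whenever a new start_codon appears.
def process_features(features):
    """features is an iterable of (feature_type, record) pairs."""
    seen_stop_codon = False  # mirrors the new Java field
    parsed = []
    for feature_type, record in features:
        if feature_type == "start_codon":
            # a new coding region begins: allow the next stop codon again
            seen_stop_codon = False
        elif feature_type == "stop_codon":
            if not seen_stop_codon:
                parsed.append(record)   # parse only the first stop codon
                seen_stop_codon = True  # ...and skip any later duplicate
        else:
            parsed.append(record)
    return parsed

if __name__ == "__main__":
    gtf = [("exon", "e1"), ("start_codon", "sc"),
           ("stop_codon", "stop1"), ("stop_codon", "stop2")]
    assert process_features(gtf) == ["e1", "stop1"]
```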
3ebfb227bed7859c51603802928493d9093a4b0a
diff --git a/src/Http/ServerRequestFactory.php b/src/Http/ServerRequestFactory.php index <HASH>..<HASH> 100644 --- a/src/Http/ServerRequestFactory.php +++ b/src/Http/ServerRequestFactory.php @@ -49,6 +49,7 @@ abstract class ServerRequestFactory implements ServerRequestFactoryInterface * @param array $body $_POST superglobal * @param array $cookies $_COOKIE superglobal * @param array $files $_FILES superglobal + * @param string $input php://input Used to stub request streams in testing. * @return \Cake\Http\ServerRequest * @throws \InvalidArgumentException for invalid file values */ @@ -57,7 +58,8 @@ abstract class ServerRequestFactory implements ServerRequestFactoryInterface ?array $query = null, ?array $body = null, ?array $cookies = null, - ?array $files = null + ?array $files = null, + ?string $input = null ): ServerRequest { $server = normalizeServer($server ?: $_SERVER); $uri = static::createUri($server); @@ -79,6 +81,7 @@ abstract class ServerRequestFactory implements ServerRequestFactoryInterface 'base' => $uri->base, 'session' => $session, 'mergeFilesAsObjects' => Configure::read('App.uploadedFilesAsObjects', true), + 'input' => $input, ]); return $request; diff --git a/src/TestSuite/IntegrationTestTrait.php b/src/TestSuite/IntegrationTestTrait.php index <HASH>..<HASH> 100644 --- a/src/TestSuite/IntegrationTestTrait.php +++ b/src/TestSuite/IntegrationTestTrait.php @@ -592,19 +592,6 @@ trait IntegrationTestTrait } parse_str($query, $queryData); - $props = [ - 'url' => $url, - 'session' => $session, - 'query' => $queryData, - 'files' => [], - ]; - if (is_string($data)) { - $props['input'] = $data; - } else { - $data = $this->_addTokens($tokenUrl, $data); - $props['post'] = $this->_castToString($data); - } - $props['cookies'] = $this->_cookie; $env = [ 'REQUEST_METHOD' => $method, @@ -627,7 +614,28 @@ trait IntegrationTestTrait } unset($this->_request['headers']); } - $props['environment'] = $env; + $props = [ + 'url' => $url, + 'session' => $session, + 'query' => $queryData, + 'files' => [], + 'environment' => $env, + ]; + + if (is_string($data)) { + $props['input'] = $data; + } elseif ( + is_array($data) && + isset($props['environment']['CONTENT_TYPE']) && + $props['environment']['CONTENT_TYPE'] === 'application/x-www-form-urlencoded' + ) { + $props['input'] = http_build_query($data); + } else { + $data = $this->_addTokens($tokenUrl, $data); + $props['post'] = $this->_castToString($data); + } + + $props['cookies'] = $this->_cookie; $props = Hash::merge($props, $this->_request); return $props; diff --git a/src/TestSuite/MiddlewareDispatcher.php b/src/TestSuite/MiddlewareDispatcher.php index <HASH>..<HASH> 100644 --- a/src/TestSuite/MiddlewareDispatcher.php +++ b/src/TestSuite/MiddlewareDispatcher.php @@ -162,17 +162,11 @@ class MiddlewareDispatcher $spec['query'], $spec['post'], $spec['cookies'], - $spec['files'] + $spec['files'], + $spec['input'] ?? 
null ); $request = $request->withAttribute('session', $spec['session']); - if (isset($spec['input'])) { - $stream = new Stream('php://memory', 'rw'); - $stream->write($spec['input']); - $stream->rewind(); - $request = $request->withBody($stream); - } - return $request; } diff --git a/tests/TestCase/TestSuite/IntegrationTestTraitTest.php b/tests/TestCase/TestSuite/IntegrationTestTraitTest.php index <HASH>..<HASH> 100644 --- a/tests/TestCase/TestSuite/IntegrationTestTraitTest.php +++ b/tests/TestCase/TestSuite/IntegrationTestTraitTest.php @@ -468,6 +468,24 @@ class IntegrationTestTraitTest extends TestCase } /** + * Test that the PSR7 requests receive put data + * + * @return void + */ + public function testPutDataFormUrlEncoded() + { + $this->configRequest([ + 'headers' => [ + 'Content-Type' => 'application/x-www-form-urlencoded', + ], + ]); + $this->put('/request_action/post_pass', ['title' => 'value']); + $this->assertResponseOk(); + $data = json_decode('' . $this->_response->getBody()); + $this->assertSame('value', $data->title); + } + + /** * Test that the uploaded files are passed correctly to the request * * @return void
Fix form-urlencoded PUT bodies not working in tests. When using the application/x-www-form-urlencoded content type, request bodies are expected to be present in the input stream. Propagate the input data through the request factory so that the stream is in the correct state during ServerRequest's constructor. Fixes #<I>
cakephp_cakephp
train
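A rough Python sketch of the decision the CakePHP fix makes: form-urlencoded bodies belong in the raw input stream, so a data array is serialized the way PHP's http_build_query would. The function and key names here are illustrative, not part of CakePHP.

```python
from urllib.parse import urlencode

def build_request_props(data, environment):
    """Decide whether test data belongs in the raw input stream or in
    the parsed 'post' array (a Python sketch of the CakePHP logic)."""
    props = {"environment": environment}
    if isinstance(data, str):
        # the caller already provided a raw body
        props["input"] = data
    elif environment.get("CONTENT_TYPE") == "application/x-www-form-urlencoded":
        # form-urlencoded bodies are read from the input stream,
        # so serialize the dict the same way http_build_query would
        props["input"] = urlencode(data)
    else:
        props["post"] = data
    return props

print(build_request_props({"title": "value"},
                          {"CONTENT_TYPE": "application/x-www-form-urlencoded"}))
```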
7c98fab9741ba7c1955c437ad30d5173a816152b
diff --git a/NavigationReactNative/sample/zoom/Detail.js b/NavigationReactNative/sample/zoom/Detail.js index <HASH>..<HASH> 100644 --- a/NavigationReactNative/sample/zoom/Detail.js +++ b/NavigationReactNative/sample/zoom/Detail.js @@ -29,7 +29,7 @@ export default ({color, stateNavigator}) => { </SharedElement> <SharedElement name={`text${color}`} - data={{color, fontSize: 80, fontColor: 0, hide: true}} + data={{color, fontSize: 80, fontColor: '#000', hide: true}} stateNavigator={stateNavigator}> <Text style={styles.text}>{color}</Text> </SharedElement> diff --git a/NavigationReactNative/sample/zoom/Grid.js b/NavigationReactNative/sample/zoom/Grid.js index <HASH>..<HASH> 100644 --- a/NavigationReactNative/sample/zoom/Grid.js +++ b/NavigationReactNative/sample/zoom/Grid.js @@ -35,7 +35,7 @@ export default ({stateNavigator}) => { <View> <SharedElement name={`text${color}`} - data={{color, fontSize: 20, fontColor: 255}} + data={{color, fontSize: 20, fontColor: '#fff'}} stateNavigator={stateNavigator}> <Text style={styles.text}>{color}</Text> </SharedElement> diff --git a/NavigationReactNative/sample/zoom/ZoomShared.js b/NavigationReactNative/sample/zoom/ZoomShared.js index <HASH>..<HASH> 100644 --- a/NavigationReactNative/sample/zoom/ZoomShared.js +++ b/NavigationReactNative/sample/zoom/ZoomShared.js @@ -30,7 +30,7 @@ export default ({stateNavigator}) => ( fontSize, textAlign: 'center', fontWeight: 'bold', - color: `rgb(${fontColor},${fontColor},${fontColor})`, + color: fontColor, zIndex: 1, }}> {color}
Used color interpolation from react-move
grahammendick_navigation
train
972188693bb73a6886e4d205761b2475b2d52bc4
diff --git a/test/3.x/async_helpers.js b/test/3.x/async_helpers.js index <HASH>..<HASH> 100644 --- a/test/3.x/async_helpers.js +++ b/test/3.x/async_helpers.js @@ -43,18 +43,9 @@ before(function () { }) }) - hbs.registerAsyncHelper('async-with-context', function (context, cb) { - var originalUrl = this.originalUrl - - process.nextTick(function () { - cb('originalUrl: ' + originalUrl) - }) - }) - - var count = 0 - // fake async helper, returns immediately // although a regular helper could have been used we should support this use case + var count = 0 hbs.registerAsyncHelper('fake-async', function (context, cb) { var val = 'instant' + count++ cb(val) diff --git a/test/4.x/async_helpers.js b/test/4.x/async_helpers.js index <HASH>..<HASH> 100644 --- a/test/4.x/async_helpers.js +++ b/test/4.x/async_helpers.js @@ -58,19 +58,9 @@ before(function () { }) }) - // access req data from res.locals - hbs.registerAsyncHelper('async-with-context', function (context, cb) { - var originalUrl = this.originalUrl - - process.nextTick(function () { - cb('originalUrl: ' + originalUrl) - }) - }) - - var count = 0 - // fake async helper, returns immediately // although a regular helper could have been used we should support this use case + var count = 0 hbs.registerAsyncHelper('fake-async', function (context, cb) { var val = 'instant' + count++ cb(val)
tests: clean up async tests
pillarjs_hbs
train
5d6fc2a1352cde90f70aa755d8281aa300d3eff7
diff --git a/lib/ood_core/job/adapters/lsf.rb b/lib/ood_core/job/adapters/lsf.rb index <HASH>..<HASH> 100644 --- a/lib/ood_core/job/adapters/lsf.rb +++ b/lib/ood_core/job/adapters/lsf.rb @@ -174,6 +174,10 @@ module OodCore NodeInfo.new(name: host[:host], procs: host[:slots]) end + # FIXME: estimated_runtime should be set by batch object instead of + dispatch_time = helper.parse_past_time(v[:start_time], ignore_errors: true) + finish_time = helper.parse_past_time(v[:finish_time], ignore_errors: true) + Info.new( id: v[:id], status: get_state(v[:status]), @@ -184,10 +188,10 @@ module OodCore accounting_id: v[:project], procs: nodes.any? ? nodes.map(&:procs).reduce(&:+) : 0, queue_name: v[:queue], - wallclock_time: nil, + wallclock_time: helper.estimate_runtime(current_time: Time.now, start_time: dispatch_time, finish_time: finish_time), cpu_time: nil, submission_time: helper.parse_past_time(v[:submit_time], ignore_errors: true), - dispatch_time: helper.parse_past_time(v[:start_time], ignore_errors: true), + dispatch_time: dispatch_time, native: v ) end diff --git a/lib/ood_core/job/adapters/lsf/helper.rb b/lib/ood_core/job/adapters/lsf/helper.rb index <HASH>..<HASH> 100644 --- a/lib/ood_core/job/adapters/lsf/helper.rb +++ b/lib/ood_core/job/adapters/lsf/helper.rb @@ -38,4 +38,13 @@ class OodCore::Job::Adapters::Lsf::Helper def exec_host_regex @exec_host_regex ||= Regexp.new(/((\d+)\*)?([^:]+)/) end + + # given current time, dispatch time, and finish time values, estimate the + # runtime for a job; this estimate will be accurate if the job never enters a + # suspended state during its execution + def estimate_runtime(current_time:, start_time:, finish_time:) + return nil if start_time.nil? + + (finish_time || current_time) - start_time + end end diff --git a/spec/job/adapters/lsf/helper_spec.rb b/spec/job/adapters/lsf/helper_spec.rb index <HASH>..<HASH> 100644 --- a/spec/job/adapters/lsf/helper_spec.rb +++ b/spec/job/adapters/lsf/helper_spec.rb @@ -83,4 +83,18 @@ describe OodCore::Job::Adapters::Lsf::Helper do expect(helper.parse_exec_host(nil)).to eq([]) end end + + describe "#estimate_runtime" do + it "for running job" do + expect(helper.estimate_runtime(current_time: Time.at(100), start_time: Time.at(10), finish_time: nil)).to eq(90) + end + + it "for completed job" do + expect(helper.estimate_runtime(current_time: Time.at(200), start_time: Time.at(10), finish_time: Time.at(100))).to eq(90) + end + + it "for job not yet started" do + expect(helper.estimate_runtime(current_time: Time.at(100), start_time: nil, finish_time: nil)).to eq(nil) + end + end end diff --git a/spec/job/adapters/lsf_spec.rb b/spec/job/adapters/lsf_spec.rb index <HASH>..<HASH> 100644 --- a/spec/job/adapters/lsf_spec.rb +++ b/spec/job/adapters/lsf_spec.rb @@ -78,6 +78,7 @@ describe OodCore::Job::Adapters::Lsf do # TODO: do we create a complex mock? 
let(:batch) { double(get_jobs: [job_hash], get_job: job_hash) } + let(:start_time) { Time.local(year, 3, 31, 14, 46, 44) } #FIXME: using the filters to select specific fields, we can ensure that this doesn't change #as LSF::Batch support more attributes @@ -119,13 +120,12 @@ describe OodCore::Job::Adapters::Lsf do :queue_name=>job_hash[:queue], - # TODO: not sure yet exactly what how to determine - :wallclock_time=>nil, - + # estimated run time + :wallclock_time=>Time.now - start_time, # TODO: job_hash[:cpu_used] converted to proper format :cpu_time=>nil, :submission_time=>Time.local(year, 3, 31, 14, 46, 42), - :dispatch_time=>Time.local(year, 3, 31, 14, 46, 44), + :dispatch_time=>start_time, :native=>job_hash ) }
lsf: estimate runtime of job for wallclock_time
OSC_ood_core
train
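The Ruby helper's runtime estimate translates almost directly; this is a hedged Python rendering of the same arithmetic, using plain numbers in place of Time objects, with the spec's three cases as assertions.

```python
def estimate_runtime(current_time, start_time, finish_time):
    """Estimate a job's runtime; accurate as long as the job was never
    suspended. Returns None for jobs that have not started yet."""
    if start_time is None:
        return None
    # running jobs have no finish time yet, so measure against "now"
    return (finish_time if finish_time is not None else current_time) - start_time

assert estimate_runtime(100, 10, None) == 90      # running job
assert estimate_runtime(200, 10, 100) == 90       # completed job
assert estimate_runtime(100, None, None) is None  # not yet started
```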
80b2591cc7d28bd838df01742f8e00fe6873fc1d
diff --git a/basis_set_exchange/__init__.py b/basis_set_exchange/__init__.py index <HASH>..<HASH> 100644 --- a/basis_set_exchange/__init__.py +++ b/basis_set_exchange/__init__.py @@ -8,7 +8,7 @@ basis set information # Just import the basic user API from .api import (get_basis, lookup_basis_by_role, get_metadata, get_reference_data, get_all_basis_names, get_references, get_basis_family, filter_basis_sets, get_families, get_family_notes, get_basis_notes, - get_schema, get_formats, get_reference_formats, get_roles) + get_schema, get_formats, get_reference_formats, get_roles, version) from .bundle import create_bundle # Handle versioneer @@ -17,8 +17,3 @@ versions = get_versions() __version__ = versions['version'] __git_revision__ = versions['full-revisionid'] del get_versions, versions - - -def version(): - '''Obtain the version of the basis set exchange library''' - return __version__ diff --git a/basis_set_exchange/api.py b/basis_set_exchange/api.py index <HASH>..<HASH> 100644 --- a/basis_set_exchange/api.py +++ b/basis_set_exchange/api.py @@ -2,7 +2,6 @@ Main interface to Basis Set Exchange functionality ''' -import datetime import os import textwrap @@ -16,6 +15,15 @@ from . import refconverters from . import references from . import misc from . import lut +from ._version import get_versions + +__version__ = get_versions()['version'] + + +def version(): + '''Obtain the version of the basis set exchange library (as a string)''' + return __version__ + # Determine the path to the data directory that is part # of this installation @@ -51,15 +59,12 @@ def _header_string(basis_dict): Information includes description, revision, etc, but not references ''' - dt = datetime.datetime.utcnow() - timestamp = dt.strftime('%Y-%m-%d %H:%M:%S UTC') - tw = textwrap.TextWrapper(initial_indent='', subsequent_indent=' ' * 20) header = '-' * 70 + '\n' header += ' Basis Set Exchange\n' + header += ' Version ' + version() + '\n' header += ' ' + _main_url + '\n' - header += ' Accessed ' + timestamp + '\n' header += '-' * 70 + '\n' header += ' Basis set: ' + basis_dict['basis_set_name'] + '\n' header += tw.fill(' Description: ' + basis_dict['basis_set_description']) + '\n' diff --git a/doc/usage.rst b/doc/usage.rst index <HASH>..<HASH> 100644 --- a/doc/usage.rst +++ b/doc/usage.rst @@ -22,6 +22,10 @@ Importing All end-user functionality is available by importing the `basis_set_exchange` module. +Determining the library version +------------------------------- + +The library version can be determined with :func:`basis_set_exchange.version()` Getting a basis set -------------------
Replace access timestamp with version string
MolSSI-BSE_basis_set_exchange
train
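A small sketch, assuming a stand-in __version__, of why the header swap matters: a version string makes generated files reproducible where a per-run timestamp would not. The header text is abbreviated from the diff.

```python
__version__ = "1.0.0"  # stand-in for versioneer's get_versions() value

def header_string(name):
    """Build an output header. Embedding the library version instead of
    an access timestamp keeps generated files byte-identical run to run."""
    lines = ["-" * 70,
             " Basis Set Exchange",
             " Version " + __version__,
             "-" * 70,
             " Basis set: " + name]
    return "\n".join(lines)

print(header_string("6-31G"))
```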
d2f5d4da3f9dbdf16a317d4462226de3d7f60205
diff --git a/categories/__init__.py b/categories/__init__.py index <HASH>..<HASH> 100644 --- a/categories/__init__.py +++ b/categories/__init__.py @@ -1,7 +1,7 @@ __version_info__ = { 'major': 1, 'minor': 0, - 'micro': 1, + 'micro': 2, 'releaselevel': 'final', 'serial': 1 } @@ -67,7 +67,7 @@ try: from categories import settings from django.core.exceptions import ImproperlyConfigured - from django.db.models import get_model + from django.db.models.loading import get_model for key, value in settings.FK_REGISTRY.items(): model = get_model(*key.split('.'))
Importing get_model directly from the loading module appears to fix certain edge cases. Version bump to <I>
callowayproject_django-categories
train
5a5fe885c2a6dca4f77e8a47e0dcbac83f147335
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "zino", - "version": "2.2.2", + "version": "2.2.3", "description": "Micro component framework", "main": "zino.js", "directories": { diff --git a/src/zino-tester.js b/src/zino-tester.js index <HASH>..<HASH> 100644 --- a/src/zino-tester.js +++ b/src/zino-tester.js @@ -74,11 +74,12 @@ export function matchesSnapshot(...args) { if (previousResult !== resultString) { // create a diff let diffResult = diff(previousResult, resultString); + process.stderr.write('\nComponent ' + fileName + ' - snapshots don\'t match: \n'); diffResult.forEach(part => { let color = part[0] === diff.DELETE ? 'red' : part[0] === diff.INSERT ? 'green' : 'gray'; process.stderr.write(part[1][color]); }); - if (readline.question('\nThe snapshots don\'t match.\nDo you want to take the new one as the reference snapshot (y/N)?') === 'y') { + if (readline.question('\nDo you want to take the new snapshot as the reference snapshot (y/N)?') === 'y') { writeResult(resultString); } else { throw new Error('Snapshots don\'t match.'); diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -954,11 +954,12 @@ function matchesSnapshot() { if (previousResult !== resultString) { // create a diff var diffResult = diff(previousResult, resultString); + process.stderr.write('\nComponent ' + fileName + ' - snapshots don\'t match: \n'); diffResult.forEach(function (part) { var color = part[0] === diff.DELETE ? 'red' : part[0] === diff.INSERT ? 'green' : 'gray'; process.stderr.write(part[1][color]); }); - if (readline.question('\nThe snapshots don\'t match.\nDo you want to take the new one as the reference snapshot (y/N)?') === 'y') { + if (readline.question('\nDo you want to take the new snapshot as the reference snapshot (y/N)?') === 'y') { writeResult(resultString); } else { throw new Error('Snapshots don\'t match.');
fixed a problem with snapshot mismatch reporting while testing: the diff output now names the component whose snapshots don't match
AndCake_zino
train
c29600155578cfcaf6616a028e12404153253631
diff --git a/src/Http/HttpIntegration.php b/src/Http/HttpIntegration.php index <HASH>..<HASH> 100644 --- a/src/Http/HttpIntegration.php +++ b/src/Http/HttpIntegration.php @@ -24,11 +24,7 @@ class HttpIntegration implements \Rougin\Slytherin\Integration\IntegrationInterf */ public function define(ContainerInterface $container, Configuration $config) { - $cookies = $config->get('app.http.cookies', array()); - $files = $config->get('app.http.files', array()); - $get = $config->get('app.http.get', array()); - $post = $config->get('app.http.post', array()); - $server = $config->get('app.http.server', $this->getSampleServer()); + list($server, $cookies, $get, $files, $post) = $this->getGlobalVariables($config); $request = new \Rougin\Slytherin\Http\ServerRequest($server, $cookies, $get, $files, $post); $response = new \Rougin\Slytherin\Http\Response; @@ -48,6 +44,23 @@ class HttpIntegration implements \Rougin\Slytherin\Integration\IntegrationInterf } /** + * Returns the PHP's global variables. + * + * @param \Rougin\Slytherin\Integration\Configuration $config + * @return array + */ + protected function getGlobalVariables(Configuration $config) + { + $cookies = $config->get('app.http.cookies', array()); + $files = $config->get('app.http.files', array()); + $get = $config->get('app.http.get', array()); + $post = $config->get('app.http.post', array()); + $server = $config->get('app.http.server', $this->getSampleServer()); + + return array($server, $cookies, $get, $files, $post); + } + + /** * Returns a sample $_SERVER values. * * @return array diff --git a/src/Template/RendererIntegration.php b/src/Template/RendererIntegration.php index <HASH>..<HASH> 100644 --- a/src/Template/RendererIntegration.php +++ b/src/Template/RendererIntegration.php @@ -24,13 +24,13 @@ class RendererIntegration implements \Rougin\Slytherin\Integration\IntegrationIn */ public function define(ContainerInterface $container, Configuration $config) { + $renderer = new VanillaRenderer($config->get('app.views', '')); + if (class_exists('Twig_Environment')) { $loader = new \Twig_Loader_Filesystem($config->get('app.views', '')); $twig = new \Twig_Environment($loader); $renderer = new TwigRenderer($twig); - } else { - $renderer = new VanillaRenderer($config->get('app.views', '')); } $container->set('Rougin\Slytherin\Template\RendererInterface', $renderer);
Update HttpIntegration and RendererIntegration
rougin_slytherin
train
a883c78d3e9f4aede4081c1a9cac65e01a477bfe
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -33,10 +33,6 @@ setup (name = 'phoebe', description = 'PHOEBE 2.0 devel', packages = ['phoebe', 'phoebe.constants', 'phoebe.parameters', 'phoebe.frontend', 'phoebe.constraints', 'phoebe.dynamics', 'phoebe.distortions', 'phoebe.algorithms', 'phoebe.atmospheres', 'phoebe.backend', 'phoebe.utils'], install_requires=['numpy','scipy','astropy'], - package_data={'phoebe.atmospheres':['tables/wd/*', 'tables/ptf/*.*','redlaws/*.*','tables/ld_coeffs/README', - 'tables/ld_coeffs/blackbody_uniform_none_teff.fits', - 'tables/spectra/README','tables/spec_intens/README', - 'tables/gravb/claret.dat', 'tables/gravb/espinosa.dat', - 'tables/passbands/*'], + package_data={'phoebe.atmospheres':['tables/wd/*', 'tables/passbands/*'], }, ext_modules = ext_modules)
remove unused package data from setup script. Tables are now stored in a separate repo and fetched/installed automatically
phoebe-project_phoebe2
train
d9f40cdac0bbeb6699411f664094a212cb1ca301
diff --git a/src/Cocur/Slugify/Slugify.php b/src/Cocur/Slugify/Slugify.php index <HASH>..<HASH> 100644 --- a/src/Cocur/Slugify/Slugify.php +++ b/src/Cocur/Slugify/Slugify.php @@ -174,7 +174,7 @@ class Slugify { // remove unwanted characters $string = preg_replace('#[^-\w]+#', '', $string); - if (empty($string)) { + if ($string === '') { return 'n' . $separator . 'a'; } diff --git a/tests/Cocur/Tests/Slugify/SlugifyIconvTest.php b/tests/Cocur/Tests/Slugify/SlugifyIconvTest.php index <HASH>..<HASH> 100644 --- a/tests/Cocur/Tests/Slugify/SlugifyIconvTest.php +++ b/tests/Cocur/Tests/Slugify/SlugifyIconvTest.php @@ -66,6 +66,7 @@ class SlugifyIconvTest extends \PHPUnit_Framework_TestCase { return array( array('Hello', 'hello'), + array('[0]', '0'), array('Hello World', 'hello-world'), array('Hello: World', 'hello-world'), // does not seem to work consistently
Check if the slugified version is really empty and not "0"
cocur_slugify
train
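PHP's empty('0') is true, which is the bug the slugify commit fixes by comparing against '' explicitly. The Python sketch below mirrors that fallback in simplified form (the real Slugify also transliterates and inserts separators before this step).

```python
import re

def slugify(string, separator="-"):
    # strip everything that is not a word character or a hyphen
    slug = re.sub(r"[^-\w]+", "", string).lower()
    # Compare against the empty string explicitly. PHP's empty() also
    # treats the string "0" as empty, which is why '[0]' used to slugify
    # to 'n-a' instead of '0' before the `=== ''` fix.
    if slug == "":
        return "n" + separator + "a"
    return slug

assert slugify("[0]") == "0"
assert slugify("!!!") == "n-a"
```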
c2cab2cafb54106a5f0b5b185d1ff2614e70ae7b
diff --git a/blankshield.js b/blankshield.js index <HASH>..<HASH> 100644 --- a/blankshield.js +++ b/blankshield.js @@ -1,24 +1,31 @@ ;(function(root) { 'use strict'; - var blankshield = function(ele) { - addEvent(ele, 'click', function(e) { - var href, usedModifier, child; + var handler = function(e) { + var href, usedModifier, child; - href = e.target.getAttribute('href'); - if (!href) return; + href = e.target.getAttribute('href'); + if (!href) return; - usedModifier = (e.ctrlKey || e.shiftKey || e.metaKey); - if (!usedModifier && e.target.getAttribute('target') !== '_blank') { - return; - } + usedModifier = (e.ctrlKey || e.shiftKey || e.metaKey); + if (!usedModifier && e.target.getAttribute('target') !== '_blank') { + return; + } - child = window.open(href); - child.opener = null; + child = window.open(href); + child.opener = null; - e.preventDefault(); - return false; - }); + e.preventDefault(); + }; + + var blankshield = function(target) { + if (typeof target.length === 'undefined') { + addEvent(target, 'click', handler); + } else if (typeof target !== 'string' && !(target instanceof String)) { + for (var i = 0; i < target.length; i++) { + addEvent(target[i], 'click', handler); + } + } }; function addEvent(target, type, listener) {
Support arrays and array-like objects
danielstjules_blankshield
train
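A Python analog of the blankshield dispatch: accept one element or an array-like collection, but never iterate a plain string. The attach callback stands in for the library's addEvent helper.

```python
def blankshield(target, attach):
    """Attach a click handler to one element or to every element of an
    array-like collection, but never iterate over a plain string."""
    if not hasattr(target, "__len__"):
        attach(target)                 # single element: no length property
    elif not isinstance(target, str):
        for element in target:         # array or array-like object
            attach(element)
    # strings fall through untouched, as in the JS version

attached = []
blankshield(["a", "b"], attached.append)
assert attached == ["a", "b"]

single = object()
blankshield(single, attached.append)
assert attached[-1] is single
```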
cd1cf1fb02745c87bd848f19860b334baa8522a6
diff --git a/salt/cloud/clouds/vmware.py b/salt/cloud/clouds/vmware.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/vmware.py +++ b/salt/cloud/clouds/vmware.py @@ -2570,8 +2570,8 @@ def create(vm_): global_ip = vim.vm.customization.GlobalIPSettings() if 'dns_servers' in list(vm_.keys()): global_ip.dnsServerList = vm_['dns_servers'] - hostName, domainName = split(r'[^\w-]', vm_name, maxsplit=1) - domainName = domainName.split('.', maxsplit=1)[-1] + hostName = split(r'[^\w-]', vm_name, maxsplit=1)[0] + domainName = vm_name.split('.', maxsplit=1)[-1] if 'Windows' not in object_ref.config.guestFullName: identity = vim.vm.customization.LinuxPrep() identity.hostName = vim.vm.customization.FixedName(name=hostName)
fix PR#<I>: if opts['name'] doesn't contain a .DOMAIN.TLD, the old split will fail to unpack 2 values
saltstack_salt
train
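Since this diff is already Python, the failure mode is easy to reproduce: a two-value unpack of re.split raises ValueError when the name has no domain part. A self-contained sketch of the fixed logic:

```python
from re import split

def host_and_domain(vm_name):
    # Old code did:  hostName, domainName = split(r'[^\w-]', vm_name, maxsplit=1)
    # which raises ValueError when vm_name has no '.DOMAIN.TLD' part,
    # because the split then yields only a single value.
    host_name = split(r"[^\w-]", vm_name, maxsplit=1)[0]
    domain_name = vm_name.split(".", 1)[-1]
    return host_name, domain_name

assert host_and_domain("web01.example.com") == ("web01", "example.com")
assert host_and_domain("web01") == ("web01", "web01")  # no domain: no crash
```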
1db8b2046e7dfe713441e18a3638bb63537b0037
diff --git a/closure/goog/net/jsonp_test.js b/closure/goog/net/jsonp_test.js index <HASH>..<HASH> 100644 --- a/closure/goog/net/jsonp_test.js +++ b/closure/goog/net/jsonp_test.js @@ -300,7 +300,7 @@ function testNonce() { var script = getScriptElement(result); assertEquals( 'Nonce attribute should have been added to script element.', 'foo', - script.getAttribute('nonce')); + (script['nonce'] || script.getAttribute('nonce'))); checkCleanup(); timeoutHandler();
Prefer the IDL 'nonce' property over .getAttribute('nonce'), which will stop working when <URL> because of a change in Chrome. ------------- Created by MOE: <URL>
google_closure-library
train
41404854bb6ece51213978c8da485c87a2087e64
diff --git a/spec/performable_mailer_integration_spec.rb b/spec/performable_mailer_integration_spec.rb index <HASH>..<HASH> 100644 --- a/spec/performable_mailer_integration_spec.rb +++ b/spec/performable_mailer_integration_spec.rb @@ -1,7 +1,5 @@ require 'spec_helper' -require 'spec_helper' - describe Delayed::PerformableMailer do it "queues and delivers a delayed mail" do expect {
Fix: removed double spec_helper.
mongoid_delayed_job_shallow_mongoid
train
94728b3096a5f43f5fee799e24c4ad66744f9962
diff --git a/engine/orchestrator/oo-orchestrator-impl/src/main/java/com/hp/score/ScoreTriggeringImpl.java b/engine/orchestrator/oo-orchestrator-impl/src/main/java/com/hp/score/ScoreTriggeringImpl.java index <HASH>..<HASH> 100644 --- a/engine/orchestrator/oo-orchestrator-impl/src/main/java/com/hp/score/ScoreTriggeringImpl.java +++ b/engine/orchestrator/oo-orchestrator-impl/src/main/java/com/hp/score/ScoreTriggeringImpl.java @@ -67,12 +67,19 @@ public class ScoreTriggeringImpl implements ScoreTriggering { String subFlowUuid = dependencyExecutionPlan.getFlowUuid(); Long subFlowRunningId = runningExecutionPlanService.getOrCreateRunningExecutionPlan(dependencyExecutionPlan); runningPlansIds.put(subFlowUuid, subFlowRunningId); - beginStepsIds.put(subFlowUuid, executionPlan.getBeginStep()); + beginStepsIds.put(subFlowUuid, dependencyExecutionPlan.getBeginStep()); } + + // Adding the ids of the running execution plan of the parent + its begin step + // since this map should contain all the ids of the running plans + Long runningPlanId = runningExecutionPlanService.getOrCreateRunningExecutionPlan(executionPlan); + runningPlansIds.put(executionPlan.getFlowUuid(), runningPlanId); + beginStepsIds.put(executionPlan.getFlowUuid(), executionPlan.getBeginStep()); + systemContext.put(ExecutionConstants.RUNNING_PLANS_MAP, (Serializable) runningPlansIds); systemContext.put(ExecutionConstants.BEGIN_STEPS_MAP, (Serializable) beginStepsIds); - return runningExecutionPlanService.getOrCreateRunningExecutionPlan(executionPlan); + return runningPlanId; } private void enqueue(ExecutionMessage... messages) {
Fix bug in creating the running execution plans of the dependencies
CloudSlang_score
train
0ef76ce525cfabfd057c1bf8434db07ae9b5a14b
diff --git a/git-api.js b/git-api.js index <HASH>..<HASH> 100644 --- a/git-api.js +++ b/git-api.js @@ -84,7 +84,17 @@ exports.registerApi = function(app, server, dev) { app.post(exports.pathPrefix + '/fetch', function(req, res) { if (!verifyPath(req.body.path, res)) return; - git('fetch', req.body.path, res); + git('fetch', req.body.path, res, undefined, function(err, text) { + if (err) { + if (err.stderr.indexOf('fatal: No remote repository specified.') == 0) { + res.json({}); + } else { + res.json(400, err); + } + } else { + res.json({}); + } + }); }); app.get(exports.pathPrefix + '/diff', function(req, res) { diff --git a/test/spec.git-api.remote.js b/test/spec.git-api.remote.js index <HASH>..<HASH> 100644 --- a/test/spec.git-api.remote.js +++ b/test/spec.git-api.remote.js @@ -80,6 +80,10 @@ describe('git-api remote', function () { common.post(req, '/fetch', { path: testDirLocal }, done); }); + it('fetching in "remote" should work', function(done) { + common.post(req, '/fetch', { path: testDirRemote }, done); + }); + it('log in "local" should show remote as one step ahead', function(done) { common.get(req, '/log', { path: testDirLocal }, done, function(err, res) { expect(res.body).to.be.a('array');
Fetch in a local-only repo works
FredrikNoren_ungit
train
155fd516ef18c370b892a24c72e356065f8a9a3b
diff --git a/src/voku/cache/AdapterApc.php b/src/voku/cache/AdapterApc.php index <HASH>..<HASH> 100644 --- a/src/voku/cache/AdapterApc.php +++ b/src/voku/cache/AdapterApc.php @@ -76,7 +76,7 @@ class AdapterApc implements iAdapter * @param bool $limited - If $limited is TRUE, the return value will exclude the individual list of cache entries. * This is useful when trying to optimize calls for statistics gathering. * - * @return array of cached data (and meta-data) or FALSE on failure. + * @return array|bool <p>Array of cached data (and meta-data) or FALSE on failure.</p> */ public function cacheInfo(string $type = '', bool $limited = false): array { diff --git a/src/voku/cache/AdapterApcu.php b/src/voku/cache/AdapterApcu.php index <HASH>..<HASH> 100644 --- a/src/voku/cache/AdapterApcu.php +++ b/src/voku/cache/AdapterApcu.php @@ -58,8 +58,7 @@ class AdapterApcu implements iAdapter /** * Clears the APCu cache by type. * - * @param string $type - If $type is "user", the user cache will be cleared; otherwise, - * the system cache (cached files) will be cleared. + * @param string $type <p>WARNING: is not used in APCu only valid for APC</p> * * @return bool * @@ -67,7 +66,7 @@ class AdapterApcu implements iAdapter */ public function cacheClear(string $type): bool { - return (bool)\apcu_clear_cache($type); + return (bool)\apcu_clear_cache(); } /** @@ -76,7 +75,7 @@ class AdapterApcu implements iAdapter * @param bool $limited - If $limited is TRUE, the return value will exclude the individual list of cache entries. * This is useful when trying to optimize calls for statistics gathering. * - * @return array of cached data (and meta-data) or FALSE on failure. + * @return array|bool <p>Array of cached data (and meta-data) or FALSE on failure.</p> */ public function cacheInfo(bool $limited = false): array {
[+]: "AdapterApcu" -> fixed php-warning from "apcu_clear_cache()"
voku_simple-cache
train
16b9242ff4a74e58fe01588ae080f8925ca1e8e6
diff --git a/seqcluster/prepare_data.py b/seqcluster/prepare_data.py index <HASH>..<HASH> 100644 --- a/seqcluster/prepare_data.py +++ b/seqcluster/prepare_data.py @@ -130,6 +130,7 @@ def _read_fastq_files(f, args): seq_l[seq].quality = keep[seq].get() print >>out_handle, "total\t%s\t%s" % (idx, cols[1]) print >>out_handle, "added\t%s\t%s" % (len(seq_l), cols[1]) + logger.info("%s: Total read %s ; Total added %s" % (cols[1], idx, len(seq_l))) return seq_l, sample_l @@ -143,12 +144,14 @@ def _create_matrix_uniq_seq(sample_l, seq_l, maout, out, min_shared): :returns: Null """ + skip = 0 maout.write("id\tseq") for g in sample_l: maout.write("\t%s" % g) for s in seq_l.keys(): seen = sum([1 for g in seq_l[s].group if seq_l[s].group[g] > 0]) if seen < int(min_shared): + skip += 1 continue maout.write("\nseq_%s\t%s" % (seq_l[s].idx, seq_l[s].seq)) for g in sample_l: @@ -160,3 +163,4 @@ def _create_matrix_uniq_seq(sample_l, seq_l, maout, out, min_shared): out.write("@seq_%s\n%s\n+\n%s\n" % (seq_l[s].idx, seq_l[s].seq, qual)) out.close() maout.close() + logger.info("Total skipped due to --min-shared parameter (%s) : %s" % (min_shared, skip))
Add more informative logging to the prepare sub-command
lpantano_seqcluster
train
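A sketch of the --min-shared bookkeeping the commit adds, with a made-up seqs structure (sequence → per-sample counts) standing in for seqcluster's seq_l objects.

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def filter_by_min_shared(seqs, min_shared):
    """Keep sequences seen in at least `min_shared` samples, counting and
    logging how many were skipped, as the commit does."""
    kept, skipped = [], 0
    for seq, counts in seqs.items():
        seen = sum(1 for c in counts.values() if c > 0)
        if seen < min_shared:
            skipped += 1
            continue
        kept.append(seq)
    logger.info("Total skipped due to --min-shared parameter (%s) : %s",
                min_shared, skipped)
    return kept

filter_by_min_shared({"ACGT": {"s1": 3, "s2": 0},
                      "TTTT": {"s1": 1, "s2": 2}}, 2)
```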
1c3fa10e4082ecc54a608af885295491fca3e757
diff --git a/google/datalab/bigquery/commands/_bigquery.py b/google/datalab/bigquery/commands/_bigquery.py index <HASH>..<HASH> 100644 --- a/google/datalab/bigquery/commands/_bigquery.py +++ b/google/datalab/bigquery/commands/_bigquery.py @@ -854,9 +854,10 @@ def _extract_cell(args, cell_body): if not source: raise Exception('Could not find table %s' % args['table']) + csv_delimiter = args['delimiter'] if args['format'] == 'csv' else None job = source.extract(args['path'], - format='CSV' if args['format'] == 'csv' else 'NEWLINE_DELIMITED_JSON', - csv_delimiter=args['delimiter'], csv_header=args['header'], + format=args['format'], + csv_delimiter=csv_delimiter, csv_header=args['header'], compress=args['compress']) elif args['query'] or args['view']: source_name = args['view'] or args['query'] @@ -918,7 +919,7 @@ def _load_cell(args, cell_body): quote=args['quote']) job = table.load(args['path'], mode=args['mode'], - source_format=('csv' if args['format'] == 'csv' else 'NEWLINE_DELIMITED_JSON'), + source_format=args['format'], csv_options=csv_options, ignore_unknown_values=not args['strict']) if job.failed: diff --git a/tests/kernel/bigquery_tests.py b/tests/kernel/bigquery_tests.py index <HASH>..<HASH> 100644 --- a/tests/kernel/bigquery_tests.py +++ b/tests/kernel/bigquery_tests.py @@ -671,7 +671,7 @@ WITH q1 AS ( @mock.patch('google.datalab.bigquery.commands._bigquery._get_table') @mock.patch('google.datalab.utils.commands.get_notebook_item') def test_extract_cell_table(self, mock_get_notebook_item, mock_get_table, mock_table_extract): - args = {'table': 'test-table', 'path': 'test-path', 'format': None, 'delimiter': None, + args = {'table': 'test-table', 'path': 'test-path', 'format': 'json', 'delimiter': None, 'header': None, 'compress': None, 'nocache': None} mock_get_table.return_value = None with self.assertRaisesRegexp(Exception, 'Could not find table test-table'): @@ -684,7 +684,7 @@ WITH q1 AS ( mock_table_extract.return_value.errors = None self.assertEqual(google.datalab.bigquery.commands._bigquery._extract_cell(args, None), 'test-results') - mock_table_extract.assert_called_with('test-path', format='NEWLINE_DELIMITED_JSON', + mock_table_extract.assert_called_with('test-path', format='json', csv_delimiter=None, csv_header=None, compress=None) @mock.patch('google.datalab.Context.default') @@ -715,7 +715,7 @@ WITH q1 AS ( def test_load_cell(self, mock_get_table, mock_table_load, mock_table_exists, mock_table_create, mock_default_context): args = {'table': 'project.test.table', 'mode': 'create', 'path': 'test/path', 'skip': None, - 'csv': None, 'delimiter': None, 'format': None, 'strict': None, 'quote': None} + 'csv': None, 'delimiter': None, 'format': 'csv', 'strict': None, 'quote': None} context = self._create_context() table = google.datalab.bigquery.Table('project.test.table') mock_get_table.return_value = table @@ -755,7 +755,7 @@ WITH q1 AS ( google.datalab.bigquery.commands._bigquery._load_cell(args, json.dumps(cell_body)) mock_table_load.assert_called_with('test/path', mode='create', - source_format='NEWLINE_DELIMITED_JSON', + source_format='csv', csv_options=mock.ANY, ignore_unknown_values=True) mock_get_table.return_value = None
Fix two %%bq load/extract issues. (#<I>) * Fix two %%bq load/extract issues. - %%bq load with json fails with "Invalid source format NEWLINE_DELIMITED_JSON" - %%bq extract with json fails because we set a csv_delimiter and BQ doesn't like it. * Also fix tests.
googledatalab_pydatalab
train
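The essence of the %%bq extract fix, sketched: only CSV exports may carry a delimiter, so the argument is nulled out for JSON. The dict returned here is illustrative, not the datalab API.

```python
def extract_args(fmt, delimiter, header, compress):
    """Build arguments for a table extract call. BigQuery rejects a
    csv_delimiter on JSON exports, so only pass it through for CSV."""
    csv_delimiter = delimiter if fmt == "csv" else None
    return {
        "format": fmt,
        "csv_delimiter": csv_delimiter,
        "csv_header": header,
        "compress": compress,
    }

assert extract_args("json", ",", True, False)["csv_delimiter"] is None
assert extract_args("csv", "|", True, False)["csv_delimiter"] == "|"
```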
509336e080e20a55e172468203956693d409465f
diff --git a/lib/validates_timeliness/conversion.rb b/lib/validates_timeliness/conversion.rb index <HASH>..<HASH> 100644 --- a/lib/validates_timeliness/conversion.rb +++ b/lib/validates_timeliness/conversion.rb @@ -2,7 +2,7 @@ module ValidatesTimeliness module Conversion def type_cast_value(value, type) - return nil if value.nil? + return nil if value.nil? || !value.respond_to?(:to_time) value = value.in_time_zone if value.acts_like?(:time) && @timezone_aware value = case type diff --git a/spec/validates_timeliness/conversion_spec.rb b/spec/validates_timeliness/conversion_spec.rb index <HASH>..<HASH> 100644 --- a/spec/validates_timeliness/conversion_spec.rb +++ b/spec/validates_timeliness/conversion_spec.rb @@ -22,6 +22,10 @@ describe ValidatesTimeliness::Conversion do it "should return date part of datetime value" do type_cast_value(DateTime.new(2010, 1, 1, 0, 0, 0), :date).should == Date.new(2010, 1, 1) end + + it 'should return nil for invalid value types' do + type_cast_value(12, :date).should == nil + end end describe "for time type" do @@ -40,6 +44,10 @@ describe ValidatesTimeliness::Conversion do it "should return dummy date with time part for datetime value" do type_cast_value(DateTime.civil_from_format(:utc, 2010, 1, 1, 12, 34, 56), :time).should == Time.utc(2000, 1, 1, 12, 34, 56) end + + it 'should return nil for invalid value types' do + type_cast_value(12, :time).should == nil + end end describe "for datetime type" do @@ -63,6 +71,10 @@ describe ValidatesTimeliness::Conversion do result.should == Time.zone.local(2010, 1, 1, 23, 34, 56) result.zone.should == 'EST' end + + it 'should return nil for invalid value types' do + type_cast_value(12, :datetime).should == nil + end end describe "ignore_usec option" do
Fix type_cast_value for values which don't respond to to_time or to_date (renatoelias)
adzap_validates_timeliness
train
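A duck-typing guard in the spirit of the Ruby fix, assuming to_time as the marker method; the caster callable is a placeholder for the real date/time conversion.

```python
def type_cast_value(value, caster):
    """Cast only values that quack like a time; anything else (e.g. the
    integer 12) is treated as invalid and becomes None."""
    if value is None or not hasattr(value, "to_time"):
        return None
    return caster(value)

class FakeTime:
    def to_time(self):
        return self

assert type_cast_value(12, lambda v: v) is None  # invalid value type
assert type_cast_value(None, lambda v: v) is None
assert isinstance(type_cast_value(FakeTime(), lambda v: v), FakeTime)
```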
f30236b1ffefa070a7dc9f906716cd504dcde6ce
diff --git a/src/lokijs.js b/src/lokijs.js index <HASH>..<HASH> 100644 --- a/src/lokijs.js +++ b/src/lokijs.js @@ -1003,10 +1003,12 @@ * @param {boolean} [options.asyncListeners=false] - whether listeners are called asynchronously * @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents * @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes Api + * @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning) * @param {boolean} [options.autoupdate=false] - use Object.observe to update objects automatically * @param {boolean} [options.clone=false] - specify whether inserts and queries clone to/from user * @param {string} [options.cloneMethod='parse-stringify'] - 'parse-stringify', 'jquery-extend-deep', 'shallow, 'shallow-assign' - * @param {int} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default. + * @param {int=} options.ttl - age of document (in ms.) before document is considered aged/stale. + * @param {int=} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default. * @returns {Collection} a reference to the collection which was just added * @memberof Loki */ @@ -1014,6 +1016,18 @@ var i, len = this.collections.length; + if (options && options.disableMeta === true) { + if (options.disableChangesApi === false) { + throw new Error("disableMeta option cannot be passed as true when disableChangesApi is passed as false"); + } + if (options.disableDeltaChangesApi === false) { + throw new Error("disableMeta option cannot be passed as true when disableDeltaChangesApi is passed as false"); + } + if (typeof options.ttl === "number" && options.ttl > 0) { + throw new Error("disableMeta option cannot be passed as true when ttl is enabled"); + } + } + for (i = 0; i < len; i += 1) { if (this.collections[i].name === name) { return this.collections[i]; @@ -1545,12 +1559,6 @@ this.name = dbObject.name; - // restore database version - //this.databaseVersion = 1.0; - //if (dbObject.hasOwnProperty('databaseVersion')) { - // this.databaseVersion = dbObject.databaseVersion; - //} - // restore save throttled boolean only if not defined in options if (dbObject.hasOwnProperty('throttledSaves') && options && !options.hasOwnProperty('throttledSaves')) { this.throttledSaves = dbObject.throttledSaves; @@ -4405,13 +4413,15 @@ * @param {array=} [options.indices=[]] - array property names to define binary indexes for * @param {boolean} [options.adaptiveBinaryIndices=true] - collection indices will be actively rebuilt rather than lazily * @param {boolean} [options.asyncListeners=false] - whether listeners are invoked asynchronously + * @param {boolean} [options.disableMeta=false] - set to true to disable meta property on documents * @param {boolean} [options.disableChangesApi=true] - set to false to enable Changes API * @param {boolean} [options.disableDeltaChangesApi=true] - set to false to enable Delta Changes API (requires Changes API, forces cloning) * @param {boolean} [options.autoupdate=false] - use Object.observe to update objects automatically * @param {boolean} [options.clone=false] - specify whether inserts and queries clone to/from user * @param {boolean} [options.serializableIndices=true[]] - converts date values on binary indexed properties to epoch time * @param {string} [options.cloneMethod='parse-stringify'] - 'parse-stringify', 'jquery-extend-deep', 'shallow', 'shallow-assign' - * @param 
{int} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default. + * @param {int=} options.ttl - age of document (in ms.) before document is considered aged/stale. + * @param {int=} options.ttlInterval - time interval for clearing out 'aged' documents; not set by default. * @see {@link Loki#addCollection} for normal creation of collections */ function Collection(name, options) {
#<I>: Added consistency checks between disableMeta, Changes API, DeltaChangesAPI and TTL. Now that the new collection option 'disableMeta' allows you to not attach loki meta to documents, we needed to add consistency checks to make sure you are also not attempting to enable collection options which require that meta to work properly.
techfort_LokiJS
train
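A sketch of those option consistency checks, using a plain dict for the collection options; the error messages paraphrase the LokiJS ones.

```python
def check_collection_options(options):
    """Raise early when disableMeta is combined with features that
    depend on per-document meta (changes APIs, TTL)."""
    if not options.get("disableMeta"):
        return
    if options.get("disableChangesApi") is False:
        raise ValueError("disableMeta cannot be true when "
                         "disableChangesApi is false")
    if options.get("disableDeltaChangesApi") is False:
        raise ValueError("disableMeta cannot be true when "
                         "disableDeltaChangesApi is false")
    ttl = options.get("ttl")
    if isinstance(ttl, (int, float)) and ttl > 0:
        raise ValueError("disableMeta cannot be true when ttl is enabled")

check_collection_options({"disableMeta": True})  # fine: nothing meta-dependent
```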
d5c8e5bd260bac0b58569a84e787e822fd051106
diff --git a/client/allocrunner/alloc_runner_test.go b/client/allocrunner/alloc_runner_test.go index <HASH>..<HASH> 100644 --- a/client/allocrunner/alloc_runner_test.go +++ b/client/allocrunner/alloc_runner_test.go @@ -64,11 +64,11 @@ func testAllocRunnerConfig(t *testing.T, alloc *structs.Allocation) (*Config, fu Logger: clientConf.Logger, ClientConfig: clientConf, StateDB: state.NoopDB{}, - Consul: consulapi.NewMockConsulServiceClient(t, logger), + Consul: consulapi.NewMockConsulServiceClient(t, clientConf.Logger), Vault: vaultclient.NewMockVaultClient(), StateUpdater: &MockStateUpdater{}, PrevAllocWatcher: allocwatcher.NoopPrevAlloc{}, - PluginSingletonLoader: singleton.NewSingletonLoader(logger, pluginLoader), + PluginSingletonLoader: singleton.NewSingletonLoader(clientConf.Logger, pluginLoader), } return conf, cleanup } diff --git a/client/allocrunner/taskrunner/task_runner_test.go b/client/allocrunner/taskrunner/task_runner_test.go index <HASH>..<HASH> 100644 --- a/client/allocrunner/taskrunner/task_runner_test.go +++ b/client/allocrunner/taskrunner/task_runner_test.go @@ -144,20 +144,14 @@ func TestTaskRunner_Restore_Running(t *testing.T) { // Wait for new task runner to exit when the process does <-newTR.WaitCh() - // Assert that the process was only started once, and only restored once + // Assert that the process was only started once started := 0 - restored := 0 state := newTR.TaskState() require.Equal(structs.TaskStateDead, state.State) for _, ev := range state.Events { - t.Logf("task event: %s %s", ev.Type, ev.Message) - switch ev.Type { - case structs.TaskStarted: + if ev.Type == structs.TaskStarted { started++ - case structs.TaskRestored: - restored++ } } assert.Equal(t, 1, started) - assert.Equal(t, 1, restored) }
client: fix ar and tr tests
hashicorp_nomad
train
907fe255a1eb1bc6b227a5beb6aa144a37d3e422
diff --git a/OpenPNM/Utilities/misc.py b/OpenPNM/Utilities/misc.py index <HASH>..<HASH> 100644 --- a/OpenPNM/Utilities/misc.py +++ b/OpenPNM/Utilities/misc.py @@ -1,5 +1,5 @@ import scipy as _sp -import time +import time as _time def iscoplanar(coords): r''' @@ -48,8 +48,8 @@ def tic(): Homemade version of matlab tic and toc function, tic starts or resets the clock, toc reports the time since the last call of tic. ''' - global startTime_for_tictoc - startTime_for_tictoc = time.time() + global _startTime_for_tictoc + _startTime_for_tictoc = _time.time() def toc(quiet=False): r''' @@ -62,8 +62,8 @@ def toc(quiet=False): If False (default) then a message is output to the console. If True the message is not displayed and the elapsed time is returned. ''' - if 'startTime_for_tictoc' in globals(): - t = time.time() - startTime_for_tictoc + if '_startTime_for_tictoc' in globals(): + t = _time.time() - _startTime_for_tictoc if quiet == False: print('Elapsed time in seconds: ', t) else:
A few updates to misc - it is important to import other modules preceded with an underscore, or else they pollute the namespace. Former-commit-id: <I>a5b<I>b<I>d8da6c4bac3d6c9a<I>ae Former-commit-id: 8f<I>be5dd<I>caf<I>e<I>abefdc<I>a<I>bdaaf<I>d
PMEAL_OpenPNM
train
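The tic/toc helpers with the underscore-import idiom are short enough to show in full; this version uses a None sentinel instead of the original's globals() lookup, which is a simplification.

```python
import time as _time  # underscore prefix keeps 'time' out of the public namespace

_startTime_for_tictoc = None

def tic():
    """Start or reset the stopwatch."""
    global _startTime_for_tictoc
    _startTime_for_tictoc = _time.time()

def toc(quiet=False):
    """Report seconds since the last tic(); return them when quiet."""
    if _startTime_for_tictoc is None:
        return None
    t = _time.time() - _startTime_for_tictoc
    if not quiet:
        print('Elapsed time in seconds: ', t)
    else:
        return t

tic()
assert toc(quiet=True) >= 0
```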
cf667c43130faccf6690e86dd745cd8e598aeb10
diff --git a/control.py b/control.py index <HASH>..<HASH> 100644 --- a/control.py +++ b/control.py @@ -1285,7 +1285,6 @@ def acc_find_possible_activities(user_info, ln=CFG_SITE_LANG): is allowed (i.e. all the administrative action which are connected to an web area in Invenio) and the corresponding url. """ - _ = gettext_set_language(ln) your_role_actions = acc_find_user_role_actions(user_info) your_admin_activities = {} for (role, action) in your_role_actions: @@ -1322,7 +1321,7 @@ def acc_find_possible_activities(user_info, ln=CFG_SITE_LANG): ret = {} for action, (name, url) in iteritems(your_admin_activities): - ret[_(name)] = url % ln + ret[name] = url % ln return ret
access: infinite recursion hotfix * Temporary fix for the occasional issue causing infinite recursion error upon loading the session. (addresses #<I>)
inveniosoftware_invenio-access
train
efa65d16b6e0b835c1786a47140c974e5c0f71cb
diff --git a/registry/session.go b/registry/session.go index <HASH>..<HASH> 100644 --- a/registry/session.go +++ b/registry/session.go @@ -281,7 +281,11 @@ func (r *Session) GetRepositoryData(remote string) (*RepositoryData, error) { // TODO: Right now we're ignoring checksums in the response body. // In the future, we need to use them to check image validity. if res.StatusCode != 200 { - return nil, utils.NewHTTPRequestError(fmt.Sprintf("HTTP code: %d", res.StatusCode), res) + errBody, err := ioutil.ReadAll(res.Body) + if err != nil { + log.Debugf("Error reading response body: %s", err) + } + return nil, utils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to pull repository %s: %q", res.StatusCode, remote, errBody), res) } var tokens []string @@ -510,7 +514,7 @@ func (r *Session) PushImageJSONIndex(remote string, imgList []*ImgData, validate if res.StatusCode != 200 && res.StatusCode != 201 { errBody, err := ioutil.ReadAll(res.Body) if err != nil { - return nil, err + log.Debugf("Error reading response body: %s", err) } return nil, utils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push repository %s: %q", res.StatusCode, remote, errBody), res) } @@ -534,7 +538,7 @@ func (r *Session) PushImageJSONIndex(remote string, imgList []*ImgData, validate if res.StatusCode != 204 { errBody, err := ioutil.ReadAll(res.Body) if err != nil { - return nil, err + log.Debugf("Error reading response body: %s", err) } return nil, utils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push checksums %s: %q", res.StatusCode, remote, errBody), res) }
print detailed error info for docker pull When docker push gets a response with an unknown HTTP status, the docker daemon prints: "Error: Status XXX trying to push repository XXX: XXX" But when docker pull encounters a response with an unknown status code, it gives: "HTTP code: XXX" This commit helps docker pull print more detailed error info like push does, so push and pull behave consistently when an error happens.
containers_storage
train
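A hedged sketch of the error-reporting pattern: read the response body for the error message, but only log (never fail) if that read itself errors. The body_reader callable stands in for the HTTP response stream.

```python
import logging

log = logging.getLogger(__name__)

def check_registry_response(status, body_reader, remote):
    """Turn a non-200 registry response into a detailed error, reading the
    body for context but only logging if the read itself fails."""
    if status == 200:
        return
    try:
        err_body = body_reader()
    except OSError as exc:
        log.debug("Error reading response body: %s", exc)
        err_body = b""
    raise RuntimeError("Error: Status %d trying to pull repository %s: %r"
                       % (status, remote, err_body))

try:
    check_registry_response(404, lambda: b"repository not found", "foo/bar")
except RuntimeError as e:
    print(e)
```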
3cedb90e111be5b32119c9dc065bd24735fc4a2d
diff --git a/willow-sshagent-auth/src/main/java/com/nitorcreations/willow/sshagentauth/SSHUtil.java b/willow-sshagent-auth/src/main/java/com/nitorcreations/willow/sshagentauth/SSHUtil.java index <HASH>..<HASH> 100644 --- a/willow-sshagent-auth/src/main/java/com/nitorcreations/willow/sshagentauth/SSHUtil.java +++ b/willow-sshagent-auth/src/main/java/com/nitorcreations/willow/sshagentauth/SSHUtil.java @@ -17,11 +17,16 @@ import com.jcraft.jsch.jce.SignatureDSA; import com.jcraft.jsch.jce.SignatureRSA; public class SSHUtil { - private static Logger logger = Logger.getLogger(SSHUtil.class.getCanonicalName()); + private static final SecureRandom random = new SecureRandom(); + private static final Logger logger = Logger.getLogger(SSHUtil.class.getCanonicalName()); + private static final String ENV_SSH_ID = "W_SSH_IDENTITY"; + private static SSHAuthentication sshAuthentication; - private static String ENV_SSH_ID = "W_SSH_IDENTITY"; - private final static SecureRandom random = new SecureRandom(); - static { + + private static synchronized void initializeSshAuthentication() { + if (sshAuthentication != null) { + return; + } String sshId = System.getenv(ENV_SSH_ID); if (sshId != null) { sshAuthentication = new PrivateKeySSHAuthentication(); @@ -40,7 +45,7 @@ public class SSHUtil { String home = System.getProperty("user.home"); String sshDir = home + File.separator + ".ssh" + File.separator; String[] defaultKeys = new String[] { - sshDir + "id_rsa", sshDir + "id_dsa", sshDir + "identity" + sshDir + "id_ecdsa", sshDir + "id_id_ed25519", sshDir + "id_rsa", sshDir + "id_dsa", sshDir + "identity" }; sshAuthentication = new PrivateKeySSHAuthentication(); for (String nextKey : defaultKeys) { @@ -121,6 +126,7 @@ public class SSHUtil { return ret; } public static String getPublicKeyAuthorization(String username) { + initializeSshAuthentication(); StringBuilder ret = new StringBuilder("PUBLICKEY "); String now = Long.toString(System.currentTimeMillis()); byte[] rnd = new byte[39];
Only initialize the SshAuthentication when needed. Otherwise, for example, the PrivateKeySSHAuthenticationTest can pause waiting for a popup window to enter the secrets for keys found in the ~/.ssh/ directory
NitorCreations_willow
train
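A Python rendering of the lazy, synchronized initialization the commit introduces; threading.Lock plays the role of Java's synchronized, and object() stands in for the real key discovery.

```python
import threading

_lock = threading.Lock()
_ssh_authentication = None

def _initialize_ssh_authentication():
    """Create the authentication object on first use only, so merely
    importing the module never prompts for key passphrases."""
    global _ssh_authentication
    with _lock:  # same role as Java's synchronized method
        if _ssh_authentication is None:
            _ssh_authentication = object()  # stand-in for the real setup
    return _ssh_authentication

def get_public_key_authorization(username):
    auth = _initialize_ssh_authentication()  # deferred, not at import time
    return "PUBLICKEY for %s via %r" % (username, auth)

print(get_public_key_authorization("alice"))
```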
0c0d915d7a5ab2715c94d102c13f2cd2018744d6
diff --git a/src/AnimationController.js b/src/AnimationController.js index <HASH>..<HASH> 100644 --- a/src/AnimationController.js +++ b/src/AnimationController.js @@ -577,6 +577,9 @@ define(function(require, exports, module) { _setItemOptions.call(this, item, options); _updateState.call(this); } + if (callback) { + callback(); + } return this; } if (item && (item.state !== ItemState.HIDING) && options) {
Fixed show callback not being executed when the same renderable is shown in AnimationController
IjzerenHein_famous-flex
train
3d2d595fcbafec2f6286ef98ecf644ab52860294
diff --git a/lib/rubocop/cop/collection_methods.rb b/lib/rubocop/cop/collection_methods.rb index <HASH>..<HASH> 100644 --- a/lib/rubocop/cop/collection_methods.rb +++ b/lib/rubocop/cop/collection_methods.rb @@ -4,23 +4,26 @@ module Rubocop module Cop class CollectionMethods < Cop PREFERRED_METHODS = { - 'collect' => 'map', - 'inject' => 'reduce', - 'detect' => 'find', - 'find_all' => 'select', + collect: 'map', + inject: 'reduce', + detect: 'find', + find_all: 'select' } + def self.portable? + true + end + def inspect(file, source, tokens, sexp) - each(:call, sexp) do |s| - s.drop(2).each_slice(2) do |m| - method_name = m[1][1] - if PREFERRED_METHODS[method_name] - add_offence( - :convention, - m[1][2].lineno, - "Prefer #{PREFERRED_METHODS[method_name]} over #{method_name}." - ) - end + on_node(:send, sexp) do |node| + _receiver, method_name, *_args = *node + + if PREFERRED_METHODS[method_name] + add_offence( + :convention, + node.src.line, + "Prefer #{PREFERRED_METHODS[method_name]} over #{method_name}." + ) end end end
Port CollectionMethods to Parser
rubocop-hq_rubocop
train
8a2487589210201c83a62538bf84f8c787fd54d0
diff --git a/mixbox/entities.py b/mixbox/entities.py index <HASH>..<HASH> 100644 --- a/mixbox/entities.py +++ b/mixbox/entities.py @@ -45,6 +45,34 @@ def _dictify(field, value): return value +class EntityFactory(object): + _dictkey = "xsi:type" + _objkey = "xsi_type" + + @classmethod + def entity_class(cls, key): + """Must be implemented by a subclass.""" + pass + + @classmethod + def from_dict(cls, cls_dict): + if not cls_dict: + return None + + typekey = cls_dict.get(cls._dictkey) + klass = cls.entity_class(typekey) + return klass.from_dict(cls_dict) + + @classmethod + def from_obj(cls, cls_obj): + if not cls_obj: + return None + + typekey = getattr(cls_obj, cls._objkey, None) + klass = cls.entity_class(typekey) + return klass.from_obj(cls_obj) + + class Entity(object): """Base class for all classes in the Cybox SimpleAPI.""" @@ -187,11 +215,11 @@ class Entity(object): for field in entity.typed_fields: val = getattr(cls_obj, field.name) - if field.type_: + if field.transformer: if field.multiple and val is not None: - val = [field.type_.from_obj(x) for x in val] + val = [field.transformer.from_obj(x) for x in val] else: - val = field.type_.from_obj(val) + val = field.transformer.from_obj(val) field.__set__(entity, val) return entity @@ -217,14 +245,14 @@ class Entity(object): for field in entity.typed_fields: val = cls_dict.get(field.key_name) - if field.type_: + if field.transformer: if field.multiple: if val is not None: - val = [field.type_.from_dict(x) for x in val] + val = [field.transformer.from_dict(x) for x in val] else: val = [] else: - val = field.type_.from_dict(val) + val = field.transformer.from_dict(val) else: if field.multiple and not val: val = [] diff --git a/mixbox/fields.py b/mixbox/fields.py index <HASH>..<HASH> 100644 --- a/mixbox/fields.py +++ b/mixbox/fields.py @@ -54,7 +54,7 @@ class TypedField(object): def __init__(self, name, type_=None, key_name=None, comparable=True, multiple=False, - preset_hook=None, postset_hook=None): + preset_hook=None, postset_hook=None, factory=None): """ Create a new field. @@ -91,6 +91,7 @@ class TypedField(object): self.preset_hook = preset_hook self.postset_hook = postset_hook self.is_type_castable = getattr(type_, "_try_cast", False) + self.factory = factory def __get__(self, instance, owner=None): """Return the TypedField value for the input `instance` and `owner`. @@ -191,6 +192,25 @@ class TypedField(object): def type_(self, value): self._type = value + @property + def transformer(self): + """Return the class for this field that transforms non-Entity objects + (e.g., dicts or binding objects) into Entity instances. + + Any non-None value returned from this method should implement a + from_obj() and from_dict() method. + + Returns: + None if no type_ or factory is defined by the field. Return a class + with from_dict and from_obj methods otherwise. + """ + if self.factory: + return self.factory + elif self.type_: + return self.type_ + else: + return None + class BytesField(TypedField): def _clean(self, value):
Experimenting with EntityFactory class.
CybOXProject_mixbox
train
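The EntityFactory pattern in the commit above is easiest to see with a concrete subclass. Below is a minimal, runnable Python sketch of the same xsi:type dispatch: the factory is trimmed to its from_dict path, and the Address class, ObservableFactory name, and "AddressObjectType" key are hypothetical names invented for illustration, not part of the mixbox commit:

# Minimal sketch of the EntityFactory dispatch pattern (concrete names are hypothetical).
class EntityFactory(object):
    _dictkey = "xsi:type"

    @classmethod
    def entity_class(cls, key):
        # Map a type key to an Entity class; must be implemented by a subclass.
        raise NotImplementedError()

    @classmethod
    def from_dict(cls, cls_dict):
        if not cls_dict:
            return None
        # Look up the concrete class by its xsi:type key, then delegate to it.
        return cls.entity_class(cls_dict.get(cls._dictkey)).from_dict(cls_dict)


class Address(object):
    @classmethod
    def from_dict(cls, d):
        inst = cls()
        inst.value = d.get("value")
        return inst


class ObservableFactory(EntityFactory):
    @classmethod
    def entity_class(cls, key):
        return {"AddressObjectType": Address}[key]


addr = ObservableFactory.from_dict({"xsi:type": "AddressObjectType", "value": "10.0.0.1"})
assert isinstance(addr, Address) and addr.value == "10.0.0.1"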
ab08b931b277ae77fe602d7bec37ef03fdd8ba15
diff --git a/bql/table/table.go b/bql/table/table.go
index <HASH>..<HASH> 100644
--- a/bql/table/table.go
+++ b/bql/table/table.go
@@ -388,3 +388,41 @@ func (t *Table) Sort(cfg SortConfig) {
 	}
 	sort.Sort(bySortConfig{t.data, cfg})
 }
+
+// Accumulator type represents a generic accumulator for independent values
+// expressed as the element of the array slice. Returns the values after being
+// accumulated. If the wrong type is passed in, it will crash casting the
+// interface.
+type Accumulator func(interface{}) (interface{}, error)
+
+// NewSumInt64LiteralAccumulator accumulates the int64 types of a literal.
+func NewSumInt64LiteralAccumulator(s int64) Accumulator {
+	return func(vs interface{}) (interface{}, error) {
+		l := vs.(*literal.Literal)
+		v, err := l.Int64()
+		if err != nil {
+			return s, err
+		}
+		s += v
+		return s, nil
+	}
+}
+
+// NewSumFloat64LiteralAccumulator accumulates the float64 types of a literal.
+func NewSumFloat64LiteralAccumulator(s float64) Accumulator {
+	return func(vs interface{}) (interface{}, error) {
+		l := vs.(*literal.Literal)
+		v, err := l.Float64()
+		if err != nil {
+			return s, err
+		}
+		s += v
+		return s, nil
+	}
+}
+
+/*
+func (t *Table) groupRange(i, j int, ma map[string]Accumulator) (Row, error) {
+	return nil, nil
+}
+*/
diff --git a/bql/table/table_test.go b/bql/table/table_test.go
index <HASH>..<HASH> 100644
--- a/bql/table/table_test.go
+++ b/bql/table/table_test.go
@@ -607,3 +607,30 @@ func TestSort(t *testing.T) {
 		}
 	}
 }
+
+func TestAccumulators(t *testing.T) {
+	// int64 sum accumulator.
+	var (
+		iv interface{}
+		ia = NewSumInt64LiteralAccumulator(0)
+	)
+	for i := int64(0); i < 5; i++ {
+		l, _ := literal.DefaultBuilder().Build(literal.Int64, i)
+		iv, _ = ia(l)
+	}
+	if got, want := iv.(int64), int64(10); got != want {
+		t.Errorf("Int64 sum accumulator failed; got %d, want %d", got, want)
+	}
+	// float64 sum accumulator.
+	var (
+		fv interface{}
+		fa = NewSumFloat64LiteralAccumulator(0)
+	)
+	for i := float64(0); i < 5; i += 1.0 {
+		l, _ := literal.DefaultBuilder().Build(literal.Float64, i)
+		fv, _ = fa(l)
+	}
+	if got, want := fv.(float64), float64(10); got != want {
+		t.Errorf("Float64 sum accumulator failed; got %v, want %v", got, want)
+	}
+}
Initial work on table reduction to support group by clauses. This commit is part of the work to address issue #<I>.
google_badwolf
train
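The Go accumulators above are closures that capture a running total; since the interface{} casts can obscure the idea, here is the same closure pattern transliterated into Python. This is a sketch of the technique only, with names of my choosing, not code from the badwolf repository:

def new_sum_accumulator(start=0):
    total = start
    def accumulate(value):
        # The closure keeps the running total across calls, like the Go version.
        nonlocal total
        total += value
        return total
    return accumulate

acc = new_sum_accumulator(0)
result = None
for i in range(5):
    result = acc(i)
assert result == 10  # mirrors the expected sum in TestAccumulators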
55b33f3a544832abc2100b962a5061701022c513
diff --git a/ontrack-job/src/test/java/net/nemerosa/ontrack/job/orchestrator/JobOrchestratorTest.java b/ontrack-job/src/test/java/net/nemerosa/ontrack/job/orchestrator/JobOrchestratorTest.java index <HASH>..<HASH> 100644 --- a/ontrack-job/src/test/java/net/nemerosa/ontrack/job/orchestrator/JobOrchestratorTest.java +++ b/ontrack-job/src/test/java/net/nemerosa/ontrack/job/orchestrator/JobOrchestratorTest.java @@ -9,10 +9,10 @@ import org.junit.Test; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; +import java.util.function.Supplier; import static org.junit.Assert.*; @@ -40,6 +40,8 @@ public class JobOrchestratorTest { public void orchestrator_initial_jobs() throws InterruptedException, ExecutionException { JobScheduler scheduler = createJobScheduler(); + Supplier<RuntimeException> notScheduledException = () -> new RuntimeException("Not scheduled"); + List<JobRegistration> jobs = new ArrayList<>(); JobOrchestratorSupplier jobOrchestrationSupplier = jobs::stream; @@ -53,20 +55,21 @@ public class JobOrchestratorTest { // Orchestration is registered as a job, but does not run since we have a NONE schedule scheduler.schedule(orchestrator, Schedule.NONE); - Optional<JobStatus> status = scheduler.getJobStatus(key); - assertTrue(status.isPresent() && status.get().getNextRunDate() != null); + JobStatus status = scheduler.getJobStatus(key).orElse(null); + assertNotNull(status); + assertNull(status.getNextRunDate()); // Puts a job in the list jobs.add(new JobRegistration(new TestJob("1"), Schedule.NONE)); // ... and launches the orchestration - scheduler.fireImmediately(key).get(); + scheduler.fireImmediately(key).orElseThrow(notScheduledException).get(); // ... tests the job has been registered assertTrue(scheduler.getJobStatus(TestJob.getKey("1")).isPresent()); // Puts the second job in the list jobs.add(new JobRegistration(new TestJob("2"), Schedule.NONE)); // ... and launches the orchestration - scheduler.fireImmediately(key).get(); + scheduler.fireImmediately(key).orElseThrow(notScheduledException).get(); // ... tests the jobs are registered assertTrue(scheduler.getJobStatus(TestJob.getKey("1")).isPresent()); assertTrue(scheduler.getJobStatus(TestJob.getKey("2")).isPresent()); @@ -74,7 +77,7 @@ public class JobOrchestratorTest { // Removes the first job in the list jobs.remove(0); // ... and launches the orchestration - scheduler.fireImmediately(key).get(); + scheduler.fireImmediately(key).orElseThrow(notScheduledException).get(); // ... tests the jobs are registered assertFalse(scheduler.getJobStatus(TestJob.getKey("1")).isPresent()); assertTrue(scheduler.getJobStatus(TestJob.getKey("2")).isPresent());
#<I> REFAC - Corrections of tests
nemerosa_ontrack
train
b9e1742b2682c1fd5ad73e93eeda63f750f770b9
diff --git a/src/Charcoal/App/ServiceProvider/FilesystemServiceProvider.php b/src/Charcoal/App/ServiceProvider/FilesystemServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/Charcoal/App/ServiceProvider/FilesystemServiceProvider.php +++ b/src/Charcoal/App/ServiceProvider/FilesystemServiceProvider.php @@ -230,17 +230,17 @@ class FilesystemServiceProvider implements ServiceProviderInterface { if (!$config['host']) { throw new InvalidArgumentException( - 'No host configured for FTP filesystem filesystem adapter.' + 'No host configured for FTP filesystem adapter.' ); } if (!$config['username']) { throw new InvalidArgumentException( - 'No username configured for FTP filesystem filesystem adapter.' + 'No username configured for FTP filesystem adapter.' ); } if (!$config['password']) { throw new InvalidArgumentException( - 'No password configured for FTP filesystem filesystem adapter.' + 'No password configured for FTP filesystem adapter.' ); } @@ -265,17 +265,17 @@ class FilesystemServiceProvider implements ServiceProviderInterface { if (!$config['host']) { throw new InvalidArgumentException( - 'No host configured for SFTP filesystem filesystem adapter.' + 'No host configured for SFTP filesystem adapter.' ); } if (!$config['username']) { throw new InvalidArgumentException( - 'No username configured for SFTP filesystem filesystem adapter.' + 'No username configured for SFTP filesystem adapter.' ); } if (!$config['password']) { throw new InvalidArgumentException( - 'No password configured for SFTP filesystem filesystem adapter.' + 'No password configured for SFTP filesystem adapter.' ); }
Fix exception messages in Filesystem service provider
locomotivemtl_charcoal-app
train
a9d25f94118fb4d148881dba4b4310fe81302471
diff --git a/sportsref/nba/boxscores.py b/sportsref/nba/boxscores.py index <HASH>..<HASH> 100644 --- a/sportsref/nba/boxscores.py +++ b/sportsref/nba/boxscores.py @@ -149,10 +149,7 @@ class BoxScore( :returns: pandas DataFrame of play-by-play. Similar to GPF. """ - try: - doc = self.get_subpage_doc('pbp') - except ValueError: - return pd.DataFrame() + doc = self.get_subpage_doc('pbp') table = doc('table#pbp') rows = [tr.children('td') for tr in table('tr').items() if tr('td')] data = []
nba.BoxScore.pbp raises ValueError when no pbp is found
mdgoldberg_sportsref
train
b12d9902f3159c98aecfb6fa2d877f755004bc70
diff --git a/parsedatetime/__init__.py b/parsedatetime/__init__.py index <HASH>..<HASH> 100644 --- a/parsedatetime/__init__.py +++ b/parsedatetime/__init__.py @@ -1079,7 +1079,7 @@ class Calendar: if not parsed """ s = datetimeString.strip() - now = time.localtime() + now = sourceTime or time.localtime() log.debug('_evalString(%s, %s)' % (datetimeString, sourceTime))
_evalString should use sourceTime as 'now' if set
bear_parsedatetime
train
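A short usage sketch of the behavior the one-line fix above restores, assuming the public parsedatetime Calendar API; before the change, _evalString computed relative phrases against the wall clock even when the caller supplied a sourceTime:

import time
import parsedatetime

cal = parsedatetime.Calendar()
# Anchor the parse to a fixed reference time instead of "now".
anchor = time.strptime("2012-01-01 12:00", "%Y-%m-%d %H:%M")
result, flag = cal.parse("tomorrow", sourceTime=anchor)
# With the fix, result is relative to the anchor (2012-01-02),
# not to whatever day the code happens to run on.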
cb30dd1d4caaba03f464cdf5751ef0336f8a6c33
diff --git a/gitlab/objects.py b/gitlab/objects.py index <HASH>..<HASH> 100644 --- a/gitlab/objects.py +++ b/gitlab/objects.py @@ -1379,8 +1379,7 @@ class ProjectMergeRequestNoteManager(BaseManager): class ProjectMergeRequest(GitlabObject): - _url = '/projects/%(project_id)s/merge_request' - _urlPlural = '/projects/%(project_id)s/merge_requests' + _url = '/projects/%(project_id)s/merge_requests' _constructorTypes = {'author': 'User', 'assignee': 'User'} requiredUrlAttrs = ['project_id'] requiredCreateAttrs = ['source_branch', 'target_branch', 'title']
Use the plural merge_requests URL everywhere. This breaks compatibility with older gitlab versions but maintaining support for changed APIs is just too complex and time consuming. See issue #<I> if you need a workaround. Fixes #<I>
python-gitlab_python-gitlab
train
ede504400f583763f23500b4cd1c03367e63b371
diff --git a/cmd/xl-v1-metadata.go b/cmd/xl-v1-metadata.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-metadata.go +++ b/cmd/xl-v1-metadata.go @@ -266,18 +266,25 @@ func newXLMetaV1(object string, dataBlocks, parityBlocks int) (xlMeta xlMetaV1) } // IsValid - tells if the format is sane by validating the version -// string and format style. +// string, format and erasure info fields. func (m xlMetaV1) IsValid() bool { - return isXLMetaValid(m.Version, m.Format) + return isXLMetaFormatValid(m.Version, m.Format) && + isXLMetaErasureInfoValid(m.Erasure.DataBlocks, m.Erasure.ParityBlocks) } // Verifies if the backend format metadata is sane by validating // the version string and format style. -func isXLMetaValid(version, format string) bool { +func isXLMetaFormatValid(version, format string) bool { return ((version == xlMetaVersion || version == xlMetaVersion100) && format == xlMetaFormat) } +// Verifies if the backend format metadata is sane by validating +// the ErasureInfo, i.e. data and parity blocks. +func isXLMetaErasureInfoValid(data, parity int) bool { + return ((data >= parity) && (data != 0) && (parity != 0)) +} + // Converts metadata to object info. func (m xlMetaV1) ToObjectInfo(bucket, object string) ObjectInfo { objInfo := ObjectInfo{ diff --git a/cmd/xl-v1-metadata_test.go b/cmd/xl-v1-metadata_test.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-metadata_test.go +++ b/cmd/xl-v1-metadata_test.go @@ -368,3 +368,45 @@ func TestPickValidXLMeta(t *testing.T) { } } } + +func TestIsXLMetaFormatValid(t *testing.T) { + tests := []struct { + name int + version string + format string + want bool + }{ + {1, "123", "fs", false}, + {2, "123", xlMetaFormat, false}, + {3, xlMetaVersion, "test", false}, + {4, xlMetaVersion100, "hello", false}, + {5, xlMetaVersion, xlMetaFormat, true}, + {6, xlMetaVersion100, xlMetaFormat, true}, + } + for _, tt := range tests { + if got := isXLMetaFormatValid(tt.version, tt.format); got != tt.want { + t.Errorf("Test %d: Expected %v but received %v", tt.name, got, tt.want) + } + } +} + +func TestIsXLMetaErasureInfoValid(t *testing.T) { + tests := []struct { + name int + data int + parity int + want bool + }{ + {1, 5, 6, false}, + {2, 5, 5, true}, + {3, 0, 5, false}, + {4, 5, 0, false}, + {5, 5, 0, false}, + {6, 5, 4, true}, + } + for _, tt := range tests { + if got := isXLMetaErasureInfoValid(tt.data, tt.parity); got != tt.want { + t.Errorf("Test %d: Expected %v but received %v", tt.name, got, tt.want) + } + } +} diff --git a/cmd/xl-v1-utils.go b/cmd/xl-v1-utils.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-utils.go +++ b/cmd/xl-v1-utils.go @@ -269,7 +269,7 @@ func readXLMetaStat(disk StorageAPI, bucket string, object string) (si statInfo, xlFormat := parseXLFormat(xlMetaBuf) // Validate if the xl.json we read is sane, return corrupted format. - if !isXLMetaValid(xlVersion, xlFormat) { + if !isXLMetaFormatValid(xlVersion, xlFormat) { // For version mismatchs and unrecognized format, return corrupted format. return si, nil, errors2.Trace(errCorruptedFormat) }
Add validation of xlMeta ErasureInfo field (#<I>)
minio_minio
train
f0d1f62f49de98d6c743f31f5319c840fc98c79b
diff --git a/tools/trial_tool/trial_keeper.py b/tools/trial_tool/trial_keeper.py
index <HASH>..<HASH> 100644
--- a/tools/trial_tool/trial_keeper.py
+++ b/tools/trial_tool/trial_keeper.py
@@ -45,7 +45,6 @@ def main_loop(args):
     # Notice: We don't appoint env, which means subprocess wil inherit current environment and that is expected behavior
     process = Popen(args.trial_command, shell = True, stdout = stdout_file, stderr = stderr_file)
     print('Subprocess pid is {}'.format(process.pid))
-    print('Current cwd is {}'.format(os.getcwd()))
     while True:
         retCode = process.poll()
         ## Read experiment metrics, to avoid missing metrics
@@ -55,15 +54,15 @@ def main_loop(args):
             print('subprocess terminated. Exit code is {}. Quit'.format(retCode))
             #copy local directory to hdfs
             nni_local_output_dir = os.environ['NNI_OUTPUT_DIR']
-            hdfs_client = HdfsClient(hosts='{0}:{1}'.format(args.pai_hdfs_host, '50070'), user_name=args.pai_user_name)
-            print(nni_local_output_dir, args.pai_hdfs_output_dir)
+            hdfs_client = HdfsClient(hosts='{0}:{1}'.format(args.pai_hdfs_host, '50070'), user_name=args.pai_user_name, timeout=5)
             try:
                 if copyDirectoryToHdfs(nni_local_output_dir, args.pai_hdfs_output_dir, hdfs_client):
-                    print('copy directory success!')
+                    print('copy directory from {0} to {1} success!'.format(nni_local_output_dir, args.pai_hdfs_output_dir))
                 else:
-                    print('copy directory failed!')
+                    print('copy directory from {0} to {1} failed!'.format(nni_local_output_dir, args.pai_hdfs_output_dir))
             except Exception as exception:
-                print(exception)
+                print('HDFS copy directory got exception')
+                raise exception
             ## Exit as the retCode of subprocess(trial) exit
             exit(retCode)
 
@@ -91,7 +90,10 @@ if __name__ == '__main__':
 
     try:
         main_loop(args)
-    except:
-        print('Exiting by user request')
+    except SystemExit as se:
+        print('NNI trial keeper exit with code {}'.format(se.code))
+        sys.exit(se.code)
+    except Exception as e:
+        print('Exit trial keeper with code 1 because Exception: {} is caught'.format(str(e)))
         sys.exit(1)
Fix trial keeper wrongly exiting (#<I>) * Fix trial keeper bug: use the actual exit code rather than a hard-coded 1
Microsoft_nni
train
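The exit-handling pattern from the patch above, isolated into a runnable sketch (main_loop here is a stand-in for the real trial loop): catch SystemExit to forward the trial's real return code, and reserve exit code 1 for unexpected exceptions. SystemExit is not a subclass of Exception, so the two handlers never shadow each other:

import sys

def main_loop():
    # Stand-in: a real trial loop would exit with the subprocess's return code.
    sys.exit(3)

if __name__ == '__main__':
    try:
        main_loop()
    except SystemExit as se:
        # Propagate the actual exit code instead of collapsing it to 1.
        sys.exit(se.code)
    except Exception as e:
        print('Exiting with code 1 because of exception: {}'.format(e))
        sys.exit(1)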
5e302ac720505e37ee45ad874c15d2a57deb8292
diff --git a/src/com/google/javascript/jscomp/DefaultPassConfig.java b/src/com/google/javascript/jscomp/DefaultPassConfig.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/DefaultPassConfig.java +++ b/src/com/google/javascript/jscomp/DefaultPassConfig.java @@ -279,6 +279,11 @@ public final class DefaultPassConfig extends PassConfig { checks.add(suspiciousCode); } + if (options.closurePass && options.checkMissingGetCssNameLevel.isOn() + && !options.skipNonTranspilationPasses) { + checks.add(closureCheckGetCssName); + } + // Late ES6 transpilation. // Includes ES6 features that are best handled natively by the compiler. // As we convert more passes to handle these features, we will be moving the transpilation @@ -307,10 +312,6 @@ public final class DefaultPassConfig extends PassConfig { // End of ES6 transpilation passes. - if (options.closurePass && options.checkMissingGetCssNameLevel.isOn()) { - checks.add(closureCheckGetCssName); - } - if (options.syntheticBlockStartMarker != null) { // This pass must run before the first fold constants pass. checks.add(createSyntheticBlocks);
Moving closureCheckGetCssName pass to before the later transpilation step ------------- Created by MOE: <URL>
google_closure-compiler
train
821aefa1f8b22986a43a584acf25353debcfc6a8
diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py index <HASH>..<HASH> 100644 --- a/salt/netapi/__init__.py +++ b/salt/netapi/__init__.py @@ -117,7 +117,7 @@ class NetapiClient(object): ''' kwargs['fun'] = fun wheel = salt.wheel.WheelClient(self.opts) - return wheel.master_call(**kwargs) + return wheel.cmd_sync(kwargs) def wheel_async(self, fun, **kwargs): ''' diff --git a/salt/wheel/__init__.py b/salt/wheel/__init__.py index <HASH>..<HASH> 100644 --- a/salt/wheel/__init__.py +++ b/salt/wheel/__init__.py @@ -98,26 +98,7 @@ class WheelClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object): }) {'minions': {'jerry': '5d:f6:79:43:5e:d4:42:3f:57:b8:45:a8:7e:a4:6e:ca'}} ''' - sevent = salt.utils.event.get_event('master', - self.opts['sock_dir'], - self.opts['transport'], - opts=self.opts) - job = self.master_call(**low) - ret_tag = tagify('ret', base=job['tag']) - - timelimit = time.time() + (timeout or 300) - while True: - ret = sevent.get_event(full=True) - if ret is None: - continue - - if ret['tag'] == ret_tag: - return ret['data']['return'] - - if time.time() > timelimit: - raise salt.exceptions.SaltClientTimeout( - "WheelClient job '%s' timed out", job['jid'], - jid=job['jid']) + return self.master_call(**low) def cmd_async(self, low): '''
Fix WheelClient().cmd_sync(). WheelClient defaults to sync behavior so we just need to wrap master_call here.
saltstack_salt
train
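For context, a hedged usage sketch based on Salt's documented wheel client API; after the patch above, cmd_sync is a thin wrapper over master_call, which already blocks until the wheel function finishes:

import salt.config
import salt.wheel

# Load the master configuration (the path is the conventional default).
opts = salt.config.master_config('/etc/salt/master')
wheel = salt.wheel.WheelClient(opts)

# cmd_sync takes a "low data" dict naming the wheel function to run.
keys = wheel.cmd_sync({'fun': 'key.list_all'})
print(keys)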
82c53beaecdd8e3dcaca469b0edfcc55bfafce68
diff --git a/src/Slide.js b/src/Slide.js index <HASH>..<HASH> 100644 --- a/src/Slide.js +++ b/src/Slide.js @@ -24,6 +24,8 @@ export default function Slide ({ ) } +Slide._isReactDynamicSwiperSlide = true + Slide.propTypes = { onActive: PropTypes.func, children: PropTypes.node, diff --git a/src/Swiper.js b/src/Swiper.js index <HASH>..<HASH> 100644 --- a/src/Swiper.js +++ b/src/Swiper.js @@ -151,7 +151,7 @@ export default class Swiper extends Component { _getSlideChildren (children) { children = children || this.props.children return Children.toArray(children) - .filter(child => child.type === Slide) + .filter(child => child.type && child.type._isReactDynamicSwiperSlide) } /**
fix incorrect reference comparison in case of dev mode builds
nickpisacane_react-dynamic-swiper
train
1b3ee0e1948672f31daa0237553ea8ac104aa718
diff --git a/test/lib/examples/galena/tissue/migration/annotation_test.rb b/test/lib/examples/galena/tissue/migration/annotation_test.rb index <HASH>..<HASH> 100644 --- a/test/lib/examples/galena/tissue/migration/annotation_test.rb +++ b/test/lib/examples/galena/tissue/migration/annotation_test.rb @@ -7,14 +7,16 @@ module Galena def test_target verify_target(:annotation, :target => CaTissue::SpecimenCollectionGroup) do |scg| - pth = scg.pathology.radical_prostatectomy_pathology_annotations.first - assert_not_nil(pth, "Missing #{scg} annotation") - assert_not_nil(pth.comment, "Missing #{pth} comments") - gls = pth.gleason_score - assert_not_nil(pth, "Missing #{pth} gleason score") + pth = scg.pathology.first + assert_not_nil(pth, "Missing #{scg} pathology annotation proxy") + pst = pth.radical_prostatectomy_pathology_annotations.first + assert_not_nil(pst, "Missing #{scg} prostate annotation") + assert_not_nil(pst.comment, "Missing #{pst} comments") + gls = pst.gleason_score + assert_not_nil(pst, "Missing #{pst} gleason score") assert_equal('3', gls.primary_pattern_score, "Gleason score incorrect") - grd = pth.histologic_grades.first - assert_not_nil(grd, "Missing #{pth} grade") + grd = pst.histologic_grades.first + assert_not_nil(grd, "Missing #{pst} grade") assert_equal('2', grd.grade, "Grade incorrect") end end
Hook -> annotation proxy is 1:M association.
caruby_tissue
train
d243cf9d5aa66323694fd948eaa1a2f2837c4631
diff --git a/src/Handler/CurlFactory.php b/src/Handler/CurlFactory.php index <HASH>..<HASH> 100644 --- a/src/Handler/CurlFactory.php +++ b/src/Handler/CurlFactory.php @@ -378,8 +378,9 @@ class CurlFactory implements CurlFactoryInterface $conf[CURLOPT_FILE] = fopen('php://temp', 'w+'); $easy->sink = Psr7\stream_for($conf[CURLOPT_FILE]); } - + $timeoutRequiresNoSignal = false; if (isset($options['timeout'])) { + $timeoutRequiresNoSignal |= $options['timeout'] < 1; $conf[CURLOPT_TIMEOUT_MS] = $options['timeout'] * 1000; } @@ -393,9 +394,14 @@ class CurlFactory implements CurlFactoryInterface } if (isset($options['connect_timeout'])) { + $timeoutRequiresNoSignal |= $options['connect_timeout'] < 1; $conf[CURLOPT_CONNECTTIMEOUT_MS] = $options['connect_timeout'] * 1000; } + if ($timeoutRequiresNoSignal && strtoupper(substr(PHP_OS, 0, 3)) !== 'WIN') { + $conf[CURLOPT_NOSIGNAL] = true; + } + if (isset($options['proxy'])) { if (!is_array($options['proxy'])) { $conf[CURLOPT_PROXY] = $options['proxy'];
fix: curl timeouts set to less than one second cause libcurl to time out… (#<I>)
guzzle_guzzle
train
d74ac90ee7550d6f007773f5bd3dcb5caaea1ddf
diff --git a/lib/prey/agent.js b/lib/prey/agent.js index <HASH>..<HASH> 100644 --- a/lib/prey/agent.js +++ b/lib/prey/agent.js @@ -249,7 +249,7 @@ var Agent = self = { var set_new_delay = function(delay){ - common.os.set_new_delay(delay, function(err){ + common.os.set_new_delay(parseInt(delay), function(err){ if(err) self.log_error("Delay not set: " + err.toString()); else logger.info("[agent] Delay succesfully set."); }); @@ -261,21 +261,21 @@ var Agent = self = { if(!current_delay.value) logger.warn("[agent] No delay found! First time you run Prey as " + self.running_as + "?") else - logger.debug("[agent] Current delay: " + current_delay.value + ", requested: " + requested_delay); - - // if we get a valid integer (eg. device marked as missing) - // set the execution delay to every X minutes - if(current_delay.value > 0){ + logger.info("[agent] Current delay: " + current_delay.value + ", requested: " + requested_delay); + + // if current delay is every 60 minutes + if(current_delay.one_hour){ + + // and a lower one was requested, set it + if(requested_delay < 60) + set_new_delay(requested_delay) + + } else { // if current delay is not every 60 min - // if new delay is different to current one, change - if(parseInt(current_delay.value) != parseInt(requested_delay)) + // and no delay is set or requested delay is different, set it + if(!current_delay.value || parseInt(current_delay.value) != requested_delay) set_new_delay(requested_delay); - - // if device is not missing, then make sure it is set to a random minute every one hour - } else if(parseInt(current_delay.value) == NaN || !current_delay.one_hour){ - - set_new_delay(60); - + } }); diff --git a/lib/prey/plugins/drivers/control-panel/index.js b/lib/prey/plugins/drivers/control-panel/index.js index <HASH>..<HASH> 100644 --- a/lib/prey/plugins/drivers/control-panel/index.js +++ b/lib/prey/plugins/drivers/control-panel/index.js @@ -182,8 +182,9 @@ var ControlPanelDriver = function(options){ // var status_msg = this.marked_as_missing() ? "HOLY SHMOLY, DEVICE IS MISSING!" : "Device not missing. Sweet."; // logger.info(status_msg, 'bold'); - if(requested.delay && requested.delay > 0) - this.emit('set', 'delay', this.marked_as_missing(requested) ? requested.delay : 0) + // if device is missing, set delay to requested delay, otherwise every 60 minutes + if(requested.delay && parseInt(requested.delay) > 0) + this.emit('set', 'delay', this.marked_as_missing(requested) ? requested.delay : 60) // if(requested.settings && Object.keys(requested.settings).length > 0) for(setting in requested.settings)
Simpler logic for checking and setting new delay.
prey_prey-node-client
train
45ca9ea304fe439e48f4c1185f7a139723effd95
diff --git a/bestiary_test.go b/bestiary_test.go index <HASH>..<HASH> 100644 --- a/bestiary_test.go +++ b/bestiary_test.go @@ -441,6 +441,20 @@ var fixtures = []fixture{ maxAttempts: 4, }, { + n: "locked atoms are matched on both local and net name", + ds: []depspec{ + dsv("root 0.0.0", "foo *"), + dsv("foo 1.0.0 foorev"), + dsv("foo 2.0.0 foorev2"), + }, + l: mklock( + "foo from baz 1.0.0 foorev", + ), + r: mkresults( + "foo 2.0.0 foorev2", + ), + }, + { n: "includes root package's dev dependencies", ds: []depspec{ dsv("root 1.0.0", "(dev) foo 1.0.0", "(dev) bar 1.0.0"), diff --git a/lock.go b/lock.go index <HASH>..<HASH> 100644 --- a/lock.go +++ b/lock.go @@ -87,9 +87,18 @@ func NewLockedProject(n ProjectName, v Version, uri, path string) LockedProject return lp } -// Name returns the name of the locked project. -func (lp LockedProject) Name() ProjectName { - return lp.n +// Ident returns the identifier describing the project. This includes both the +// local name (the root name by which the project is referenced in import paths) +// and the network name, where the upstream source lives. +func (lp LockedProject) Ident() ProjectIdentifier { + id := ProjectIdentifier{ + LocalName: lp.n, + NetworkName: lp.uri, + } + + // Keep things sane for things like map keys by ensuring the NetworkName is + // always set, even if it's the same as the LocalName. + return id.normalize() } // Version assembles together whatever version and/or revision data is @@ -106,11 +115,6 @@ func (lp LockedProject) Version() Version { return lp.v.Is(lp.r) } -// URI returns the upstream URI of the locked project. -func (lp LockedProject) URI() string { - return lp.uri -} - // Path returns the path relative to the vendor directory to which the locked // project should be checked out. func (lp LockedProject) Path() string { @@ -119,10 +123,7 @@ func (lp LockedProject) Path() string { func (lp LockedProject) toAtom() ProjectAtom { pa := ProjectAtom{ - Ident: ProjectIdentifier{ - LocalName: lp.n, - NetworkName: lp.uri, - }, + Ident: lp.Ident(), } if lp.v == nil { diff --git a/solver.go b/solver.go index <HASH>..<HASH> 100644 --- a/solver.go +++ b/solver.go @@ -107,7 +107,7 @@ type solver struct { // the network name to which they currently correspond. names map[ProjectName]string // A map of the names listed in the root's lock. 
- rlm map[ProjectName]LockedProject + rlm map[ProjectIdentifier]LockedProject } // Solve attempts to find a dependency solution for the given project, as @@ -144,12 +144,12 @@ func (s *solver) Solve(opts SolveOpts) (Result, error) { // Initialize maps s.chng = make(map[ProjectName]struct{}) - s.rlm = make(map[ProjectName]LockedProject) + s.rlm = make(map[ProjectIdentifier]LockedProject) s.names = make(map[ProjectName]string) if s.o.L != nil { for _, lp := range s.o.L.Projects() { - s.rlm[lp.n] = lp + s.rlm[lp.Ident()] = lp } } @@ -424,8 +424,7 @@ func (s *solver) getLockVersionIfValid(id ProjectIdentifier) (ProjectAtom, error } } - // TODO need to make rlm operate on the full ProjectIdentifier - lp, exists := s.rlm[id.LocalName] + lp, exists := s.rlm[id] if !exists { if s.l.Level >= logrus.DebugLevel { s.l.WithField("name", id).Debug("Project not present in lock") @@ -614,8 +613,8 @@ func (s *solver) unselectedComparator(i, j int) bool { return false } - _, ilock := s.rlm[iname.LocalName] - _, jlock := s.rlm[jname.LocalName] + _, ilock := s.rlm[iname] + _, jlock := s.rlm[jname] switch { case ilock && !jlock: diff --git a/types.go b/types.go index <HASH>..<HASH> 100644 --- a/types.go +++ b/types.go @@ -48,6 +48,14 @@ func (i ProjectIdentifier) errString() string { return fmt.Sprintf("%s (from %s)", i.LocalName, i.NetworkName) } +func (i ProjectIdentifier) normalize() ProjectIdentifier { + if i.NetworkName == "" { + i.NetworkName = string(i.LocalName) + } + + return i +} + type ProjectName string type ProjectAtom struct {
Use full ProjectIdentifier in root lock map
sdboyer_gps
train
acc657a9a3b0a639bb55b29d9945c89337abff0d
diff --git a/src/combineReducers.js b/src/combineReducers.js index <HASH>..<HASH> 100644 --- a/src/combineReducers.js +++ b/src/combineReducers.js @@ -17,14 +17,14 @@ export default function combineReducersWithReduxables(reducers) { const newReducers = {} Object.keys(reducers).forEach(key => { const reducer = reducers[key] - if (reducer.setScope) { - reducer.setScope(key) - } - - if (reducer.getReducer) { - newReducers[key] = reducer.getReducer() - } else { + if (typeof reducer === 'function') { newReducers[key] = reducer + } else { + newReducers[key] = reducer.getReducer() + + if (reducer.setScope) { + reducer.setScope(key) + } } })
Modify combineReducers to default to Reduxable
underscopeio_reduxable
train
de3db6fb199420bea8f36caff9faa6b2befc26bf
diff --git a/i3pystatus/core/__init__.py b/i3pystatus/core/__init__.py index <HASH>..<HASH> 100644 --- a/i3pystatus/core/__init__.py +++ b/i3pystatus/core/__init__.py @@ -42,11 +42,11 @@ class Status: :param input_stream: A file-like object that provides the input stream, if `standalone` is False. """ - def __init__(self, standalone=False, interval=1, input_stream=sys.stdin): + def __init__(self, standalone=False, interval=1, input_stream=sys.stdin, click_events=True): self.modules = util.ModuleList(self, ClassFinder(Module)) self.standalone = standalone if standalone: - self.io = io.StandaloneIO(interval) + self.io = io.StandaloneIO(click_events, interval) self.command_endpoint = CommandEndpoint( self.modules, lambda: io.JSONIO(io=io.IOHandler(sys.stdin, open(os.devnull, "w")), skiplines=1)) diff --git a/i3pystatus/core/io.py b/i3pystatus/core/io.py index <HASH>..<HASH> 100644 --- a/i3pystatus/core/io.py +++ b/i3pystatus/core/io.py @@ -51,11 +51,14 @@ class StandaloneIO(IOHandler): """ n = -1 - proto = ('{"version":1,"click_events":true}', "[", "[]", ",[]",) + proto = [{"version":1,"click_events":True}, "[", "[]", ",[]",] - def __init__(self, interval=1): + def __init__(self, click_events, interval=1): super().__init__() self.interval = interval + self.proto[0]['click_events'] = click_events + self.proto[0] = json.dumps(self.proto[0]) + def read(self): while True:
added option for disabling click events
enkore_i3pystatus
train
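How a user would exercise the new flag, assuming an otherwise default setup; click_events defaults to True, so existing configurations are unaffected:

from i3pystatus import Status

# With click_events=False the i3bar protocol header becomes
# {"version": 1, "click_events": false}, so i3bar never sends click JSON
# back on the status line's stdin.
status = Status(standalone=True, click_events=False)
status.run()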
1e4de986a8804aa620a001f4e04c9d4755e9d6b2
diff --git a/pre_commit/clientlib.py b/pre_commit/clientlib.py index <HASH>..<HASH> 100644 --- a/pre_commit/clientlib.py +++ b/pre_commit/clientlib.py @@ -1,6 +1,7 @@ import argparse import functools import logging +import re import shlex import sys from typing import Any @@ -112,6 +113,25 @@ LOCAL = 'local' META = 'meta' +# should inherit from cfgv.Conditional if sha support is dropped +class WarnMutableRev(cfgv.ConditionalOptional): + def check(self, dct: Dict[str, Any]) -> None: + super().check(dct) + + if self.key in dct: + rev = dct[self.key] + + if '.' not in rev and not re.match(r'^[a-fA-F0-9]+$', rev): + logger.warning( + f'The {self.key!r} field of repo {dct["repo"]!r} ' + f'appears to be a mutable reference ' + f'(moving tag / branch). Mutable references are never ' + f'updated after first install and are not supported. ' + f'See https://pre-commit.com/#using-the-latest-version-for-a-repository ' # noqa: E501 + f'for more details.', + ) + + class OptionalSensibleRegex(cfgv.OptionalNoDefault): def check(self, dct: Dict[str, Any]) -> None: super().check(dct) @@ -261,6 +281,14 @@ CONFIG_REPO_DICT = cfgv.Map( ), MigrateShaToRev(), + WarnMutableRev( + 'rev', + cfgv.check_string, + '', + 'repo', + cfgv.NotIn(LOCAL, META), + True, + ), cfgv.WarnAdditionalKeys(('repo', 'rev', 'hooks'), warn_unknown_keys_repo), ) DEFAULT_LANGUAGE_VERSION = cfgv.Map( diff --git a/tests/clientlib_test.py b/tests/clientlib_test.py index <HASH>..<HASH> 100644 --- a/tests/clientlib_test.py +++ b/tests/clientlib_test.py @@ -180,6 +180,70 @@ def test_ci_key_must_be_map(): cfgv.validate({'ci': 'invalid', 'repos': []}, CONFIG_SCHEMA) [email protected]( + 'rev', + ( + 'v0.12.4', + 'b27f281', + 'b27f281eb9398fc8504415d7fbdabf119ea8c5e1', + '19.10b0', + '4.3.21-2', + ), +) +def test_warn_mutable_rev_ok(caplog, rev): + config_obj = { + 'repo': 'https://gitlab.com/pycqa/flake8', + 'rev': rev, + 'hooks': [{'id': 'flake8'}], + } + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + assert caplog.record_tuples == [] + + [email protected]( + 'rev', + ( + '', + 'HEAD', + 'stable', + 'master', + 'some_branch_name', + ), +) +def test_warn_mutable_rev_invalid(caplog, rev): + config_obj = { + 'repo': 'https://gitlab.com/pycqa/flake8', + 'rev': rev, + 'hooks': [{'id': 'flake8'}], + } + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + assert caplog.record_tuples == [ + ( + 'pre_commit', + logging.WARNING, + "The 'rev' field of repo 'https://gitlab.com/pycqa/flake8' " + 'appears to be a mutable reference (moving tag / branch). ' + 'Mutable references are never updated after first install and are ' + 'not supported. ' + 'See https://pre-commit.com/#using-the-latest-version-for-a-repository ' # noqa: E501 + 'for more details.', + ), + ] + + +def test_warn_mutable_rev_conditional(): + config_obj = { + 'repo': 'meta', + 'rev': '3.7.7', + 'hooks': [{'id': 'flake8'}], + } + + with pytest.raises(cfgv.ValidationError): + cfgv.validate(config_obj, CONFIG_REPO_DICT) + + def test_validate_optional_sensible_regex(caplog): config_obj = { 'id': 'flake8',
added warning if mutable rev is used
pre-commit_pre-commit
train
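Stripped of the cfgv plumbing, WarnMutableRev's heuristic reduces to two checks on the rev string; a standalone sketch (the function name is mine, the regex is the one from the diff):

import re

def looks_mutable(rev):
    # Immutable revs either contain a dot (tag-like: "v0.12.4", "19.10b0")
    # or are pure hex (sha-like: "b27f281"); everything else is treated as
    # a moving branch or tag.
    return '.' not in rev and not re.match(r'^[a-fA-F0-9]+$', rev)

assert not looks_mutable('v0.12.4')
assert not looks_mutable('b27f281')
assert looks_mutable('master')
assert looks_mutable('HEAD')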
34cec183eb031ad907554c9a8e0da8653c0f5f17
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index <HASH>..<HASH> 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -21,12 +21,6 @@ Metrics/CyclomaticComplexity: Metrics/PerceivedComplexity: Max: 9 # Goal: 7 -# Offense count: 1 -# Cop supports --auto-correct. -Rails/ApplicationController: - Exclude: - - 'spec/dummy_app/app/controllers/test_controller.rb' - # Offense count: 56 # Cop supports --auto-correct. Rails/ApplicationRecord: diff --git a/spec/dummy_app/app/controllers/application_controller.rb b/spec/dummy_app/app/controllers/application_controller.rb index <HASH>..<HASH> 100644 --- a/spec/dummy_app/app/controllers/application_controller.rb +++ b/spec/dummy_app/app/controllers/application_controller.rb @@ -9,7 +9,7 @@ class ApplicationController < ActionController::Base before_action :modify_current_user # PT used to add this callback automatically. Now people are required to add - # it themsevles, like this, allowing them to control the order of callbacks. + # it themselves, like this, allowing them to control the order of callbacks. # The `modify_current_user` callback above shows why this control is useful. before_action :set_paper_trail_whodunnit diff --git a/spec/dummy_app/app/controllers/test_controller.rb b/spec/dummy_app/app/controllers/test_controller.rb index <HASH>..<HASH> 100644 --- a/spec/dummy_app/app/controllers/test_controller.rb +++ b/spec/dummy_app/app/controllers/test_controller.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -class TestController < ActionController::Base +class TestController < ApplicationController def user_for_paper_trail Thread.current.object_id end
Rails/ApplicationController
paper-trail-gem_paper_trail
train
3bc18cda548ea257d21d94477af95d895ca6f65a
diff --git a/adapters/epics.py b/adapters/epics.py index <HASH>..<HASH> 100644 --- a/adapters/epics.py +++ b/adapters/epics.py @@ -107,6 +107,6 @@ class EpicsAdapter(Adapter): count += 1 timer += delta if timer >= 1.0: - #print("Running at %d cycles per second (%.3f ms per cycle)" % (count, 1000.0 / count)) + print("Running at %d cycles per second (%.3f ms per cycle)" % (count, 1000.0 / count)) count = 0 timer = 0.0
Revert commenting out print statement from epics adapter
DMSC-Instrument-Data_lewis
train
a69b76a972735bad59b03146ec911953c229d9c2
diff --git a/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java b/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java index <HASH>..<HASH> 100644 --- a/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java +++ b/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java @@ -405,7 +405,8 @@ public class RemoteWebDriver implements WebDriver, JavascriptExecutor, Object value = converter.apply(response.getValue()); response.setValue(value); } catch (Exception e) { - throw new WebDriverException(e); + throw new WebDriverException("Error communicating with the remote browser. " + + "It may have died.", e); } return errorHandler.throwIfResponseFailed(response, System.currentTimeMillis() - start); diff --git a/java/client/test/org/openqa/selenium/firefox/FirefoxDriverTest.java b/java/client/test/org/openqa/selenium/firefox/FirefoxDriverTest.java index <HASH>..<HASH> 100644 --- a/java/client/test/org/openqa/selenium/firefox/FirefoxDriverTest.java +++ b/java/client/test/org/openqa/selenium/firefox/FirefoxDriverTest.java @@ -20,6 +20,7 @@ package org.openqa.selenium.firefox; import static java.lang.Thread.sleep; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.openqa.selenium.Ignore.Driver.FIREFOX; @@ -70,6 +71,36 @@ public class FirefoxDriverTest extends AbstractDriverTestCase { driver.get(pages.xhtmlTestPage); } + private static class ConnectionCapturingDriver extends FirefoxDriver { + public ExtensionConnection keptConnection; + + public ConnectionCapturingDriver() { + super(); + } + + @Override + protected ExtensionConnection connectTo(FirefoxBinary binary, FirefoxProfile profile, String host) { + this.keptConnection = super.connectTo(binary, profile, host); + + return keptConnection; + } + } + + public void testShouldGetMeaningfulExceptionOnBrowserDeath() { + ConnectionCapturingDriver driver2 = new ConnectionCapturingDriver(); + driver2.get(pages.formPage); + + try { + driver2.keptConnection.quit(); + driver2.get(pages.formPage); + fail("Should have thrown."); + } catch (WebDriverException e) { + assertThat("Must contain descriptive error", e.getMessage(), + containsString("Error communicating with the remote browser")); + } + } + + @NeedsFreshDriver @Ignore(value = FIREFOX, reason = "Need to figure out how to open a new browser instance mid-test") public void testShouldWaitUntilBrowserHasClosedProperly() throws Exception {
EranMes: More informative error message when the remote webdriver client fails to communicate with the browser. r<I>
SeleniumHQ_selenium
train
1c305e6d1d1a83b3ce47a2dc1392edc20c884b57
diff --git a/zxb.py b/zxb.py index <HASH>..<HASH> 100755 --- a/zxb.py +++ b/zxb.py @@ -344,5 +344,4 @@ def main(argv): return 0 # Exit success -if __name__ == '__main__': - sys.exit(main(sys.argv)) # Exit +sys.exit(main(sys.argv)) # Exit diff --git a/zxbpp.py b/zxbpp.py index <HASH>..<HASH> 100755 --- a/zxbpp.py +++ b/zxbpp.py @@ -700,7 +700,9 @@ ID_TABLE = DefinesTable() # ------- ERROR And Warning messages ---------------- - -if __name__ == '__main__': +def entry_point(): main(sys.argv[1:]) OPTIONS.stdout.value.write(OUTPUT) + +if __name__ == '__main__': + entry_point()
Uses a single entry point for setup.py. To correctly create a Python package, this module must have a callable function (later specified as zxbpp:entry_point in the setup.py file).
boriel_zxbasic
train
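The setup.py side the message alludes to would look roughly like the sketch below (the metadata values are assumed, only the entry point target follows from the diff): a console_scripts entry needs a "module:function" target, which is why the code behind the __name__ guard had to become a named callable:

from setuptools import setup

setup(
    name='zxbasic',                  # assumed metadata
    version='0.0.0',                 # assumed metadata
    py_modules=['zxb', 'zxbpp'],
    entry_points={
        'console_scripts': [
            # Points at the new callable defined in zxbpp.py.
            'zxbpp = zxbpp:entry_point',
        ],
    },
)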
203bba5e59f33104ae89322bee94dd7b13d566c8
diff --git a/tools/image_export.py b/tools/image_export.py index <HASH>..<HASH> 100755 --- a/tools/image_export.py +++ b/tools/image_export.py @@ -56,6 +56,11 @@ def Main(): try: tool.ProcessSources() + # Writing to stdout and stderr will raise BrokenPipeError if it + # receives a SIGPIPE. + except BrokenPipeError: + pass + except (KeyboardInterrupt, errors.UserAbort): logging.warning('Aborted by user.') return False diff --git a/tools/log2timeline.py b/tools/log2timeline.py index <HASH>..<HASH> 100755 --- a/tools/log2timeline.py +++ b/tools/log2timeline.py @@ -71,6 +71,11 @@ def Main(): try: tool.ExtractEventsFromSources() + # Writing to stdout and stderr will raise BrokenPipeError if it + # receives a SIGPIPE. + except BrokenPipeError: + pass + except (KeyboardInterrupt, errors.UserAbort): logging.warning('Aborted by user.') return False diff --git a/tools/pinfo.py b/tools/pinfo.py index <HASH>..<HASH> 100755 --- a/tools/pinfo.py +++ b/tools/pinfo.py @@ -49,6 +49,15 @@ def Main(): else: tool.PrintStorageInformation() + # Writing to stdout and stderr will raise BrokenPipeError if it + # receives a SIGPIPE. + except BrokenPipeError: + pass + + except (KeyboardInterrupt, errors.UserAbort): + logging.warning('Aborted by user.') + return False + except errors.BadConfigOption as exception: logging.warning(exception) return False diff --git a/tools/psort.py b/tools/psort.py index <HASH>..<HASH> 100755 --- a/tools/psort.py +++ b/tools/psort.py @@ -71,6 +71,11 @@ def Main(): try: tool.ProcessStorage() + # Writing to stdout and stderr will raise BrokenPipeError if it + # receives a SIGPIPE. + except BrokenPipeError: + pass + except (KeyboardInterrupt, errors.UserAbort): logging.warning('Aborted by user.') return False diff --git a/tools/psteal.py b/tools/psteal.py index <HASH>..<HASH> 100755 --- a/tools/psteal.py +++ b/tools/psteal.py @@ -86,6 +86,11 @@ def Main(): tool.ExtractEventsFromSources() tool.AnalyzeEvents() + # Writing to stdout and stderr will raise BrokenPipeError if it + # receives a SIGPIPE. + except BrokenPipeError: + pass + except (KeyboardInterrupt, errors.UserAbort): logging.warning('Aborted by user.') return False
Changes to have tools handle SIGPIPE #<I> (#<I>)
log2timeline_plaso
train
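The guard in isolation: piping a chatty CLI into something like head closes stdout early, and the next write raises BrokenPipeError under Python 3. A minimal reproduction sketch of the pattern the commit applies to each tool:

import sys

def print_many_lines():
    for i in range(1000000):
        print(i)

if __name__ == '__main__':
    try:
        print_many_lines()
    except BrokenPipeError:
        # The reader went away (e.g. `./tool.py | head`); exit quietly
        # instead of dumping a traceback.
        pass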
94ce3c07d302bc693fce74f0198ca81333ba9c81
diff --git a/tests/Aws/Tests/Common/Integration/ClientIntegrationTest.php b/tests/Aws/Tests/Common/Integration/ClientIntegrationTest.php index <HASH>..<HASH> 100644 --- a/tests/Aws/Tests/Common/Integration/ClientIntegrationTest.php +++ b/tests/Aws/Tests/Common/Integration/ClientIntegrationTest.php @@ -70,4 +70,19 @@ class IntegrationTest extends \Aws\Tests\IntegrationTestCase $this->assertEquals(200, $command->getResponse()->getStatusCode()); } } + + public function testCanInstantiateRegionlessClientsWithoutParameters() + { + $config = array('key' => 'foo', 'secret' => 'bar'); + + try { + // Instantiate all of the clients that do not require a region + \Aws\S3\S3Client::factory($config); + \Aws\CloudFront\CloudFrontClient::factory($config); + \Aws\Route53\Route53Client::factory($config); + \Aws\Sts\StsClient::factory($config); + } catch (\InvalidArgumentException $e) { + $this->fail('All of the above clients should have been instantiated without errors.'); + } + } }
Added integration tests to make sure regionless clients do not require region.
aws_aws-sdk-php
train
67cf6ea0edf270cd289292bb5ef25ca311e5f004
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -65,6 +65,13 @@ class ReactF1 extends React.Component { this.f1.update(); } + if(props.targets){ + this.f1.targets(props.targets); + // force an update to f1 since we received new props + this.f1.update(); + } + + if (props.go) { if (this.f1) { this.f1.go(props.go, props.onComplete);
Adding the ability to update the target after initialization
Jam3_react-f1
train
6bc2b2f6c028e7b60006f73813a8cbc31f95ee9a
diff --git a/.gitignore b/.gitignore index <HASH>..<HASH> 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *.pyc +.DS_Store /build/ /dist/ -/sniffer.egg-info/ \ No newline at end of file +/sniffer.egg-info/ diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index <HASH>..<HASH> 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -2,3 +2,5 @@ Jeff Hui Patrice Neff Andrew Dunham Will Harris +Martin Griffin +Hansel Dunlop diff --git a/sniffer/metadata.py b/sniffer/metadata.py index <HASH>..<HASH> 100644 --- a/sniffer/metadata.py +++ b/sniffer/metadata.py @@ -6,15 +6,17 @@ __all__ = [ __author__ = "Jeff Hui" __author_email__ = "[email protected]" -__copyright__ = "Copyright 2013, Jeff Hui" +__copyright__ = "Copyright 2015, Jeff Hui" __credits__ = [ "Jeff Hui", "Patrice Neff", "Andrew Lee", "Will Harris", "Jonas Tingeborn", - "Sebastian Wagner" + "Sebastian Wagner", + "Martin Griffin", + "Hansel Dunlop", ] __license__ = "MIT" -__version__ = "0.3.3" +__version__ = "0.3.4" diff --git a/sniffer/tests/test_api.py b/sniffer/tests/test_api.py index <HASH>..<HASH> 100644 --- a/sniffer/tests/test_api.py +++ b/sniffer/tests/test_api.py @@ -13,6 +13,3 @@ class SelectRunnableDecorator(TestCase): self.assertEqual(validator.runnable, 'tagged') - - - diff --git a/sniffer/tests/test_scent_module.py b/sniffer/tests/test_scent_module.py index <HASH>..<HASH> 100644 --- a/sniffer/tests/test_scent_module.py +++ b/sniffer/tests/test_scent_module.py @@ -26,8 +26,3 @@ class ScentModuleTest(TestCase): self.assertTrue(scanner.is_valid_type('file.type2')) self.assertFalse(scanner.is_valid_type('file.negative')) - - - - -
Updated contributors. Set version to <I>. Added .DS_Store to git ignore.
jeffh_sniffer
train
25f188b1d5c2474740166e249886c832a39e8617
diff --git a/cerberus/cerberus.py b/cerberus/cerberus.py index <HASH>..<HASH> 100644 --- a/cerberus/cerberus.py +++ b/cerberus/cerberus.py @@ -195,10 +195,13 @@ class Validator(object): raise ValidationError(errors.ERROR_DOCUMENT_FORMAT % str(document)) # make root document available for validators (Cerberus #42, Eve #295) - if context is not None: - self.document = copy.copy(context) - else: - self.document = copy.copy(document) + target = context if context is not None else document + try: + # might fail when dealing with complex document values + self.document = copy.deepcopy(target) + except: + # fallback on a shallow copy + self.document = copy.copy(target) for field, value in document.items(): if self.ignore_none_values and value is None:
Adopt a 'deepcopy first' approach. Always attempt a deepcopy first. If it fails, fall back on a shallow copy.
pyeve_cerberus
train
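The commit message states the whole strategy, which distills to a few lines; here is a sketch of deepcopy-first with a shallow fallback, outside of any Validator machinery (the helper name is mine):

import copy

def safe_copy(target):
    try:
        # Preferred: a deep copy fully isolates the document being validated.
        return copy.deepcopy(target)
    except Exception:
        # Some values (file handles, sockets, ...) refuse to deepcopy;
        # degrade to a shallow copy rather than fail outright.
        return copy.copy(target)

document = {'stream': open(__file__)}  # deepcopy of a file object raises TypeError
copied = safe_copy(document)
# Shallow fallback: a new dict, but the values are still shared.
assert copied is not document and copied['stream'] is document['stream']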
5270932dfd3d7772532305d3d13f51b25a2863d5
diff --git a/Controller/AdminController.php b/Controller/AdminController.php index <HASH>..<HASH> 100644 --- a/Controller/AdminController.php +++ b/Controller/AdminController.php @@ -288,7 +288,7 @@ class AdminController extends Controller throw new \Exception(sprintf('It\'s not possible to toggle the value of the "%s" boolean property of the "%s" entity.', $propertyName, $this->entity['name'])); } - $newValue = $this->request->query->getBoolean('newValue'); + $newValue = (bool) $this->request->query->get('newValue'); if (null !== $setter = $propertyMetadata['setter']) { $entity->{$setter}($newValue); } else {
Don't use getBoolean() because it was introduced in Symfony <I>
EasyCorp_EasyAdminBundle
train
d05242b502b56e1be6952b8e5bb04daf9ad02cec
diff --git a/graylog2-server/src/main/java/org/graylog/security/shares/GranteeSharesService.java b/graylog2-server/src/main/java/org/graylog/security/shares/GranteeSharesService.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/main/java/org/graylog/security/shares/GranteeSharesService.java +++ b/graylog2-server/src/main/java/org/graylog/security/shares/GranteeSharesService.java @@ -19,6 +19,7 @@ package org.graylog.security.shares; import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; +import com.google.common.primitives.Ints; import org.graylog.grn.GRN; import org.graylog.grn.GRNDescriptor; import org.graylog.grn.GRNDescriptorService; @@ -41,7 +42,9 @@ import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.function.Predicate; +import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import static com.google.common.base.MoreObjects.firstNonNull; @@ -69,7 +72,7 @@ public class GranteeSharesService { final Map<GRN, Set<EntityDescriptor.Owner>> targetOwners = getTargetOwners(targets); - final List<EntityDescriptor> entityDescriptors = targets.stream() + final Supplier<Stream<EntityDescriptor>> filteredStream = () -> targets.stream() .map(descriptorService::getDescriptor) .filter(queryPredicate(paginationParameters)) .filter(entityTypeFilterPredicate(entityTypeFilterString)) @@ -79,7 +82,11 @@ public class GranteeSharesService { return t2.compareTo(t1); } return t1.compareTo(t2); - })) + })); + + final int filteredResultCount = Ints.saturatedCast(filteredStream.get().count()); + + final List<EntityDescriptor> entityDescriptors = filteredStream.get() .skip(paginationParameters.getPerPage() * (paginationParameters.getPage() - 1)) .limit(paginationParameters.getPerPage()) .collect(Collectors.toList()); @@ -94,9 +101,10 @@ public class GranteeSharesService { final PaginatedList<EntityDescriptor> paginatedList = new PaginatedList<>( entityDescriptors, - targets.size(), + filteredResultCount, paginationParameters.getPage(), - paginationParameters.getPerPage() + paginationParameters.getPerPage(), + (long) targets.size() ); return SharesResponse.create(paginatedList, granteeCapabilities); diff --git a/graylog2-server/src/main/java/org/graylog2/database/PaginatedList.java b/graylog2-server/src/main/java/org/graylog2/database/PaginatedList.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/main/java/org/graylog2/database/PaginatedList.java +++ b/graylog2-server/src/main/java/org/graylog2/database/PaginatedList.java @@ -37,10 +37,25 @@ public class PaginatedList<E> extends ForwardingList<E> { private final Long grandTotal; + /** + * Creates a PaginatedList + * @param delegate the actual entries + * @param total the count of all entries (ignoring pagination) + * @param page the page this PaginatedList represents + * @param perPage the size limit for each page + */ public PaginatedList(@Nonnull List<E> delegate, int total, int page, int perPage) { this(delegate, total, page, perPage, null); } + /** + * Creates a PaginatedList + * @param delegate the actual entries + * @param total the count of all entries (ignoring pagination) + * @param page the page this PaginatedList represents + * @param perPage the size limit for each page + * @param grandTotal the count of all entries (ignoring query filters and pagination) + */ public PaginatedList(@Nonnull List<E> delegate, int total, int page, int perPage, Long grandTotal) { 
this.delegate = delegate; this.paginationInfo = PaginationInfo.create(total, delegate.size(), page, perPage); diff --git a/graylog2-server/src/test/java/org/graylog/security/shares/GranteeSharesServiceTest.java b/graylog2-server/src/test/java/org/graylog/security/shares/GranteeSharesServiceTest.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/test/java/org/graylog/security/shares/GranteeSharesServiceTest.java +++ b/graylog2-server/src/test/java/org/graylog/security/shares/GranteeSharesServiceTest.java @@ -168,6 +168,8 @@ class GranteeSharesServiceTest { assertThat(response.capabilities()).doesNotContainKey(dashboard0); assertThat(response.capabilities()).doesNotContainKey(dashboard1); assertThat(response.capabilities()).doesNotContainKey(stream0); + + assertThat(response.paginatedEntities().pagination().total()).isEqualTo(3); } @DisplayName("paginated shares with reverse order")
Return correct PaginatedList results (#<I>) * Return correct PaginatedList results - 'total' should contain the count of all filtered results (ignoring pagination) - 'grand_total' should contain the count of all results (also ignoring filters) Fixes #<I> * Add documentation for PaginatedList and a simple test
Graylog2_graylog2-server
train
9bbf8d76c0612a7fb3531fa836bcee740ec2193f
diff --git a/src/r/appendChild.js b/src/r/appendChild.js index <HASH>..<HASH> 100644 --- a/src/r/appendChild.js +++ b/src/r/appendChild.js @@ -8,7 +8,10 @@ import appendListenerToElement from './utils/appendListenerToElement'; * @returns {function(*)} */ const appendChild = element => child => { - if (!child && typeof child !== 'number') ''; + if (!child && typeof child !== 'number') { + // Needs to render every child, even empty ones to preserve dom hierarchy + child = '' + } if (child instanceof Component) { element.appendChild(child.render()); @@ -33,7 +36,9 @@ const appendChild = element => child => { if (typeof local.default === 'function' && local.default.isComponent && local.default.isComponent()) { + /*eslint-disable*/ appendChild(el)(new local.default()); + /* eslint-enable */ } else { appendChild(el)(local.default); }
Appends empty children and '0'
radi-js_radi
train
c5d48ba9e65511d53b68ed10e32315157fce71c2
diff --git a/codegen/src/main/java/org/web3j/codegen/unit/gen/CompilerClassLoader.java b/codegen/src/main/java/org/web3j/codegen/unit/gen/CompilerClassLoader.java index <HASH>..<HASH> 100644 --- a/codegen/src/main/java/org/web3j/codegen/unit/gen/CompilerClassLoader.java +++ b/codegen/src/main/java/org/web3j/codegen/unit/gen/CompilerClassLoader.java @@ -54,7 +54,7 @@ class CompilerClassLoader extends ClassLoader { private Optional<File> compileClass(final String name) { - final String path = name.replaceAll("\\.", File.separator); + final String path = name.replace(".", File.separator); final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); File sourceFile = null;
Replaced 'replaceAll' with 'replace' in CompilerClassLoader. This was failing on Windows.
web3j_web3j
train
3135e19f25e5db64cd31146a18b4d04cc2f3c707
diff --git a/httprunner/__init__.py b/httprunner/__init__.py index <HASH>..<HASH> 100644 --- a/httprunner/__init__.py +++ b/httprunner/__init__.py @@ -1,11 +0,0 @@ -# encoding: utf-8 - -try: - # monkey patch ssl at beginning to avoid RecursionError when running locust. - from gevent import monkey - if not monkey.is_module_patched('ssl'): - monkey.patch_ssl() -except ImportError: - pass - -from httprunner.api import HttpRunner diff --git a/httprunner/cli.py b/httprunner/cli.py index <HASH>..<HASH> 100644 --- a/httprunner/cli.py +++ b/httprunner/cli.py @@ -90,6 +90,9 @@ def main_hrun(): def main_locust(): """ Performance test with locust: parse command line options and run commands. """ + # monkey patch ssl at beginning to avoid RecursionError when running locust. + from gevent import monkey; monkey.patch_ssl() + import multiprocessing import sys from httprunner import logger diff --git a/tests/test_api.py b/tests/test_api.py index <HASH>..<HASH> 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -3,7 +3,8 @@ import shutil import time import unittest -from httprunner import HttpRunner, api, loader, parser +from httprunner import loader, parser +from httprunner.api import HttpRunner, prepare_locust_tests from locust import HttpLocust from tests.api_server import HTTPBIN_SERVER from tests.base import ApiServerUnittest @@ -570,7 +571,7 @@ class TestLocust(unittest.TestCase): def test_prepare_locust_tests(self): path = os.path.join( os.getcwd(), 'tests/locust_tests/demo_simple_locust.yml') - locust_tests = api.prepare_locust_tests(path) + locust_tests = prepare_locust_tests(path) self.assertIn("gen_md5", locust_tests["functions"]) self.assertEqual(len(locust_tests["tests"]), 10) self.assertEqual(locust_tests["tests"][0]["name"], "index")
change httprunner api HttpRunner import location to avoid triggering the monkey patch
HttpRunner_HttpRunner
train
61b1e65582389bfcb2e59fd4cba3b4c3d0376022
diff --git a/export.go b/export.go index <HASH>..<HASH> 100644 --- a/export.go +++ b/export.go @@ -105,7 +105,6 @@ func runExport(cmd *Command, args []string) { {Name: []string{"ContractSettings"}, Members: []string{"*"}}, {Name: []string{"CustomApplication"}, Members: []string{"*"}}, {Name: []string{"CustomApplicationComponent"}, Members: []string{"*"}}, - {Name: []string{"CustomApplication"}, Members: []string{"*"}}, {Name: []string{"CustomField"}, Members: []string{"*"}}, {Name: []string{"CustomLabels"}, Members: []string{"*"}}, {Name: []string{"CustomMetadata"}, Members: []string{"*"}},
Removed duplicate reference to CustomApplication
ForceCLI_force
train
e7240d91a49880725950201c792fd09ae203faea
diff --git a/lib/linkedin-scraper/profile.rb b/lib/linkedin-scraper/profile.rb index <HASH>..<HASH> 100755 --- a/lib/linkedin-scraper/profile.rb +++ b/lib/linkedin-scraper/profile.rb @@ -157,12 +157,11 @@ module Linkedin end def certifications - @certifications ||= @page.search('background-certifications').map do |item| + @certifications ||= @page.search('#certifications ul li').map do |item| name = item.at('h4').text.gsub(/\s+|\n/, ' ').strip rescue nil - authority = item.at('h5').text.gsub(/\s+|\n/, ' ').strip rescue nil - license = item.at('.specifics/.licence-number').text.gsub(/\s+|\n/, ' ').strip rescue nil - start_date = item.at('.certification-date').text.gsub(/\s+|\n/, ' ').strip rescue nil - + authority_with_license = item.at('h5').text.gsub(/\s+|\n/, ' ').strip rescue nil + authority, license = authority_with_license.split(/,\sLicense\s/) rescue nil + start_date = item.at('time').text.gsub(/\s+|\n/, ' ').strip rescue nil { name: name, authority: authority, license: license, start_date: start_date } end end
fix certifications section parsing (#<I>) * fix certifications section parsing * fix license and authority parsing in #certifications
yatish27_linkedin-scraper
train
4a41e0454e417ad5f8ffb51d7e3eeaf6bcffcc1e
diff --git a/juju/charmhub.py b/juju/charmhub.py
index <HASH>..<HASH> 100644
--- a/juju/charmhub.py
+++ b/juju/charmhub.py
@@ -1,5 +1,6 @@
 from .client import client
 from .errors import JujuError
+import time
 
 import requests
 import json
@@ -9,13 +10,19 @@ class CharmHub:
     def __init__(self, model):
         self.model = model
 
+    def request_charmhub_with_retry(self, url, retries):
+        for attempt in range(retries):
+            _response = requests.get(url)
+            if _response.status_code == 200:
+                return _response
+            time.sleep(5)  # blocking delay; this is a plain synchronous method
+        raise JujuError("Got {} from {}".format(_response.status_code, url))
+
     def get_charm_id(self, charm_name):
         conn, headers, path_prefix = self.model.connection().https_connection()
 
         url = "http://api.snapcraft.io/v2/charms/info/{}".format(charm_name)
-        _response = requests.get(url)
-        if not _response.status_code == 200:
-            raise JujuError("Got {} from {}".format(_response.status_code, url))
+        _response = self.request_charmhub_with_retry(url, 5)
         response = json.loads(_response.text)
         return response['id'], response['name']
 
@@ -23,9 +30,7 @@ class CharmHub:
         conn, headers, path_prefix = self.model.connection().https_connection()
 
         url = "http://api.snapcraft.io/v2/charms/info/{}?fields=default-release.revision.subordinate".format(charm_name)
-        _response = requests.get(url)
-        if not _response.status_code == 200:
-            raise JujuError("Got {} from {}".format(_response.status_code, url))
+        _response = self.request_charmhub_with_retry(url, 5)
         response = json.loads(_response.text)
         return 'subordinate' in response['default-release']['revision']
Improve Charmhub API request code with retries
juju_python-libjuju
train
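A standalone sketch of the retry-on-non-200 pattern from the diff above, written in plain synchronous Python; the five attempts and five-second pause mirror the diff, and the error handling is simplified:

import time

import requests

def get_with_retry(url, retries=5, delay=5):
    # Retry a GET until it returns HTTP 200, sleeping between attempts.
    last_status = None
    for _ in range(retries):
        response = requests.get(url)
        if response.status_code == 200:
            return response
        last_status = response.status_code
        time.sleep(delay)
    raise RuntimeError("Got {} from {}".format(last_status, url))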
13b7c3e1d07e4dce6c63aa158299631be3c2422d
diff --git a/moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/FileResourceReader.java b/moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/FileResourceReader.java index <HASH>..<HASH> 100644 --- a/moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/FileResourceReader.java +++ b/moco-core/src/main/java/com/github/dreamhead/moco/resource/reader/FileResourceReader.java @@ -5,11 +5,12 @@ import com.github.dreamhead.moco.MocoException; import com.github.dreamhead.moco.Request; import com.github.dreamhead.moco.resource.Resource; -import java.io.File; import java.io.IOException; import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; -import static com.google.common.io.Files.toByteArray; import static java.lang.String.format; public final class FileResourceReader extends AbstractFileResourceReader { @@ -26,14 +27,15 @@ public final class FileResourceReader extends AbstractFileResourceReader { @Override protected byte[] doReadFor(final Request request) { - File file = new File(targetFileName(request)); + String pathname = targetFileName(request); + Path path = Paths.get(pathname); - if (!file.exists()) { - throw new IllegalArgumentException(format("%s does not exist", file.getPath())); + if (!Files.exists(path)) { + throw new IllegalArgumentException(format("%s does not exist", path.toString())); } try { - return toByteArray(file); + return Files.readAllBytes(path); } catch (IOException e) { throw new MocoException(e); }
Replaced file resource reader with Java 7 NIO style
dreamhead_moco
train
ca7f26ecb04f8092897b05cdc2ccb03827a15da9
diff --git a/flag_test.go b/flag_test.go index <HASH>..<HASH> 100644 --- a/flag_test.go +++ b/flag_test.go @@ -825,6 +825,14 @@ func TestUint64FlagWithEnvVarHelpOutput(t *testing.T) { } } +func TestUint64FlagValueFromContext(t *testing.T) { + set := flag.NewFlagSet("test", 0) + set.Uint64("myflag", 42, "doc") + ctx := NewContext(nil, set, nil) + f := &Uint64Flag{Name: "myflag"} + expect(t, f.ValueFromContext(ctx), uint64(42)) +} + var durationFlagTests = []struct { name string expected string diff --git a/flag_uint64.go b/flag_uint64.go index <HASH>..<HASH> 100644 --- a/flag_uint64.go +++ b/flag_uint64.go @@ -101,6 +101,11 @@ func (f *Uint64Flag) GetEnvVars() []string { return f.EnvVars } +// ValueFromContext returns the flag’s value in the given Context. +func (f *Uint64Flag) ValueFromContext(ctx *Context) uint64 { + return ctx.Uint64(f.Name) +} + // Uint64 looks up the value of a local Uint64Flag, returns // 0 if not found func (c *Context) Uint64(name string) uint64 {
Add Uint<I>Flag.ValueFromContext() as a convenient accessor
urfave_cli
train
68b1f5f0ff37691b21bd90f7aaa127b341d83be4
diff --git a/src/showcase/messages/MessagesDoc.js b/src/showcase/messages/MessagesDoc.js index <HASH>..<HASH> 100644 --- a/src/showcase/messages/MessagesDoc.js +++ b/src/showcase/messages/MessagesDoc.js @@ -430,36 +430,36 @@ messages.current.show({severity: 'success', summary: 'Success Message', detail: <CodeHighlight> {` -<Messages ref={(el) => this.messages = el}></Messages> +<Messages ref={messages}></Messages> -<Button onClick={this.showSuccess} label="Success" className="p-button-success" /> -<Button onClick={this.showInfo} label="Info" className="p-button-info" /> -<Button onClick={this.showWarn} label="Warn" className="p-button-warning" /> -<Button onClick={this.showError} label="Error" className="p-button-danger" /> -<Button onClick={this.showMultiple} label="Multiple" /> +<Button onClick={showSuccess} label="Success" className="p-button-success" /> +<Button onClick={showInfo} label="Info" className="p-button-info" /> +<Button onClick={showWarn} label="Warn" className="p-button-warning" /> +<Button onClick={showError} label="Error" className="p-button-danger" /> +<Button onClick={showMultiple} label="Multiple" /> `} </CodeHighlight> <CodeHighlight lang="js"> {` -showSuccess() { - this.messages.show({ severity: 'success', summary: 'Success Message', detail: 'Order submitted' }); +const showSuccess = () => { + messages.current.show({ severity: 'success', summary: 'Success Message', detail: 'Order submitted' }); } -showInfo() { - this.messages.show({ severity: 'info', summary: 'Info Message', detail: 'PrimeReact rocks' }); +const showInfo = () => { + messages.current.show({ severity: 'info', summary: 'Info Message', detail: 'PrimeReact rocks' }); } -showWarn() { - this.messages.show({ severity: 'warn', summary: 'Warn Message', detail: 'There are unsaved changes' }); +const showWarn = () => { + messages.current.show({ severity: 'warn', summary: 'Warn Message', detail: 'There are unsaved changes' }); } -showError() { - this.messages.show({ severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); +const showError = () => { + messages.current.show({ severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); } -showMultiple() { - this.messages.show([ +const showMultiple = () => { + messages.current.show([ {severity:'info', summary:'Message 1', detail:'PrimeReact rocks'}, {severity:'info', summary:'Message 2', detail:'PrimeReact rocks'}, {severity:'info', summary:'Message 3', detail:'PrimeFaces rocks'} @@ -473,7 +473,7 @@ showMultiple() { <CodeHighlight lang="js"> {` -this.messages.clear(); +messages.current.clear(); `} </CodeHighlight> @@ -482,7 +482,7 @@ this.messages.clear(); <CodeHighlight lang="js"> {` -this.messages.replace(newMessages); +messages.current.replace(newMessages); `} </CodeHighlight> @@ -491,7 +491,7 @@ this.messages.replace(newMessages); <CodeHighlight lang="js"> {` -this.messages.show({closable: false, severity: 'error', summary: 'Error Message', detail: 'Validation failed'}); +messages.current.show({closable: false, severity: 'error', summary: 'Error Message', detail: 'Validation failed'}); `} </CodeHighlight> @@ -502,10 +502,10 @@ this.messages.show({closable: false, severity: 'error', summary: 'Error Message' <CodeHighlight lang="js"> {` //sticky -this.messages.show({ sticky: true, severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); +messages.current.show({ sticky: true, severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); //automatically removed after 5 seconds
-this.messages.show({ life: 5000, severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); +messages.current.show({ life: 5000, severity: 'error', summary: 'Error Message', detail: 'Validation failed' }); `} </CodeHighlight>
Refactor MessagesDoc
primefaces_primereact
train
55913e7cce578186d7a1ba34d48b6711ae38f8b3
diff --git a/javaee/impl/src/main/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommand.java b/javaee/impl/src/main/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommand.java index <HASH>..<HASH> 100644 --- a/javaee/impl/src/main/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommand.java +++ b/javaee/impl/src/main/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommand.java @@ -15,7 +15,6 @@ import javax.ws.rs.HttpMethod; import javax.ws.rs.container.ContainerRequestContext; import javax.ws.rs.container.ContainerResponseContext; import javax.ws.rs.container.ContainerResponseFilter; -import javax.ws.rs.container.PreMatching; import javax.ws.rs.ext.Provider; import org.jboss.forge.addon.projects.Project; @@ -60,12 +59,14 @@ public class CrossOriginResourceSharingFilterCommand extends AbstractRestNewComm // @Inject // @WithAttributes(label = "Access-Control-Expose-Headers", description = - // "The Access-Control-Expose-Headers header indicates which headers are safe to expose to the API of a CORS API specification.") + // "The Access-Control-Expose-Headers header indicates which headers are safe to expose to the API of a CORS API + // specification.") // private UIInputMany<String> accessControlExposeHeaders; // // @Inject // @WithAttributes(label = "Access-Control-Max-Age", defaultValue = "151200", description = - // "The Access-Control-Max-Age header indicates how long the results of a preflight request can be cached in a preflight result cache.") + // "The Access-Control-Max-Age header indicates how long the results of a preflight request can be cached in a + // preflight result cache.") // private UIInput<Integer> accessControlMaxAge; // // @Inject @@ -75,12 +76,14 @@ public class CrossOriginResourceSharingFilterCommand extends AbstractRestNewComm // // @Inject // @WithAttributes(label = "Access-Control-Request-Method", description = - // "The Access-Control-Request-Method header indicates which method will be used in the actual request as part of the preflight request") + // "The Access-Control-Request-Method header indicates which method will be used in the actual request as part of the + // preflight request") // private UIInput<String> accessControlRequestMethod; // // @Inject // @WithAttributes(label = "Access-Control-Request-Headers", description = - // "The Access-Control-Request-Headers header indicates which headers will be used in the actual request as part of the preflight request") + // "The Access-Control-Request-Headers header indicates which headers will be used in the actual request as part of + // the preflight request") // private UIInputMany<String> accessControlRequestHeaders; @Override @@ -122,7 +125,6 @@ public class CrossOriginResourceSharingFilterCommand extends AbstractRestNewComm throws Exception { source.addAnnotation(Provider.class); - source.addAnnotation(PreMatching.class); source.addInterface(ContainerResponseFilter.class); MethodSource<JavaClassSource> method = source.addMethod().setName("filter").setPublic().setReturnTypeVoid(); method.addAnnotation(Override.class); diff --git a/javaee/tests/src/test/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommandTest.java b/javaee/tests/src/test/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommandTest.java index <HASH>..<HASH> 100644 --- a/javaee/tests/src/test/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommandTest.java +++ b/javaee/tests/src/test/java/org/jboss/forge/addon/javaee/rest/ui/CrossOriginResourceSharingFilterCommandTest.java @@ -86,7 +86,7 @@ public class CrossOriginResourceSharingFilterCommandTest JavaClass<?> filterClass = filterResource.getJavaType(); Assert.assertFalse(filterClass.hasSyntaxErrors()); Assert.assertTrue(filterClass.hasAnnotation(Provider.class)); - Assert.assertTrue(filterClass.hasAnnotation(PreMatching.class)); + Assert.assertFalse(filterClass.hasAnnotation(PreMatching.class)); Method<?, ?> method = filterClass .getMethod("filter", ContainerRequestContext.class, ContainerResponseContext.class); Assert.assertNotNull(method);
FORGE-<I>: @PreMatching is for ContainerRequestFilters only
forge_core
train
f149d0e4f8284c54ae34e20066350ce573ee23c6
diff --git a/phonopy/api_phonopy.py b/phonopy/api_phonopy.py index <HASH>..<HASH> 100644 --- a/phonopy/api_phonopy.py +++ b/phonopy/api_phonopy.py @@ -487,6 +487,7 @@ class Phonopy(object): if 'first_atoms' in dataset: self._displacement_dataset = dataset elif 'displacements' in dataset: + self._displacement_dataset = {} self.displacements = dataset['displacements'] if 'forces' in dataset: self.forces = dataset['forces']
Minor fix to accept type1 dataset at Phonopy.dataset.setter
atztogo_phonopy
train
960a0f5e9b048ad2097bd98270af116e4a79110b
diff --git a/src/bosh-dev/lib/bosh/dev/sandbox/main.rb b/src/bosh-dev/lib/bosh/dev/sandbox/main.rb index <HASH>..<HASH> 100644 --- a/src/bosh-dev/lib/bosh/dev/sandbox/main.rb +++ b/src/bosh-dev/lib/bosh/dev/sandbox/main.rb @@ -99,7 +99,6 @@ module Bosh::Dev::Sandbox @director_service = DirectorService.new( { database: @database, - database_proxy: @database_proxy, director_port: director_ruby_port, base_log_path: base_log_path, director_tmp_path: director_tmp_path, diff --git a/src/bosh-dev/lib/bosh/dev/sandbox/nginx.rb b/src/bosh-dev/lib/bosh/dev/sandbox/nginx.rb index <HASH>..<HASH> 100644 --- a/src/bosh-dev/lib/bosh/dev/sandbox/nginx.rb +++ b/src/bosh-dev/lib/bosh/dev/sandbox/nginx.rb @@ -14,7 +14,11 @@ module Bosh::Dev::Sandbox def install sync_release_blobs - compile + if blob_has_changed + compile + else + puts 'Skipping compiling nginx because shasums have not changed' + end end def executable_path @@ -23,6 +27,22 @@ module Bosh::Dev::Sandbox private + def blob_has_changed + release_nginx_path = File.join(RELEASE_ROOT, 'blobs', 'nginx') + blobs_shasum = shasum(release_nginx_path) + working_dir_nginx_path = "#{@working_dir}/nginx" + sandbox_copy_shasum = shasum(working_dir_nginx_path) + + blobs_shasum.sort != sandbox_copy_shasum.sort + end + + def shasum(directory) + output = @runner.run("find #{directory} \\! -type d -print0 | xargs -0 shasum -a 256") + output.split("\n").map do |line| + line.split(' ').first + end + end + def sync_release_blobs Dir.chdir(RELEASE_ROOT) { @runner.run('bundle exec bosh sync blobs') } end diff --git a/src/bosh-dev/lib/bosh/dev/sandbox/services/connection_proxy_service.rb b/src/bosh-dev/lib/bosh/dev/sandbox/services/connection_proxy_service.rb index <HASH>..<HASH> 100644 --- a/src/bosh-dev/lib/bosh/dev/sandbox/services/connection_proxy_service.rb +++ b/src/bosh-dev/lib/bosh/dev/sandbox/services/connection_proxy_service.rb @@ -45,7 +45,8 @@ module Bosh::Dev::Sandbox end def install - # Clean up old compiled nginx bits to stay up-to-date + return if File.exists?(executable_path) + FileUtils.rm_rf(@install_dir) FileUtils.mkdir_p(@install_dir)
Not re-compiling nginx in install_dependencies if blob checksum hasn't changed. [#<I>](<URL>)
cloudfoundry_bosh
train
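The skip logic in the bosh diff above hashes every file under two directory trees and recompiles only when the checksum sets differ. A rough Python equivalent of that check, with directory paths as placeholders:

import hashlib
from pathlib import Path

def shasums(directory):
    # SHA-256 digest of every regular file under the tree, order-independent.
    digests = []
    for path in Path(directory).rglob("*"):
        if path.is_file():
            digests.append(hashlib.sha256(path.read_bytes()).hexdigest())
    return sorted(digests)

def blob_has_changed(release_dir, working_dir):
    return shasums(release_dir) != shasums(working_dir)

# compile() would only run when blob_has_changed(...) is True.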
ba6aee308818b386571a9ce40f6f395374252830
diff --git a/bundles/org.eclipse.orion.client.javascript/web/javascript/commands/openDeclaration.js b/bundles/org.eclipse.orion.client.javascript/web/javascript/commands/openDeclaration.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.javascript/web/javascript/commands/openDeclaration.js +++ b/bundles/org.eclipse.orion.client.javascript/web/javascript/commands/openDeclaration.js @@ -9,7 +9,7 @@ * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ - /*eslint-env amd*/ + /*eslint-env amd, browser*/ define([ 'orion/objects', 'javascript/finder', @@ -20,16 +20,20 @@ define([ * @description Creates a new open declaration command * @constructor * @public + * @param {javascript.ASTManager} ASTManager The backing AST manager + * @param {javascript.ScriptResolver} Resolver The backing script resolver * @returns {javascript.commands.OpenDeclarationCommand} A new command * @since 8.0 */ - function OpenDeclarationCommand(ASTManager) { + function OpenDeclarationCommand(ASTManager, Resolver) { this.astManager = ASTManager; + this.resolver = Resolver; } Objects.mixin(OpenDeclarationCommand.prototype, { /* override */ - execute: function(editorContext, options) { + execute: function(editorContext/*, options*/) { + var that = this; return Deferred.all([ this.astManager.getAST(editorContext), editorContext.getCaretOffset() @@ -40,14 +44,32 @@ define([ var parent = parents.pop(); switch(parent.type) { case 'CallExpression': { - if(parent.callee.type === 'Identifier') { - var decl = Finder.findDeclaration(results[1], results[0], {id: parent.callee.name, kind: Finder.SearchOptions.FUNCTION_DECLARATION}); - if(decl) { - return editorContext.setSelection(decl.id.range[0], decl.id.range[1]); + if(node.type === 'Literal' && (parent.callee.name === 'require' || parent.callee.name === 'importScripts')) { + that.resolver.getWorkspaceFile(node.value).then(function(files) { + // TODO uncomment when we get a file open strategy + //window.open(that.resolver.convertToURL(files[0])); + }); + } else { + if(parent.callee.type === 'Identifier') { + var decl = Finder.findDeclaration(results[1], results[0], {id: parent.callee.name, kind: Finder.SearchOptions.FUNCTION_DECLARATION}); + if(decl) { + return editorContext.setSelection(decl.id.range[0], decl.id.range[1]); + } + } else if(parent.callee.type === 'MemberExpression') { + //TODO need the env to find the containing object expression / func expr } - } else if(parent.callee.type === 'MemberExpression') { - //TODO need the env to find the containing object expression / func expr } + break; + } + case 'ArrayExpression': { + parent = parents.pop(); + if(parent.type === 'CallExpression' && parent.callee.name === 'define') { + that.resolver.getWorkspaceFile(node.value).then(function(files) { + // TODO uncomment when we get a file open strategy + //window.open(that.resolver.convertToURL(files[0])); + }); + } + break; } } }
Bug <I> - Add support to find the declaration of a node - hooks for define/require/importScripts
eclipse_orion.client
train
75ab19373b34d486e692193c512ff500a1662866
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -22,8 +22,6 @@ "use strict"; -exports.iotdb_module = true; - exports.Bridge = require('./SmartThingsBridge').Bridge; exports.bindings = [ require('./SmartThingsBattery').binding,
don't need iotdb_module anymore
dpjanes_homestar-smartthings
train
f46e2d2d6ba0e0183d386721480c29ae4703d4c6
diff --git a/src/com/aoindustries/taglib/RedirectTag.java b/src/com/aoindustries/taglib/RedirectTag.java index <HASH>..<HASH> 100644 --- a/src/com/aoindustries/taglib/RedirectTag.java +++ b/src/com/aoindustries/taglib/RedirectTag.java @@ -53,9 +53,11 @@ public class RedirectTag /** * The maximum length of a URL allowed for redirect. * + * Matching limit of Internet Explorer: http://support.microsoft.com/kb/208427 + * * @see <a href="http://www.boutell.com/newfaq/misc/urllength.html">WWW FAQs: What is the maximum length of a URL?</a> */ - private static final int MAXIMUM_GET_REQUEST_LENGTH = 2000; // A little conservative below 2048 of Internet Explorer. + private static final int MAXIMUM_GET_REQUEST_LENGTH = 2048; public static boolean isValidStatusCode(String statusCode) { return
Raised limit from <I> to <I> to match IE
aoindustries_ao-taglib
train
6de0c4b3ed2193800b0f81839dedded355a4d52c
diff --git a/lib/ryodo/suffix_list_fetcher.rb b/lib/ryodo/suffix_list_fetcher.rb index <HASH>..<HASH> 100644 --- a/lib/ryodo/suffix_list_fetcher.rb +++ b/lib/ryodo/suffix_list_fetcher.rb @@ -41,7 +41,8 @@ module Ryodo def prepare_data @prepared_data = @fetched_data.inject([]) do |acc, line| - next(acc) if SKIPPABLE_LINE_REGEXP.match?(line) + # Using `Regexp#===` instead of `.match?`, to be compatible with Ruby 2.3 and older + next(acc) if SKIPPABLE_LINE_REGEXP === line # rubocop:disable Style/CaseEquality acc << reverse_dn(line) end.sort end
Fix method issue (.match? exists in Ruby <I>+ only)
asaaki_ryodo
train
649a68dffa4c319fcacf59ee1395d75f1ef835b4
diff --git a/Feed.php b/Feed.php index <HASH>..<HASH> 100644 --- a/Feed.php +++ b/Feed.php @@ -500,8 +500,9 @@ abstract class Feed // $value is an array , so check every element foreach ($value as $linkItem) { + $attrib = $linkItem['attributes']; // Only one link with relation alternate and same hreflang & type is allowed. - if (@$linkItem['rel'] == 'alternate' && @$linkItem['hreflang'] == $hreflang && @$linkItem['type'] == $type) + if (@$attrib['rel'] == 'alternate' && @$attrib['hreflang'] == $hreflang && @$attrib['type'] == $type) die('The feed must not contain more than one link element with a relation of "alternate"' . ' that has the same combination of type and hreflang attribute values.'); }
Bugfix: Detecting two links with the same rel=alternate, hreflang & type did not work. See RFC <I> <I> for details.
mibe_FeedWriter
train
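The rule the FeedWriter fix enforces, at most one rel=alternate link per type and hreflang combination, can be sketched on its own; the dict shape used for a link below is an assumption, not FeedWriter's internal structure:

def assert_single_alternate(links, hreflang, mime_type):
    # links: list of dicts such as {"rel": ..., "hreflang": ..., "type": ...}.
    matches = [
        link for link in links
        if link.get("rel") == "alternate"
        and link.get("hreflang") == hreflang
        and link.get("type") == mime_type
    ]
    if len(matches) > 1:
        raise ValueError("feed must not contain more than one alternate link "
                         "with the same combination of type and hreflang")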
3d2b334e521eb39d202c7954103eb093fd0986c0
diff --git a/src/view-vertex/html-view/html-view.js b/src/view-vertex/html-view/html-view.js index <HASH>..<HASH> 100644 --- a/src/view-vertex/html-view/html-view.js +++ b/src/view-vertex/html-view/html-view.js @@ -168,12 +168,11 @@ export default class HTMLView extends LiveSchema { } } ); - deleted.map( ({ box, signature: $ }) => { - const deleted = store.findIndex( ({ signature }) => equal([], signature, $)); + deleted.map( item => { + const deleted = store.indexOf(item); store.splice(deleted, 1); - box.remove(); + item.box.remove(); } ); - } ));
Fixed cleaning of items from the kit container
artifishional_air-m2
train
f3434ac1146f0fc61d1d56d34ff32149b64cf68a
diff --git a/indra/assemblers/cyjs_assembler.py b/indra/assemblers/cyjs_assembler.py index <HASH>..<HASH> 100644 --- a/indra/assemblers/cyjs_assembler.py +++ b/indra/assemblers/cyjs_assembler.py @@ -273,36 +273,32 @@ class CyJSAssembler(object): cyjs_str : str A json string representation of the Cytoscape JS network. """ - exp_colorscale_str = json.dumps(self._exp_colorscale) - mut_colorscale_str = json.dumps(self._mut_colorscale) cyjs_dict = {'edges': self._edges, 'nodes': self._nodes} - model_str = json.dumps(cyjs_dict, indent=1, sort_keys=True) - model_dict = {'exp_colorscale_str': exp_colorscale_str, - 'mut_colorscale_str': mut_colorscale_str, - 'model_elements_str': model_str} + model_dict = {'exp_colorscale': self._exp_colorscale, + 'mut_colorscale': self._mut_colorscale, + 'model_elements': cyjs_dict} cyjs_str = json.dumps(model_dict, indent=1) return cyjs_str - def save_model(self, fname='model.js'): - """Save the assembled Cytoscape JS network in a file. + def save_json(self, fname='model.json'): + """Save the assembled Cytoscape JS network in a json file. Parameters ---------- file_name : Optional[str] The name of the file to save the Cytoscape JS network to. - Default: model.js + Default: model.json """ - model_dict = json.loads(self.print_cyjs()) - - s = '' - s += 'var exp_colorscale = %s;\n' % model_dict['exp_colorscale_str'] - s += 'var mut_colorscale = %s;\n' % model_dict['mut_colorscale_str'] - s += 'var model_elements = %s;\n' % model_dict['model_elements_str'] + cyjs_dict = {'edges': self._edges, 'nodes': self._nodes} + model_dict = {'exp_colorscale': self._exp_colorscale, + 'mut_colorscale': self._mut_colorscale, + 'model_elements': cyjs_dict} + json_str = json.dumps(model_dict, indent=1) with open(fname, 'wt') as fh: - fh.write(s) + fh.write(json_str) - def save_json(self, fname='model.json'): - """Save the assembled Cytoscape JS network in a file. + def save_model(self, fname='model.js'): + """Save the assembled Cytoscape JS network in a js file. Parameters ---------- @@ -310,13 +306,19 @@ class CyJSAssembler(object): The name of the file to save the Cytoscape JS network to. Default: model.js """ + exp_colorscale_str = json.dumps(self._exp_colorscale) + mut_colorscale_str = json.dumps(self._mut_colorscale) cyjs_dict = {'edges': self._edges, 'nodes': self._nodes} - model_dict = {'exp_colorscale': self._exp_colorscale, - 'mut_colorscale': self._mut_colorscale, - 'model_elements': cyjs_dict} - json_str = json.dumps(model_dict, indent=1) + model_str = json.dumps(cyjs_dict, indent=1, sort_keys=True) + model_dict = {'exp_colorscale_str': exp_colorscale_str, + 'mut_colorscale_str': mut_colorscale_str, + 'model_elements_str': model_str} + s = '' + s += 'var exp_colorscale = %s;\n' % model_dict['exp_colorscale_str'] + s += 'var mut_colorscale = %s;\n' % model_dict['mut_colorscale_str'] + s += 'var model_elements = %s;\n' % model_dict['model_elements_str'] with open(fname, 'wt') as fh: - fh.write(json_str) + fh.write(s) def _add_regulate_activity(self, stmt): edge_type, edge_polarity = _get_stmt_type(stmt)
Separate functions to save js and json Separate functions were necessary to save js and json. It is impractical to load a js file after the page has loaded, and this is necessary for having interactivity when it comes to loading data with a rendering of the pathway map. The old version of save_model is retained as it is still used by the notebook. The new version of print_cyjs() generates a json string for save_json(). save_model() does not rely on this string anymore.
sorgerlab_indra
train
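The split described in the indra message above, one writer emitting plain JSON and another wrapping the same payload in JavaScript variable assignments, looks roughly like this in isolation; key names follow the diff and file names are illustrative:

import json

def save_json(model_dict, fname="model.json"):
    # Plain JSON: can be fetched and parsed after the page has loaded.
    with open(fname, "wt") as fh:
        fh.write(json.dumps(model_dict, indent=1))

def save_js(model_dict, fname="model.js"):
    # JS variable assignments: only usable when present at page load time.
    with open(fname, "wt") as fh:
        for name, value in model_dict.items():
            fh.write("var {} = {};\n".format(name, json.dumps(value)))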
489e8d8df78a72fb3561dff45233c6b7171aa65c
diff --git a/lib/express-handlebars.js b/lib/express-handlebars.js index <HASH>..<HASH> 100644 --- a/lib/express-handlebars.js +++ b/lib/express-handlebars.js @@ -26,7 +26,7 @@ function ExpressHandlebars(config) { extname : '.handlebars', layoutsDir : undefined, // Default layouts directory is relative to `express settings.view` + `layouts/` partialsDir : undefined, // Default partials directory is relative to `express settings.view` + `partials/` - defaultLayout : 'default', + defaultLayout : 'main', helpers : undefined, compilerOptions: undefined, }, config);
Update express-handlebars.js
ericf_express-handlebars
train
0dfc64ac9d6805ef70b1ea184a3329d638d471bb
diff --git a/lib/tinder/campfire.rb b/lib/tinder/campfire.rb index <HASH>..<HASH> 100644 --- a/lib/tinder/campfire.rb +++ b/lib/tinder/campfire.rb @@ -36,18 +36,25 @@ module Tinder @logged_in = !result end end + + # Get an array of all the available rooms + # TODO: detect rooms that are full (no link) + def rooms + Hpricot(get.body).search("//h2/a").collect do |a| + Room.new(self, room_id_from_url(a.attributes['href']), a.inner_html) + end + end + # Find a campfire room by name + def find_room_by_name(name) + rooms.detect {|room| room.name == name } + end + # Creates and returns a new Room with the given +name+ and optionally a +topic+ def create_room(name, topic = nil) find_room_by_name(name) if verify_response(post("account/create/room?from=lobby", {:room => {:name => name, :topic => topic}}, :ajax => true), :success) end - # Find a campfire room by name - def find_room_by_name(name) - link = Hpricot(get.body).search("//h2/a").detect { |a| a.inner_html == name } - link.blank? ? nil : Room.new(self, room_id_from_url(link.attributes['href']), name) - end - def find_or_create_room_by_name(name) find_room_by_name(name) || create_room(name) end
added Campfire#rooms to get a list of available rooms
collectiveidea_tinder
train
3186f544c2a7fdaaff991a4abfc038b4a1b76917
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "universal-webpack", - "version": "0.2.3", + "version": "0.2.4", "description": "Isomorphic Webpack", "main": "index.umd.js", "jsnext:main": "index.es6.js", diff --git a/source/loaders.js b/source/loaders.js index <HASH>..<HASH> 100644 --- a/source/loaders.js +++ b/source/loaders.js @@ -163,5 +163,10 @@ export function normalize_rule_loaders(rule) throw new Error(`Neither "loaders" nor "loader" nor "use" are present inside a module rule: ${util.inspect(rule)}`) } + if (typeof rule.use === 'string') + { + rule.use = [rule.use] + } + rule.use = rule.use.map(parse_loader) } \ No newline at end of file diff --git a/test/loaders.js b/test/loaders.js index <HASH>..<HASH> 100644 --- a/test/loaders.js +++ b/test/loaders.js @@ -86,6 +86,8 @@ describe(`webpack loader utilities`, function() { let loader + // Convert `loader` and `query` to `use` and `options` + loader = { loader: 'style-loader', @@ -111,6 +113,25 @@ describe(`webpack loader utilities`, function() }] }) + // Convert `use` string to array + + loader = + { + use: 'style-loader' + } + + normalize_rule_loaders(loader) + + loader.should.deep.equal + ({ + use: + [{ + loader: 'style-loader' + }] + }) + + // Shouldn't convert compound `loader` and `query` + loader = { loader: 'style-loader!another-loader', @@ -124,6 +145,8 @@ describe(`webpack loader utilities`, function() let execute = () => normalize_rule_loaders(loader) execute.should.throw(`You have both a compound ".loader" and a ".query"`) + // No `loader` is specified + loader = { query: @@ -136,6 +159,8 @@ describe(`webpack loader utilities`, function() execute = () => normalize_rule_loaders(loader) execute.should.throw(`Neither "loaders" nor "loader" nor "use" are present inside a module rule`) + // Convert compound `loader` to `use` array + loader = { loader: 'style-loader?query=true&gay=porn!css-loader?a=b'
Small fix for `rule.use` string. Closes #<I>
catamphetamine_universal-webpack
train
adbf8ead2ec1c6b801137d9b9f7cf0ea0a91f467
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # psphere documentation build configuration file, created by -# sphinx-quickstart on Sat Jul 31 15:42:03 2010. +# sphinx-quickstart on Sat Jul 31 16:51:35 2010. # # This file is execfile()d with the current directory set to its containing dir. # @@ -22,7 +22,7 @@ import sys, os # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates']
Regenerated by Sphinx.
psphere-project_psphere
train
0667326a228bda02f5c5a331b2f2c5dff05aa2b5
diff --git a/Model/Cache.php b/Model/Cache.php index <HASH>..<HASH> 100644 --- a/Model/Cache.php +++ b/Model/Cache.php @@ -107,7 +107,8 @@ class Cache extends AbstractCommonModel ) { $duration = $rule->duration; $scope = intval($rule->scope); - $key = $duration.'-'.$scope; + $value = isset($rule->value) ? strval($rule->value) : ''; + $key = $duration.'-'.$scope.'-'.$value; if (!isset($newRules[$key])) { $newRules[$key] = []; if (!empty($rule->matching)) { @@ -115,6 +116,7 @@ class Cache extends AbstractCommonModel } $newRules[$key]['scope'] = $scope; $newRules[$key]['duration'] = $duration; + $newRules[$key]['value'] = $value; } elseif (!empty($rule->matching)) { $newRules[$key]['matching'] += intval($rule->matching); }
[ENG-<I>] Support limits with value-based filters, and group them.
TheDMSGroup_mautic-contact-client
train
580a5b0aba0ca05be9f42d1cd890403afc547b2f
diff --git a/tools/strip_type_annotations.sh b/tools/strip_type_annotations.sh index <HASH>..<HASH> 100755 --- a/tools/strip_type_annotations.sh +++ b/tools/strip_type_annotations.sh @@ -45,6 +45,7 @@ for dir in $CODEMOD_DIRS; do cnt_dest=`wc -l <$dest/$b` if [ $cnt_src -ne $cnt_dest ]; then echo "ERROR: mismatch file length $b $cnt_src != $cnt_dest" + exit 1 fi done diff --git a/wandb/sdk/wandb_config.py b/wandb/sdk/wandb_config.py index <HASH>..<HASH> 100644 --- a/wandb/sdk/wandb_config.py +++ b/wandb/sdk/wandb_config.py @@ -6,8 +6,12 @@ config. import logging +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence + import six -from six.moves.collections_abc import Sequence import wandb from wandb.util import json_friendly diff --git a/wandb/sdk/wandb_init.py b/wandb/sdk/wandb_init.py index <HASH>..<HASH> 100644 --- a/wandb/sdk/wandb_init.py +++ b/wandb/sdk/wandb_init.py @@ -424,9 +424,7 @@ def getcaller(): def init( job_type: Optional[str] = None, dir=None, - config: Union[ - Dict, str, None - ] = None, # TODO(jhr): type is a union for argparse/absl + config: Union[Dict, str, None] = None, project: Optional[str] = None, entity: Optional[str] = None, reinit: bool = None, @@ -434,7 +432,7 @@ def init( group: Optional[str] = None, name: Optional[str] = None, notes: Optional[str] = None, - magic: Union[dict, str, bool] = None, # TODO(jhr): type is union + magic: Union[dict, str, bool] = None, config_exclude_keys=None, config_include_keys=None, anonymous: Optional[str] = None, diff --git a/wandb/sdk_py27/wandb_config.py b/wandb/sdk_py27/wandb_config.py index <HASH>..<HASH> 100644 --- a/wandb/sdk_py27/wandb_config.py +++ b/wandb/sdk_py27/wandb_config.py @@ -6,8 +6,12 @@ config. import logging +try: + from collections.abc import Sequence +except ImportError: + from collections import Sequence + import six -from six.moves.collections_abc import Sequence import wandb from wandb.util import json_friendly diff --git a/wandb/sdk_py27/wandb_init.py b/wandb/sdk_py27/wandb_init.py index <HASH>..<HASH> 100644 --- a/wandb/sdk_py27/wandb_init.py +++ b/wandb/sdk_py27/wandb_init.py @@ -424,7 +424,7 @@ def getcaller(): def init( job_type = None, dir=None, - config = None, # TODO(jhr): type is a union for argparse/absl + config = None, project = None, entity = None, reinit = None, @@ -432,7 +432,7 @@ def init( group = None, name = None, notes = None, - magic = None, # TODO(jhr): type is union + magic = None, config_exclude_keys=None, config_include_keys=None, anonymous = None,
Merge/pr <I> (#<I>)
wandb_client
train
eb18c9be6d1cf3e9d6aca73b21549285a43c9c63
diff --git a/lib/sinatra/base.rb b/lib/sinatra/base.rb index <HASH>..<HASH> 100644 --- a/lib/sinatra/base.rb +++ b/lib/sinatra/base.rb @@ -407,7 +407,13 @@ module Sinatra private # Run before filters and then locate and run a matching route. def route! - @params = nested_params(@request.params) + # enable nested params in Rack < 1.0; allow indifferent access + @params = + if Rack::Utils.respond_to?(:parse_nested_query) + indifferent_params(@request.params) + else + nested_params(@request.params) + end # before filters self.class.filters.each { |block| instance_eval(&block) } @@ -468,6 +474,18 @@ module Sinatra end end + # Enable string or symbol key access to the nested params hash. + def indifferent_params(params) + params = indifferent_hash.merge(params) + params.each do |key, value| + next unless value.is_a?(Hash) + params[key] = indifferent_params(value) + end + end + + # Recursively replace the params hash with a nested indifferent + # hash. Rack 1.0 has a built in implementation of this method - remove + # this once Rack 1.0 is required. def nested_params(params) return indifferent_hash.merge(params) if !params.keys.join.include?('[') params.inject indifferent_hash do |res, (key,val)|
Use Rack <I>'s built-in nested params Our implementation is retained but is not used when Rack <I> is detected. We should remove Base#nested_params as soon as we make Rack <I> a requirement.
sinatra_sinatra
train
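The Sinatra commit above probes for Rack's own nested-params parser and falls back to a local implementation on older Rack versions. The same capability-detection pattern sketched in Python, with the library passed in as a stand-in object and a deliberately simplified fallback:

def parse_params(raw, library):
    # Prefer the library's parser when the method exists on this version.
    parser = getattr(library, "parse_nested_query", None)
    if parser is not None:
        return parser(raw)
    return local_nested_params(raw)

def local_nested_params(raw):
    # Simplified fallback; a real one would also expand a[b]=c into nesting.
    return dict(pair.split("=", 1) for pair in raw.split("&") if "=" in pair)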
73a2f60f5c41f033610fbf6528f607f5c4b2701e
diff --git a/blueocean-core-js/gulpfile.js b/blueocean-core-js/gulpfile.js index <HASH>..<HASH> 100644 --- a/blueocean-core-js/gulpfile.js +++ b/blueocean-core-js/gulpfile.js @@ -15,6 +15,8 @@ const copy = require('gulp-copy'); const fs = require('fs'); const ts = require('gulp-typescript'); const tsProject = ts.createProject('./tsconfig.json'); +const eslint = require('gulp-eslint'); + // Options, src/dest folders, etc @@ -96,6 +98,7 @@ gulp.task('copy', ['copy-less-assets']); gulp.task('copy-less-assets', () => gulp.src(config.copy.less_assets.sources).pipe(copy(config.copy.less_assets.dest, { prefix: 2 }))); + // Validate contents gulp.task('validate', ['lint', 'test'], () => { const paths = [config.react.dest]; @@ -112,6 +115,20 @@ gulp.task('validate', ['lint', 'test'], () => { var builder = require('@jenkins-cd/js-builder'); +builder.defineTask('lint', () => gulp.src([process.cwd()+"/src/**/*.{js,jsx}", process.cwd()+"/test/**/*.{js,jsx}"]) + .pipe(eslint(process.cwd()+'/../.eslintrc')) + .pipe(eslint.format()) + .pipe(eslint.results(function (results) { + if (results.errorCount > 0 || results.warningCount > 0) { + gutil.log(gutil.colors.magenta('Oops, there are some eslint errors/warnings:')); + if (results.warningCount > 0) { + gutil.log(gutil.colors.magenta('\tWarnings: ' + results.warningCount)); + } + if (results.errorCount > 0) { + gutil.log(gutil.colors.red('\tErrors: ' + results.errorCount)); + process.exit(1); + } + }}))) builder.src([config.ts.destBundle, 'less']); //
Fix Linting in core-js (#<I>)
jenkinsci_blueocean-plugin
train
bfe40c05dc6dc2656a4dde11948ff617f2fa00d6
diff --git a/lib/sup/mbox/loader.rb b/lib/sup/mbox/loader.rb index <HASH>..<HASH> 100644 --- a/lib/sup/mbox/loader.rb +++ b/lib/sup/mbox/loader.rb @@ -116,7 +116,7 @@ class Loader < Source need_blank = File.exists?(@filename) && !File.zero?(@filename) File.open(@filename, "a") do |f| f.puts if need_blank - f.puts "From #{from_email} #{date.utc}" + f.puts "From #{from_email} #{date.rfc2822}" yield f end end
bugfix: write mbox messages in RFC<I> format Otherwise, non-en_US locales may screw up the mbox format and Sup won't be able to read its own output.
sup-heliotrope_sup
train
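The sup fix swaps a locale-sensitive date string for RFC 2822 in the mbox "From " separator line. The equivalent idea in Python uses email.utils.formatdate, whose output does not vary with the process locale; the sender address is a placeholder:

from email.utils import formatdate

def mbox_from_line(sender, timestamp=None):
    # formatdate emits an RFC 2822 date ("Fri, 09 Nov 2001 01:08:47 -0000"),
    # unlike strftime, whose month/day names change with the locale.
    return "From {} {}".format(sender, formatdate(timestamp))

# mbox_from_line("alice@example.com") -> "From alice@example.com Fri, 09 ..."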
028656e1e95049609d9f19ea8df499ffe9a9eb6f
diff --git a/gitenberg/travis/__init__.py b/gitenberg/travis/__init__.py index <HASH>..<HASH> 100644 --- a/gitenberg/travis/__init__.py +++ b/gitenberg/travis/__init__.py @@ -142,5 +142,3 @@ def build_epub(epub_title='book'): # error code? # http://stackoverflow.com/questions/6180185/custom-python-exceptions-with-error-codes-and-error-messages raise Exception ('no suitable book found') - -
Merge branch 'master' into covers # Conflicts: # gitenberg/travis/__init__.py
gitenberg-dev_gitberg
train
f95ffb282998024a22424cd39b73c224048c9bfb
diff --git a/src/webroot/cms/content-manager/pagecontent/includes/contents/editable.js b/src/webroot/cms/content-manager/pagecontent/includes/contents/editable.js index <HASH>..<HASH> 100644 --- a/src/webroot/cms/content-manager/pagecontent/includes/contents/editable.js +++ b/src/webroot/cms/content-manager/pagecontent/includes/contents/editable.js @@ -404,6 +404,10 @@ YUI.add('supra.page-content-editable', function (Y) { page_data = Page.getPageData(), data = null; + if ( ! this.properties) { + throw new Error("Properties not found for object " + this.constructor.name); + } + data = { 'page_id': page_data.id, 'block_id': this.getId(),
Bug #<I> – situation is handled by raising exception now.
sitesupra_sitesupra
train
b0c6535359fb671385428a3d9f9dd53b92caf49f
diff --git a/test/geocoders/algolia.py b/test/geocoders/algolia.py index <HASH>..<HASH> 100644 --- a/test/geocoders/algolia.py +++ b/test/geocoders/algolia.py @@ -1,17 +1,11 @@ # coding: utf-8 from __future__ import unicode_literals -import unittest - from geopy.geocoders import AlgoliaPlaces from geopy.point import Point from test.geocoders.util import GeocoderTestBase, env [email protected]( - bool(env.get('ALGOLIA_PLACES_APP_ID')) and bool(env.get('ALGOLIA_PLACES_API_KEY')), - 'No ALGOLIA_PLACES_APP_ID and/or no ALGOLIA_PLACES_API_KEY env variables setted' -) class AlgoliaPlacesTestCase(GeocoderTestBase): @classmethod @@ -21,8 +15,8 @@ class AlgoliaPlacesTestCase(GeocoderTestBase): @classmethod def make_geocoder(cls, **kwargs): return AlgoliaPlaces( - env['ALGOLIA_PLACES_APP_ID'], - env['ALGOLIA_PLACES_API_KEY'], + app_id=env.get('ALGOLIA_PLACES_APP_ID'), + api_key=env.get('ALGOLIA_PLACES_API_KEY'), timeout=3, **kwargs)
AlgoliaPlaces: run tests without credentials too
geopy_geopy
train
c09a52c6ab850886b0751151116ee0951cceb7a8
diff --git a/lib/conceptql/nodes/intersect.rb b/lib/conceptql/nodes/intersect.rb index <HASH>..<HASH> 100644 --- a/lib/conceptql/nodes/intersect.rb +++ b/lib/conceptql/nodes/intersect.rb @@ -17,7 +17,7 @@ module ConceptQL end typed_queries = exprs.map do |type, queries| queries.inject do |q, query| - q.intersect(query, all: true) + q.intersect(query) end end
Intersect: Oracle doesn't support INTERSECT ALL?
outcomesinsights_conceptql
train
8b831d5e29c56ecbb5a5c66f8d07698e372c11a9
diff --git a/src/Dialog.js b/src/Dialog.js index <HASH>..<HASH> 100644 --- a/src/Dialog.js +++ b/src/Dialog.js @@ -259,11 +259,16 @@ OO.ui.Dialog.prototype.initialize = function () { // Parent method OO.ui.Dialog.super.prototype.initialize.call( this ); + var titleId = OO.ui.generateElementId(); + // Properties - this.title = new OO.ui.LabelWidget(); + this.title = new OO.ui.LabelWidget( { + id: titleId + } ); // Initialization this.$content.addClass( 'oo-ui-dialog-content' ); + this.$element.attr( 'aria-labelledby', titleId ); this.setPendingElement( this.$head ); }; diff --git a/src/core.js b/src/core.js index <HASH>..<HASH> 100644 --- a/src/core.js +++ b/src/core.js @@ -31,6 +31,21 @@ OO.ui.Keys = { }; /** + * @property {Number} + */ +OO.ui.elementId = 0; + +/** + * Generate a unique ID for element + * + * @return {String} [id] + */ +OO.ui.generateElementId = function () { + OO.ui.elementId += 1; + return 'oojsui-' + OO.ui.elementId; +}; + +/** * Check if an element is focusable. * Inspired from :focusable in jQueryUI v1.11.4 - 2015-04-14 *
Dialog: Label in aria terms Give the title a unique ID, and use it to add the 'aria-labelledby' attribute on the dialog. Bug: T<I> Change-Id: Ia<I>adbb<I>f<I>b<I>fcc<I>de<I>eecf
wikimedia_oojs-ui
train
00d71972f010e615aee490f33293b73f9fe04381
diff --git a/value/src/test/java/com/google/auto/value/processor/TypeSimplifierTest.java b/value/src/test/java/com/google/auto/value/processor/TypeSimplifierTest.java index <HASH>..<HASH> 100644 --- a/value/src/test/java/com/google/auto/value/processor/TypeSimplifierTest.java +++ b/value/src/test/java/com/google/auto/value/processor/TypeSimplifierTest.java @@ -106,6 +106,7 @@ public class TypeSimplifierTest { } @Test + @SuppressWarnings("TypeEquals") // We want to test the equals method invocation on TypeMirror. public void testTypeMirrorSet() { // Test the TypeMirrorSet methods. Resist the temptation to rewrite these in terms of // Truth operations! For example, don't change assertThat(set.size()).isEqualTo(0) into
Add @SuppressWarnings to a test that deliberately calls TypeMirror.equals. See [] for more details #TypeEqualsCleanup RELNOTES=Suppress TypeEquals warning ------------- Created by MOE: <URL>
google_auto
train
aebf83b2cd69f5e98d41d797bdcbb138c8eca2ed
diff --git a/client/__init__.py b/client/__init__.py index <HASH>..<HASH> 100644 --- a/client/__init__.py +++ b/client/__init__.py @@ -1,4 +1,4 @@ -__version__ = 'v1.6.16' +__version__ = 'v1.6.14' FILE_NAME = 'ok'
Pretend to be <I> for migration
okpy_ok-client
train
52e4e6825db7099988e8cfc07c00755024c32e71
diff --git a/app/models/ordinary_cms/section.rb b/app/models/ordinary_cms/section.rb index <HASH>..<HASH> 100644 --- a/app/models/ordinary_cms/section.rb +++ b/app/models/ordinary_cms/section.rb @@ -17,7 +17,7 @@ module OrdinaryCms def render(what=:content) if what == :alias - ActionController::Base.helpers.content_tag(:div, {id: "#{self.name}_title", 'data-mercury' => 'full'}) {self.alias} + ActionController::Base.helpers.content_tag(:div, {id: "#{self.name}_title", 'data-mercury' => 'full'}) {raw self.alias} else ActionController::Base.helpers.content_tag( :div,
alias style should be editable
max-konin_ordinary_cms
train
6a5460eb3d3c854f747b13ce0f97b1e81ef9fb45
diff --git a/gputools/convolve/convolve_spatial3.py b/gputools/convolve/convolve_spatial3.py index <HASH>..<HASH> 100644 --- a/gputools/convolve/convolve_spatial3.py +++ b/gputools/convolve/convolve_spatial3.py @@ -232,8 +232,8 @@ def _convolve_spatial3(im, hs, prog.run_kernel("fill_psf_grid3", Nblocks[::-1],None, tmp_g.data, - np.int32(im.shape[1]), np.int32(im.shape[2]), + np.int32(im.shape[1]), np.int32(i*Nblocks[2]), np.int32(j*Nblocks[1]), np.int32(k*Nblocks[0]), @@ -271,7 +271,9 @@ def _convolve_spatial3(im, hs, # convolution fft(patches_g,inplace=True, batch = np.prod(Gs), plan = plan) fft(h_g,inplace=True, batch = np.prod(Gs), plan = plan) - patches_g = patches_g *h_g + prog.run_kernel("mult_inplace",(np.prod(Npatchs)*np.prod(Gs),),None, + patches_g.data, h_g.data) + fft(patches_g, inplace=True, inverse = True,
fixed bug in conv_spat3
maweigert_gputools
train
9591d72e569d7813f50fcabe6aeb2b4fa2eb13a1
diff --git a/spec/integration/struct_as_embedded_value_spec.rb b/spec/integration/struct_as_embedded_value_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/struct_as_embedded_value_spec.rb +++ b/spec/integration/struct_as_embedded_value_spec.rb @@ -15,7 +15,7 @@ describe 'Using Struct as an embedded value attribute' do end subject do - Examples::Rectangle.new(top_left: [ 3, 5 ], bottom_right: [ 8, 7 ]) + Examples::Rectangle.new(:top_left => [ 3, 5 ], :bottom_right => [ 8, 7 ]) end specify 'initialize a struct object with correct attributes' do
Dooh, fix spec for <I>
solnic_virtus
train
7a816c7ea1ffcb58c700c1b0ad97a4a210399af0
diff --git a/dist/client.js b/dist/client.js index <HASH>..<HASH> 100644 --- a/dist/client.js +++ b/dist/client.js @@ -6,7 +6,7 @@ var _extends = Object.assign || function (target) { for (var i = 1; i < argument // --- Begin Awkward Hackzorz --- -var REACTOTRON_VERSION = '@@REACTOTRON_VERSION@@'; +var REACTOTRON_VERSION = '0.8.0'; var R = require('ramda'); // client enabled flag @@ -165,7 +165,9 @@ client.createSubscriptionListener = function (store) { var values = R.map(function (key) { return [key, RS.dotPath(key, state)]; }, expanded); - client.sendCommand('redux.subscribe.values', { values: values }); + if (R.length(values) > 0) { + client.sendCommand('redux.subscribe.values', { values: values }); + } }; client.onCommand('redux.subscribe.request', function (action, client) { @@ -321,4 +323,4 @@ client.bench = function (title) { return { step: step, stop: stop }; }; -module.exports = client; \ No newline at end of file +module.exports = client;
Builds the <I> release.
infinitered_reactotron
train
b0e65c2d5d2cd285b3059a7b7e43edec16798708
diff --git a/polyaxon_schemas/polyaxonfile/specification.py b/polyaxon_schemas/polyaxonfile/specification.py index <HASH>..<HASH> 100644 --- a/polyaxon_schemas/polyaxonfile/specification.py +++ b/polyaxon_schemas/polyaxonfile/specification.py @@ -432,9 +432,10 @@ class GroupSpecification(BaseSpecification): @cached_property def early_stopping(self): + early_stopping = None if self.settings: - return self.settings.early_stopping or [] - return [] + early_stopping = self.settings.early_stopping + return early_stopping or [] @cached_property def n_experiments(self): @@ -443,18 +444,26 @@ class GroupSpecification(BaseSpecification): return None @cached_property - def experiments_def(self): + def search_method(self): + search_method = None if self.settings: - concurrent_experiments = self.settings.concurrent_experiments search_method = self.settings.search_method - else: - concurrent_experiments = 1 - search_method = SEARCH_METHODS.SEQUENTIAL + return search_method or SEARCH_METHODS.SEQUENTIAL + + @cached_property + def concurrent_experiments(self): + concurrent_experiments = None + if self.settings: + concurrent_experiments = self.settings.concurrent_experiments + return concurrent_experiments or 1 + + @cached_property + def experiments_def(self): return ( self.matrix_space, self.n_experiments, - concurrent_experiments, - search_method + self.concurrent_experiments, + self.search_method ) @cached_property
Add search_method and concurrency properties
polyaxon_polyaxon
train
c95b294345b6be0093da4318dd7919fda9ac105a
diff --git a/salt/cloud/clouds/openstack.py b/salt/cloud/clouds/openstack.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/openstack.py +++ b/salt/cloud/clouds/openstack.py @@ -660,7 +660,8 @@ def request_instance(vm_, conn=None, call=None): kwargs = copy.deepcopy(vm_) log.info("Creating Cloud VM %s", vm_["name"]) __utils__["cloud.check_name"](vm_["name"], "a-zA-Z0-9._-") - conn = get_conn() + if conn is None: + conn = get_conn() userdata = config.get_cloud_config_value( "userdata", vm_, __opts__, search_global=False, default=None ) diff --git a/tests/unit/cloud/clouds/test_openstack.py b/tests/unit/cloud/clouds/test_openstack.py index <HASH>..<HASH> 100644 --- a/tests/unit/cloud/clouds/test_openstack.py +++ b/tests/unit/cloud/clouds/test_openstack.py @@ -6,17 +6,15 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ -# Import Python libs from __future__ import absolute_import, print_function, unicode_literals -# Import Salt Libs from salt.cloud.clouds import openstack - -# Import Salt Testing Libs from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import patch +from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase +# pylint: disable=confusing-with-statement + class MockImage(object): name = "image name" @@ -135,3 +133,41 @@ class OpenstackTestCase(TestCase, LoaderModuleMockMixin): with patch("salt.cloud.clouds.openstack._get_ips", return_value=[]): ret = openstack.show_instance(conn.node.name, conn=conn, call="action") self.assertEqual(ret["image"], MockImage.name) + + def test_request_instance_should_use_provided_connection_if_not_None(self): + fake_conn = MagicMock() + + patch_get_conn = patch("salt.cloud.clouds.openstack.get_conn", autospec=True) + patch_utils = patch.dict( + openstack.__utils__, + {"cloud.check_name": MagicMock(), "dictupdate.update": MagicMock()}, + ) + patch_shade = patch.object( + openstack, "shade.exc.OpenStackCloudException", Exception, create=True + ) + + with patch_get_conn as fake_get_conn, patch_utils, patch_shade: + openstack.request_instance( + vm_={"name": "fnord", "driver": "fnord"}, conn=fake_conn + ) + + fake_get_conn.assert_not_called() + + def test_request_instance_should_create_conn_if_provided_is_None(self): + none_conn = None + + patch_get_conn = patch("salt.cloud.clouds.openstack.get_conn", autospec=True) + patch_utils = patch.dict( + openstack.__utils__, + {"cloud.check_name": MagicMock(), "dictupdate.update": MagicMock()}, + ) + patch_shade = patch.object( + openstack, "shade.exc.OpenStackCloudException", Exception, create=True + ) + + with patch_get_conn as fake_get_conn, patch_utils, patch_shade: + openstack.request_instance( + vm_={"name": "fnord", "driver": "fnord"}, conn=none_conn + ) + + fake_get_conn.assert_called_once_with()
Fix openstack.request_instance conn parameter It looks like this one got missed when this was introduced. Everywhere else within openstack.py uses this pattern. Also added some tests for this function to ensure that it does the expected thing.
saltstack_salt
train
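The Salt fix above is an instance of a common pattern: accept an optional connection argument and build one only when the caller supplied none. A minimal Python sketch with hypothetical stand-ins for the real connection factory and launch call:

def request_instance(vm, conn=None):
    # Reuse the caller's connection when given; create one only if missing.
    if conn is None:
        conn = get_conn()
    return launch(conn, vm)

def get_conn():
    # Hypothetical stand-in for the driver's real connection factory.
    return {"connected": True}

def launch(conn, vm):
    # Hypothetical stand-in for the actual instance request.
    return {"conn": conn, "vm": vm}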