Dataset columns:
  hash     stringlengths   40 to 40
  diff     stringlengths   131 to 114k
  message  stringlengths   7 to 980
  project  stringlengths   5 to 67
  split    stringclasses   1 value
36dfad011ab079c077e6020f5a07739e01ebdb48
diff --git a/datadog_checks_base/datadog_checks/base/utils/db/query.py b/datadog_checks_base/datadog_checks/base/utils/db/query.py index <HASH>..<HASH> 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/query.py +++ b/datadog_checks_base/datadog_checks/base/utils/db/query.py @@ -59,7 +59,7 @@ class Query(object): query = self.query_data.get('query') if not query: raise ValueError('field `query` for {} is required'.format(query_name)) - elif not isinstance(query, str): + elif query_name.startswith('custom query #') and not isinstance(query, str): raise ValueError('field `query` for {} must be a string'.format(query_name)) columns = self.query_data.get('columns') diff --git a/datadog_checks_base/tests/base/utils/db/test_query_manager.py b/datadog_checks_base/tests/base/utils/db/test_query_manager.py index <HASH>..<HASH> 100644 --- a/datadog_checks_base/tests/base/utils/db/test_query_manager.py +++ b/datadog_checks_base/tests/base/utils/db/test_query_manager.py @@ -33,10 +33,10 @@ class TestQueryCompilation: with pytest.raises(ValueError, match='^field `query` for test query is required$'): query_manager.compile_queries() - def test_query_not_string(self): - query_manager = create_query_manager({'name': 'test query', 'query': 5}) + def test_custom_query_not_string(self): + query_manager = create_query_manager({'name': 'custom query #1', 'query': {'query': 'example'}}) - with pytest.raises(ValueError, match='^field `query` for test query must be a string$'): + with pytest.raises(ValueError, match='^field `query` for custom query #1 must be a string$'): query_manager.compile_queries() def test_no_columns(self):
Extend `QueryManager` query type (#<I>) * Add 'timeout' field to QueryManager queries (cherry picked from commit 7c<I>f<I>b4f6ee7bfe<I>fb7e<I>b6b<I>ad<I>) * Apply suggestions from code review
DataDog_integrations-core
train
a6a5c8852d2b2abaf1489ac3687d9dffdf48120f
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -101,6 +101,12 @@ fs.readdirSync(polyfillSourceFolder).forEach(function (polyfillName) { } return expandedPolyfillNames; + + // Add all the dependencies of a polyfill by resolving the polyfill to itself plus all its dependencies + }).addResolver(function addDependencies(polyfillIdentifierName) { + if (sources[polyfillIdentifierName] && sources[polyfillIdentifierName].config.dependencies) { + return [polyfillIdentifierName].concat(sources[polyfillIdentifierName].config.dependencies); + } }); }());
Add dependencies to the bundle automatically, see #<I>
Financial-Times_polyfill-service
train
bf8f4b2423620bffe431fa2612c110035b89a59b
diff --git a/pkg/server/sync.go b/pkg/server/sync.go index <HASH>..<HASH> 100644 --- a/pkg/server/sync.go +++ b/pkg/server/sync.go @@ -740,13 +740,13 @@ func (sh *SyncHandler) startValidatePrefix(ctx *context.Context, pfx string, doD go func() { defer close(c) err := blobserver.EnumerateAllFrom(ctx, e, pfx, func(sb blob.SizedRef) error { + // TODO: could add a more efficient method on blob.Ref to do this, + // that doesn't involve call String(). + if !strings.HasPrefix(sb.Ref.String(), pfx) { + return errNotPrefix + } select { case c <- sb: - // TODO: could add a more efficient method on blob.Ref to do this, - // that doesn't involve call String(). - if !strings.HasPrefix(sb.Ref.String(), pfx) { - return errNotPrefix - } sh.mu.Lock() if doDest { sh.vdestCount++
sync: fix bug in prefix enumeration. could send one extra item. Depending on timing, could lead to ListMissingDestinationBlobs getting out of sync and causing a lot of blobs to be replicated that were fine and already on the server. Change-Id: I<I>e<I>f1fe4e<I>f8f<I>bc9d<I>a<I>e<I>
perkeep_perkeep
train
44f9848a43dba3faedb043255e2d86d87af6fe32
diff --git a/lib/dialog.js b/lib/dialog.js index <HASH>..<HASH> 100644 --- a/lib/dialog.js +++ b/lib/dialog.js @@ -475,7 +475,7 @@ class Dialog extends Emitter { break; case 'ACK': - this.emit('ack', req) ; + setImmediate(() => this.emit('ack', req)); break ; default: diff --git a/lib/srf.js b/lib/srf.js index <HASH>..<HASH> 100644 --- a/lib/srf.js +++ b/lib/srf.js @@ -771,7 +771,7 @@ class Srf extends Emitter { .then((uac) => { //success establishing uac (B) leg, now establish uas (A) leg - debug('createB2BUA: successfully created UAC..'); + debug('createB2BUA: successfully created UAC..queueing requests..'); // need to hold any reINVITEs etc on the B leg until we establish A uac.queueRequests = true; @@ -784,7 +784,10 @@ class Srf extends Emitter { }) .then((uas) => { debug('createB2BUA: successfully created UAS..done!'); - uas.once('ack', () => uac.queueRequests = false); + uas.once('ack', () => { + debug('createB2BUA: got ACK from UAS, process any queued UAC requests') + uac.queueRequests = false; + }); uac.other = uas; uas.other = uac; return callback(null, {uac, uas}); // successfully connected! resolve promise with both dialogs diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "drachtio-srf", - "version": "4.4.20-beta.2", + "version": "4.4.20", "description": "drachtio signaling resource framework", "main": "lib/srf.js", "scripts": {
<I> (release fix for #<I>)
davehorton_drachtio-srf
train
1011acaf243271bc04f3cd31bd3f67f40827d393
diff --git a/hazelcast-jet-core/src/test/java/com/hazelcast/jet/pipeline/BatchStageTest.java b/hazelcast-jet-core/src/test/java/com/hazelcast/jet/pipeline/BatchStageTest.java index <HASH>..<HASH> 100644 --- a/hazelcast-jet-core/src/test/java/com/hazelcast/jet/pipeline/BatchStageTest.java +++ b/hazelcast-jet-core/src/test/java/com/hazelcast/jet/pipeline/BatchStageTest.java @@ -296,21 +296,22 @@ public class BatchStageTest extends PipelineTestSupport { @Test public void mapUsingIMap() { + // Given List<Integer> input = sequence(itemCount); putToBatchSrcMap(input); - IMap<Integer, String> map = member.getMap(randomMapName()); for (int i : input) { map.put(i, String.valueOf(i)); } + // When + BatchStage<Entry<Integer, String>> stage = srcStage.mapUsingIMap(map, (m, r) -> entry(r, m.get(r))); - srcStage.mapUsingIMap(map, (m, r) -> entry(r, m.get(r))) - .drainTo(sink); - + // Then + stage.drainTo(sink); execute(); - - List<Entry<Integer, String>> expected = input.stream() + List<Entry<Integer, String>> expected = input + .stream() .map(i -> entry(i, String.valueOf(i))) .collect(toList()); assertEquals(toBag(expected), sinkToBag()); @@ -318,20 +319,21 @@ public class BatchStageTest extends PipelineTestSupport { @Test public void mapUsingIMap_keyed() { + // Given List<Integer> input = sequence(itemCount); putToBatchSrcMap(input); - IMap<Integer, String> map = member.getMap(randomMapName()); for (int integer : input) { map.put(integer, String.valueOf(integer)); } - srcStage.groupingKey(r -> r) - .mapUsingIMap(map, (k, v) -> Util.entry(k, v)) - .drainTo(sink); + // When + BatchStage<Entry<Integer, String>> stage = srcStage.groupingKey(r -> r) + .mapUsingIMap(map, (k, v) -> entry(k, v)); + // Then + stage.drainTo(sink); execute(); - List<Entry<Integer, String>> expected = input.stream() .map(i -> entry(i, String.valueOf(i))) .collect(toList());
Add given-when-then to test methods
hazelcast_hazelcast
train
3b74dcd3fe13aa67189007251078c9d79f55cd60
diff --git a/Resources/public/js/views/ez-languageselectionboxview.js b/Resources/public/js/views/ez-languageselectionboxview.js index <HASH>..<HASH> 100644 --- a/Resources/public/js/views/ez-languageselectionboxview.js +++ b/Resources/public/js/views/ez-languageselectionboxview.js @@ -274,13 +274,15 @@ YUI.add('ez-languageselectionboxview', function (Y) { * * @protected * @method _cancelLanguageSelection + * @param {EventFacade} e */ - _cancelLanguageSelection: function () { + _cancelLanguageSelection: function (e) { /** * Fired when language selection is being cancelled * * @event cancelLanguageSelection */ + e.preventDefault(); this.fire(CANCEL_LANGUAGE_SELECTION); }, diff --git a/Tests/js/views/assets/ez-languageselectionboxview-tests.js b/Tests/js/views/assets/ez-languageselectionboxview-tests.js index <HASH>..<HASH> 100644 --- a/Tests/js/views/assets/ez-languageselectionboxview-tests.js +++ b/Tests/js/views/assets/ez-languageselectionboxview-tests.js @@ -103,17 +103,25 @@ YUI.add('ez-languageselectionboxview-tests', function (Y) { "Should fire the cancelLanguageSelection event": function () { var cancel = this.view.get('container').one('.ez-languageselectionbox-close'), that = this, - cancelFired = false; + cancelFired = false, + isPrevented = false; this.view.on('cancelLanguageSelection', function (e) { cancelFired = true; }); + this.view.get('container').on('tap', function (e) { + isPrevented = !!e.prevented; + }); cancel.simulateGesture('tap', function () { that.resume(function () { Assert.isTrue( cancelFired, "The cancelLanguageSelection event should have been fired" ); + Assert.isTrue( + isPrevented, + "The tap event should have been prevented" + ); }); }); this.wait();
EZP-<I>: When closing language selection box it changes the URI
ezsystems_PlatformUIBundle
train
57c2fe5eb58979a3f596dda34c06109890ee1784
diff --git a/lib/hocho/drivers/ssh_base.rb b/lib/hocho/drivers/ssh_base.rb index <HASH>..<HASH> 100644 --- a/lib/hocho/drivers/ssh_base.rb +++ b/lib/hocho/drivers/ssh_base.rb @@ -71,7 +71,12 @@ module Hocho temporary_passphrase = SecureRandom.base64(129).chomp - derive = system(*%w(openssl enc -pbkdf2), in: File::NULL, out: File::NULL, err: [:child, :out]) ? %w(-pbkdf2) : [] + local_supports_pbkdf2 = system(*%w(openssl enc -pbkdf2), in: File::NULL, out: File::NULL, err: [:child, :out]) + remote_supports_pbkdf2 = begin + exitstatus, * = ssh_run("openssl enc -pbkdf2", error: false, &:eof!) + exitstatus == 0 + end + derive = local_supports_pbkdf2 && remote_supports_pbkdf2 ? %w(-pbkdf2) : [] encrypted_password = IO.pipe do |r,w| w.write temporary_passphrase
ssh_base(sudo): Check that OpenSSL at both ends support pbkdf2
sorah_hocho
train
90cc8293339b056a88d0ce183606e48adacc8327
diff --git a/ezp/Persistence/LegacyStorage/Content/LocationHandler.php b/ezp/Persistence/LegacyStorage/Content/LocationHandler.php index <HASH>..<HASH> 100644 --- a/ezp/Persistence/LegacyStorage/Content/LocationHandler.php +++ b/ezp/Persistence/LegacyStorage/Content/LocationHandler.php @@ -268,5 +268,15 @@ class LocationHandler implements BaseLocationHandler { throw new RuntimeException( '@TODO: Implement' ); } + + /** + * Removes a location from its $locationId + * + * @param mixed $locationId + */ + public function delete( $locationId ) + { + throw new RuntimeException( '@TODO: Implement' ); + } } ?>
Add: Missing delete() function after last commit by jv
ezsystems_ezpublish-kernel
train
e9e1ea42c7e7f6d816a0ad7bffd5cdd43de06c5a
diff --git a/lib/collection.js b/lib/collection.js index <HASH>..<HASH> 100644 --- a/lib/collection.js +++ b/lib/collection.js @@ -138,6 +138,10 @@ Collection.prototype.previous = function(name, ns) { return this._related('previousSibling', name, ns); } +Collection.prototype.root = function() { + return wrap((this[0] ? this[0].ownerDocument : [])); +} + /** * Get the parent of each element in the current set of matched elements, * optionally filtered by a name and namespace. @@ -248,18 +252,17 @@ Collection.prototype.ns = function() { * @api public */ Collection.prototype.attr = function(name, val) { - var n = this[0]; if (val || typeof name == 'object') { var attrs = {}; if (val) { attrs[name] = val; } else { attrs = name; } - - for (var name in attrs) { - n && n.setAttribute(name, attrs[name]); - } - return this; + return this.forEach(function(n) { + for (var name in attrs) { + n && n.setAttribute(name, attrs[name]); + } + }); } - return (n ? n.getAttribute(name) || null : null); + return (this[0] ? this[0].getAttribute(name) || null : null); } Collection.prototype.attrNS = function(name, ns) { @@ -267,20 +270,22 @@ Collection.prototype.attrNS = function(name, ns) { } Collection.prototype.text = function(val) { - var n = this[0]; if (val) { - if (!n) { return this; } - var doc = this[0].ownerDocument , c = doc.createTextNode(val); - // remove existing child nodes + return this.empty().forEach(function(n) { + var doc = n.ownerDocument + , c = doc.createTextNode(val); + n.appendChild(c); + }); + } + return (this[0] ? this[0].textContent || null : null); +} + +Collection.prototype.empty = function() { + return this.forEach(function(n) { while (n.firstChild) { - n.removeChild(this[0].firstChild); + n.removeChild(n.firstChild); } - // append text node - n.appendChild(c); - return this; - } - return (n ? n.textContent || null : null); + }); } Collection.prototype.toString = function() { diff --git a/test/collection.getters.test.js b/test/collection.getters.test.js index <HASH>..<HASH> 100644 --- a/test/collection.getters.test.js +++ b/test/collection.getters.test.js @@ -11,6 +11,11 @@ describe('getters', function() { expect(el.attr('bar')).to.equal('baz'); }); + it('should get value of first element in set', function() { + var el = $('<foo><bar a="one">baz</bar><qux a="two">qoo</qux></foo>').children(); + expect(el.attr('a')).to.equal('one'); + }); + it('should not get value of non-existent attribute', function() { expect(el.attr('bux')).to.be.null; }); @@ -21,6 +26,11 @@ describe('getters', function() { expect(el.text()).to.equal('hello'); }); + it('should get text of first element in set', function() { + var el = $('<foo><bar>baz</bar><qux>qoo</qux></foo>').children(); + expect(el.text()).to.equal('baz'); + }); + it('should not get value of empty element', function() { var el = $('<foo bar="baz"></foo>'); expect(el.text()).to.be.null; diff --git a/test/collection.setters.test.js b/test/collection.setters.test.js index <HASH>..<HASH> 100644 --- a/test/collection.setters.test.js +++ b/test/collection.setters.test.js @@ -31,6 +31,13 @@ describe('setters', function() { expect(el.toString()).to.equal('<foo bar="bux" a="1" b="2">hello</foo>'); }); + it('should set value of all elements in set', function() { + var el = $('<foo><bar>hello</bar><baz>world</baz></foo>').children(); + + expect(el.attr('a', '1')).to.equal(el); + expect(el.root().toString()).to.equal('<foo><bar a="1">hello</bar><baz a="1">world</baz></foo>'); + }); + it('should be noop on empty collection', function() { var el = $(); @@ -48,6 +55,14 @@ describe('setters', function() { expect(el.toString()).to.equal('<foo bar="baz">goodbye</foo>'); }); + it('should set value of all elements in set', function() { + var el = $('<foo><bar>hello</bar><baz>world</baz></foo>').children(); + + expect(el.text('goodbye')).to.equal(el); + expect(el.text()).to.equal('goodbye'); + expect(el.root().toString()).to.equal('<foo><bar>goodbye</bar><baz>goodbye</baz></foo>'); + }); + it('should be noop on empty collection', function() { var el = $();
Set text and attr of multiple elements in a set.
jaredhanson_node-xtraverse
train
e1076162832b159395655752005d188191238cc5
diff --git a/test_src/test_proj/views.py b/test_src/test_proj/views.py index <HASH>..<HASH> 100644 --- a/test_src/test_proj/views.py +++ b/test_src/test_proj/views.py @@ -1,6 +1,6 @@ -from vstutils.api.serializers import VSTSerializer +from vstutils.api.serializers import VSTSerializer, EmptySerializer from vstutils.api.base import ModelViewSetSet, Response, CopyMixin -from vstutils.api.decorators import nested_view, action +from vstutils.api.decorators import nested_view, subaction, action from vstutils.api import filters from vstutils.api import fields from .models import Host, HostGroup @@ -66,11 +66,14 @@ class HostViewSet(ModelViewSetSet): } filter_class = HostFilter - @action(detail=True) + @subaction( + response_code=200, response_serializer=EmptySerializer, detail=True, + description='Some desc' + ) def test(self, request, *args, **kwargs): return Response("OK", 200).resp - @action(detail=True, serializer_class=HostSerializer) + @subaction(detail=True, serializer_class=HostSerializer) def test2(self, request, *args, **kwargs): self.get_object() return Response("OK", 201).resp @@ -123,3 +126,12 @@ try: pass except nested_view.NoView: pass + + +try: + class ErrorView(_HostGroupViewSet): + @subaction(response_code=200, detail=True) + def test_err(self, request, *args, **kwargs): + return Response("OK", 200).resp +except AssertionError: + pass diff --git a/vstutils/api/decorators.py b/vstutils/api/decorators.py index <HASH>..<HASH> 100644 --- a/vstutils/api/decorators.py +++ b/vstutils/api/decorators.py @@ -4,6 +4,7 @@ from inspect import getmembers from django.db import transaction from rest_framework.decorators import action from rest_framework import viewsets as vsets, response, status +from drf_yasg.utils import swagger_auto_schema from ..exceptions import VSTUtilsException @@ -72,6 +73,33 @@ def nested_action(name, arg=None, methods=None, manager_name=None, *args, **kwar return decorator +def subaction(*args, **kwargs): + operation_description = kwargs.pop('description', None) + response_code = kwargs.pop('response_code', None) + response_serializer = kwargs.pop( + 'response_serializer', kwargs.get('serializer_class', None) + ) + assert ( + (response_code is None) or + (response_code is not None and response_serializer is not None) + ), "If `response_code` was setted, `response_serializer` should be setted too." + + def decorator(func): + func_object = action(*args, **kwargs)(func) + override_kw = dict() + if response_code: + override_kw['responses'] = { + response_code: response_serializer() + } + if operation_description: + override_kw['operation_description'] = operation_description + else: + override_kw['operation_description'] = str(func.__doc__ or '').strip() + return swagger_auto_schema(**override_kw)(func_object) + + return decorator + + def get_action_name(master_view, method): method = method.lower() if method == 'post':
Add `subaction` decorator with response data and `operation_description` * Auto add operation description in openapi from function docs. * Set `response_code` and `response_serializer` info from decorator.
vstconsulting_vstutils
train
8ad9ecdb15433fe76ac726b86783de3b206ce62a
diff --git a/gyp/AUTHORS b/gyp/AUTHORS index <HASH>..<HASH> 100644 --- a/gyp/AUTHORS +++ b/gyp/AUTHORS @@ -1,9 +1,10 @@ # Names should be added to this file like so: # Name or Organization <email address> -Google Inc. -Bloomberg Finance L.P. -Yandex LLC +Google Inc. <*@google.com> +Bloomberg Finance L.P. <*@bloomberg.net> +IBM Inc. <*@*.ibm.com> +Yandex LLC <*@yandex-team.ru> Steven Knight <[email protected]> Ryan Norton <[email protected]> diff --git a/gyp/PRESUBMIT.py b/gyp/PRESUBMIT.py index <HASH>..<HASH> 100644 --- a/gyp/PRESUBMIT.py +++ b/gyp/PRESUBMIT.py @@ -73,23 +73,15 @@ PYLINT_DISABLED_WARNINGS = [ ] -def CheckChangeOnUpload(input_api, output_api): - report = [] - report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api)) - return report - - -def CheckChangeOnCommit(input_api, output_api): - report = [] - +def _LicenseHeader(input_api): # Accept any year number from 2009 to the current year. current_year = int(input_api.time.strftime('%Y')) allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' # The (c) is deprecated, but tolerate it until it's removed from all files. - license = ( + return ( r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n' r'.*? Use of this source code is governed by a BSD-style license that ' r'can be\n' @@ -98,8 +90,18 @@ def CheckChangeOnCommit(input_api, output_api): 'year': years_re, } +def CheckChangeOnUpload(input_api, output_api): + report = [] + report.extend(input_api.canned_checks.PanProjectChecks( + input_api, output_api, license_header=_LicenseHeader(input_api))) + return report + + +def CheckChangeOnCommit(input_api, output_api): + report = [] + report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api, license_header=license)) + input_api, output_api, license_header=_LicenseHeader(input_api))) report.extend(input_api.canned_checks.CheckTreeIsOpen( input_api, output_api, 'http://gyp-status.appspot.com/status', diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/pylib/gyp/generator/make.py index <HASH>..<HASH> 100644 --- a/gyp/pylib/gyp/generator/make.py +++ b/gyp/pylib/gyp/generator/make.py @@ -90,7 +90,10 @@ def CalculateVariables(default_variables, params): if flavor == 'android': operating_system = 'linux' # Keep this legacy behavior for now. default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') + if flavor == 'aix': + default_variables.setdefault('SHARED_LIB_SUFFIX', '.a') + else: + default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)') default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)') @@ -1369,7 +1372,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD if target[:3] == 'lib': target = target[3:] target_prefix = 'lib' - target_ext = '.so' + if self.flavor == 'aix': + target_ext = '.a' + else: + target_ext = '.so' elif self.type == 'none': target = '%s.stamp' % target elif self.type != 'executable':
gyp: backport GYP fix to fix AIX shared suffix Required to support the shared library builds on AIX - this sets the shared library suffix within GYP to .a instead of .so on AIX My patch: <URL> changes: PR-URL: <URL>
janeasystems_nodejs-mobile-gyp
train
a21b6f792fd0ef175f53aa75a81191db8dbd28d2
diff --git a/packages/selenium-ide/src/neo/IO/playback/webdriver.js b/packages/selenium-ide/src/neo/IO/playback/webdriver.js index <HASH>..<HASH> 100644 --- a/packages/selenium-ide/src/neo/IO/playback/webdriver.js +++ b/packages/selenium-ide/src/neo/IO/playback/webdriver.js @@ -73,6 +73,10 @@ export default class WebDriverExecutor { return func } + async beforeCommand(_commandObject) {} + + async afterCommand(_commandObject) {} + // Commands go after this line // TODO
add before and after command to the webdriver executor
SeleniumHQ_selenium-ide
train
a2eeec3597740cd79a2b88f24427c3d3e8f072e6
diff --git a/test/tools/javac/lambda/abort/Abort.java b/test/tools/javac/lambda/abort/Abort.java index <HASH>..<HASH> 100644 --- a/test/tools/javac/lambda/abort/Abort.java +++ b/test/tools/javac/lambda/abort/Abort.java @@ -85,7 +85,7 @@ public class Abort { } SimpleJavaFileObject asJFO(java.io.File dir) { - return new SimpleJavaFileObject(URI.create(dir.getAbsolutePath() + "/" + filename), JavaFileObject.Kind.SOURCE) { + return new SimpleJavaFileObject(new java.io.File(dir, filename).toURI(), JavaFileObject.Kind.SOURCE) { @Override public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException { return contents;
<I>: lambda test fails on Windows Summary: fix path separator issue in test Reviewed-by: jjg
wmdietl_jsr308-langtools
train
c126122bdb940eee1989a861231501f28aca19d6
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -85,7 +85,7 @@ yeoman.generators = { * Named Base Generator * {@link NamedBase} */ - NamedBase: yeoman.NamedBase + NamedBase: require('./lib/named-base') }; deprecate.property( 'require(\'yeoman-generator\').generators.Base is deprecated. Use require(\'yeoman-generator\').Base directly',
Fix issue with deprecation warning being log all the time for NamedBase - rel #<I>
yeoman_generator
train
0af3aff96d865f04d555dc1f4360c287cdc9d6c2
diff --git a/ggplot/facets/layouts.py b/ggplot/facets/layouts.py index <HASH>..<HASH> 100644 --- a/ggplot/facets/layouts.py +++ b/ggplot/facets/layouts.py @@ -138,7 +138,7 @@ def layout_base(data, vars=None, drop=True): "used for facetting") base = pd.concat([x for i, x in enumerate(values) if has_all[i]], axis=0) - base.drop_duplicates(inplace=True) + base = base.drop_duplicates() if not drop: base = unique_combs(base) diff --git a/ggplot/geoms/geom_abline.py b/ggplot/geoms/geom_abline.py index <HASH>..<HASH> 100644 --- a/ggplot/geoms/geom_abline.py +++ b/ggplot/geoms/geom_abline.py @@ -47,7 +47,7 @@ class geom_abline(geom): data['xend'] = ranges.x[1] data['y'] = ranges.x[0] * data['slope'] + data['intercept'] data['yend'] = ranges.x[1] * data['slope'] + data['intercept'] - data.drop_duplicates(inplace=True) + data = data.drop_duplicates() for _, gdata in data.groupby('group'): pinfos = self._make_pinfos(gdata, params) diff --git a/ggplot/geoms/geom_hline.py b/ggplot/geoms/geom_hline.py index <HASH>..<HASH> 100644 --- a/ggplot/geoms/geom_hline.py +++ b/ggplot/geoms/geom_hline.py @@ -36,7 +36,7 @@ class geom_hline(geom): data['yend'] = data['yintercept'] data['x'] = ranges.x[0] data['xend'] = ranges.x[1] - data.drop_duplicates(inplace=True) + data = data.drop_duplicates() for _, gdata in data.groupby('group'): pinfos = self._make_pinfos(gdata, params) diff --git a/ggplot/geoms/geom_vline.py b/ggplot/geoms/geom_vline.py index <HASH>..<HASH> 100644 --- a/ggplot/geoms/geom_vline.py +++ b/ggplot/geoms/geom_vline.py @@ -36,7 +36,7 @@ class geom_vline(geom): data['xend'] = data['xintercept'] data['y'] = ranges.y[0] data['yend'] = ranges.y[1] - data.drop_duplicates(inplace=True) + data = data.drop_duplicates() for _, gdata in data.groupby('group'): pinfos = self._make_pinfos(gdata, params) diff --git a/ggplot/utils/utils.py b/ggplot/utils/utils.py index <HASH>..<HASH> 100644 --- a/ggplot/utils/utils.py +++ b/ggplot/utils/utils.py @@ -510,7 +510,7 @@ def remove_missing(df, na_rm=False, vars=None, name='', finite=False): else: txt = 'missing' - df.dropna(inplace=True) + df = df.dropna() df.reset_index(drop=True, inplace=True) if len(df) < n and not na_rm: msg = '{} : Removed {} rows containing {} values.'
Remove unnecessary inplace pandas operations Most `inplace` operations are fake.
has2k1_plotnine
train
cc205b7f1be32f63b4aad35bcb2b497d3d864b43
diff --git a/cmd/dosa/schema.go b/cmd/dosa/schema.go index <HASH>..<HASH> 100644 --- a/cmd/dosa/schema.go +++ b/cmd/dosa/schema.go @@ -196,7 +196,7 @@ func (c *SchemaCmd) doSchemaOpInJavaClient(op string) { return } - fmt.Printf("%s", out) + fmt.Printf("%s", out.String()) } // SchemaCheck holds the options for 'schema check' @@ -404,7 +404,7 @@ func (c *SchemaDump) doSchemaDumpInJavaClient() { return } - fmt.Printf("%s", out) + fmt.Printf("%s", out.String()) } // expandDirectory verifies that each argument is actually a directory or
Add String() to cli output (#<I>)
uber-go_dosa
train
50b609a938c680d64d78322223748669da078256
diff --git a/lib/endtable/couch-connector.js b/lib/endtable/couch-connector.js index <HASH>..<HASH> 100644 --- a/lib/endtable/couch-connector.js +++ b/lib/endtable/couch-connector.js @@ -82,6 +82,8 @@ exports.connector = Class.extend({ loadDocumentByParams: function(params, callback) { + this.fixCommonTypos(params); + callback = this.errorHandler.wrapCallback( callback, 'CouchConnector', @@ -113,6 +115,18 @@ exports.connector = Class.extend({ this.connection.request(viewURL + viewParams, callback); }, + fixCommonTypos: function(params) { + params.keys = params.keys || []; + + if (params.key && !params.keys) { + params.keys = params.key; + } + + if (typeof params.keys == 'string') { + params.keys = [params.keys]; + } + }, + buildViewURL: function(keys, type) { if (this.legacy) { return '/_view/' + type + '/' + this.buildViewName(keys); @@ -127,10 +141,6 @@ exports.connector = Class.extend({ buildViewName: function(keys) { var viewName = 'by'; - - if (typeof keys == 'string') { - keys = [keys]; - } for (var i = 0, key; (key = keys[i]) != null; i++) { viewName += '_' + key; @@ -159,6 +169,8 @@ exports.connector = Class.extend({ createView: function(params, callback) { + this.fixCommonTypos(params); + callback = this.errorHandler.wrapCallback( callback, 'CouchConnector', @@ -196,10 +208,6 @@ exports.connector = Class.extend({ buildEmitKey: function(keys) { var emitKey = 'emit('; - if (typeof keys == 'string') { - keys = [keys]; - } - if (keys.length == 1) { return emitKey + 'doc.' + keys[0] + ', doc);'; } diff --git a/spec/spec.endtable-object.js b/spec/spec.endtable-object.js index <HASH>..<HASH> 100644 --- a/spec/spec.endtable-object.js +++ b/spec/spec.endtable-object.js @@ -220,7 +220,28 @@ describe 'Endtable.Object' end it 'should return an instance of an extending endtable class if an endtable object is sub-classed' - + /* var endtableEngine = new endtable.Engine({ + database: 'test' + }); + + var Person = endtable.Object.extend( + { + testMe: function() { return true } + }, + { + engine: engine, + type: 'person' + } + ); + + endtableObject = new endtable.Object({ + engine: endtableEngine + }).load({ + keys: 'age', + type: 'person', + startkey: 20, + endkey: 40 + }, assertCallback);*/ end end
Cleaned up the couch connector a bit, moving things into a common fixCommonTypos method.
bcoe_endtable
train
d2da22b51d2996c20a159d6cc16a77ea2b861939
diff --git a/tests/templatetags/test_syntax_color.py b/tests/templatetags/test_syntax_color.py index <HASH>..<HASH> 100644 --- a/tests/templatetags/test_syntax_color.py +++ b/tests/templatetags/test_syntax_color.py @@ -63,13 +63,12 @@ class SyntaxColorTagTests(TestCase): content = """{% load syntax_color %} {{ code_string|colorize_table }} """ - expected_result = '''<table class="highlighttable"><tr><td class="linenos"><div class="linenodiv"><pre>1</pre></div></td><td class="code"><div class="highlight"><pre><span></span><span class="nt">&lt;h1&gt;</span>TEST<span class="nt">&lt;/h1&gt;</span> -</pre></div> -</td></tr></table>''' - result = Template(content).render(ctx) - self.assertHTMLEqual(result, expected_result) + self.assertIn('<table class="highlighttable">', result) + self.assertIn('<td class="linenos">', result) + self.assertIn('>1</', result) + self.assertIn('<span class="nt">&lt;h1&gt;</span>TEST<span class="nt">&lt;/h1&gt;</span>', result) def test_colorize_table_should_return_value_if_lexer_class_not_found(self): ctx = Context({'code_string': '<h1>TEST</h1>'})
rework syntax color test to break less between different versions of pygments
django-extensions_django-extensions
train
a052c1c309d86d5c328c8941e1cd897a0a13315b
diff --git a/cake/tests/lib/code_coverage_manager.php b/cake/tests/lib/code_coverage_manager.php index <HASH>..<HASH> 100644 --- a/cake/tests/lib/code_coverage_manager.php +++ b/cake/tests/lib/code_coverage_manager.php @@ -168,6 +168,7 @@ class CodeCoverageManager { } $dump = xdebug_get_code_coverage(); + xdebug_stop_code_coverage(); $coverageData = array(); foreach ($dump as $file => $data) { if (in_array($file, $testObjectFiles)) {
Stopping xdebug code coverage once it is no longer needed on CodeCoverageManager git-svn-id: <URL>
cakephp_cakephp
train
d8a474b3a4b636889f377b501d2ca9e44d5c4361
diff --git a/src/toil/cwl/cwltoil.py b/src/toil/cwl/cwltoil.py index <HASH>..<HASH> 100755 --- a/src/toil/cwl/cwltoil.py +++ b/src/toil/cwl/cwltoil.py @@ -216,13 +216,15 @@ class SelfJob(object): def __init__(self, j, v): self.j = j self.v = v - self._children = j._children def rv(self): return self.v def addChild(self, c): - self.j.addChild(c) + return self.j.addChild(c) + + def hasChild(self, c): + return self.j.hasChild(c) class CWLWorkflow(Job): @@ -293,7 +295,7 @@ class CWLWorkflow(Job): connected = False for inp in step.tool["inputs"]: if "source" in inp: - if wfjob not in promises[inp["source"]]._children: + if not promises[inp["source"]].hasChild(wfjob): promises[inp["source"]].addChild(wfjob) connected = True if not connected: diff --git a/src/toil/job.py b/src/toil/job.py index <HASH>..<HASH> 100644 --- a/src/toil/job.py +++ b/src/toil/job.py @@ -116,6 +116,12 @@ class Job(object): childJob._addPredecessor(self) return childJob + def hasChild(self, childJob): + """ + Check if the job is already a child of this job. + """ + return childJob in self._children + def addService(self, service): """ Add a service of type Job.Service. The Job.Service.start() method
Add hasChild() to Job API so that cwltoil isn't forced to access internal attributes (required because adding a job twice as a child throws an exception).
DataBiosphere_toil
train
e2427cd47fb2c427916000ef52b53e162790d0fa
diff --git a/library/ZExt/Cache/Backend/Decorators/Profileable.php b/library/ZExt/Cache/Backend/Decorators/Profileable.php index <HASH>..<HASH> 100755 --- a/library/ZExt/Cache/Backend/Decorators/Profileable.php +++ b/library/ZExt/Cache/Backend/Decorators/Profileable.php @@ -231,7 +231,7 @@ class Profileable extends DecoratorAbstract implements ProfileableInterface { $pos = strrpos($name, '\\'); $profiler->setName($pos === false ? $name : substr($name, $pos + 1)); - $profiler->setIcon('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAEeUlEQVRIS7VVXUwcVRT+7uwvSwe7sAUEky4l4AqI4ko0pURqaB8INo2xjQ0NUmPCm2jsg+XBxJ80pDXRB01M1JhW8EEeMLYvNukf9glKJbWQNlAbWZafdcHpws7s7O7Mes+dnRV91HCHm7M7d/i+c77znVmGbV5sm/FhE7BDZw6N1AXqKv4vIWNZXL1344upoalRwhIEclgO9Jx4OZJdNL3zNyNgTILDIeXZ8znwwPglSXTmgMTPKTodTjCJH+aApJ5E19EnMHZreuTy2cu9HNoU/131UlWos7NzNn4zzvQlHYGyUlRVVSKV0uEr8iIWX4Pb6cT9B78LULfbDY/XA6/HC6/XC7fLzfFzUB4q2P3CbtxdufvzxQ8vdnLotCAIHg927Htm31VOgGwsi9bw05i+PSOi8jABDweUeJZ0jwhcLhfcHiLgmxNQFZKDQdU0+Op9iJmx+dHB0ac4tCoIQv2hY821zd+tT6zDueksVLCyEsP6nwp2BcpQXOzDQiRqycKroSrcHl6J2yO+0/1sNgvNr8EIGIlzb52r4dDrgqBloOWd6vLqj9UJFT7DJx4WOnO9GRPii35IknXfJqBI1dBztHI5LpNLgVwvZ84PnW9M/JaYEwStJ1vPlj1SdhJTAFe1QGATbSX8+zORWQmIJHiTKfxhxFH9bHVuZHikI3IpMi4I9r67d7hULu3JTeTgddqaWi7ZugsO4hnTZ+oLOc5epmlCMRUUtxTj2pVrx2eHZ0cEwcH3Dl7x+/z7M5NZuPJ6SgRuWzIfhWw8TToja0ocnKxrTxMRLKeWoT+nI3o7emryk8khImCHTx++43f4G7RftILP7R7YUWTMJaE+EAmj77Y8+RI07qKYGoO530T8fvzz8Q/G32RohLv3jd6InJHLN2c2RaOEFAQkAAmYQC1JLGm2aE/TxDNX1SSSSRW6oYN1MSgR5YdLg5eOsGBHcGf3q92rRckid/xOXORigVhNtAFFJEmE7tY2DAO6rotNJLTIqpkXM1A31MkLAxfaWfi1cG1XZ9c8izMszSwJKzjyzbM1d3IrNoTqsLS8KkAILJ1Ji0n38FlwihkwxKBRFVqrhgzLLIz2jzaxtrfb2sPPh8c9yx6ocyqi0ShKSkogy3LBHZR9qX8nskYWleW7kNjYgLxjhwCjAVyNxVFRHsDC4hKXSoPWpCEtpzfHBsceZwfeP3CkMlT5vTlnoiZVg8XFRWtK+f7H4j633EJvHapSKsgihkAc8hMuY7o+jUxlJnf9y+vNrPtM98CTTY9+Gl3TkbqVwh7vHqE1Nfu/rkRFAoEG4KfRqS529LNXTp9of+zUjeU0YkoGygMFkmm5RDSVGkoXjSnlmY9Uh/XHL56Mvc2cicZQCZpqZHz9471+1vdV3ze1wWCfQ+LvdWbb0XJMwTn/IrErs0kIlAgoWm4yeb8ymJ769SPW9npbD3/wmIM5eHJWpmIO+CWGlByVH1VOWZhayp7DCS5RhXVDkNBnwzSwFln7lurmSPwNtz3L+sHZzrXtBH8BYRK7mVh8WPgAAAAASUVORK5CYII='); + $profiler->setIcon('lightning'); } }
ZExt\Cache\Backend\Decorators\Profileable: profiler now passes the name of the icon instead of the icon itself
mikemirten_ZExt
train
59579c0feb25bf493433a130f0213b47562f6269
diff --git a/openquake/supervising/__init__.py b/openquake/supervising/__init__.py index <HASH>..<HASH> 100644 --- a/openquake/supervising/__init__.py +++ b/openquake/supervising/__init__.py @@ -38,6 +38,8 @@ def is_pid_running(pid): :return: True if the process is running, False otherwise """ + # NB: Process ids are not globally unique, so existance of process with + # given pid doesn't guarantee that the job/supervisor/whatever is alive. try: os.kill(pid, 0) except OSError as e:
added a note about pid uniqueness to is_pid_running()
gem_oq-engine
train
fbbf8462271c273b5f7856388c59d4376b469821
diff --git a/Resources/public/base.js b/Resources/public/base.js index <HASH>..<HASH> 100644 --- a/Resources/public/base.js +++ b/Resources/public/base.js @@ -238,4 +238,4 @@ var Admin = { errors.show(); } } -} +};
Missing comma at the end of the file Resources/public/base.js, leading assetic to miss the merge in production env. See: <URL>
sonata-project_SonataAdminBundle
train
b28ce799e27baffc2f577cec87930b2c46b35ae9
diff --git a/glue_vispy_viewers/isosurface/isosurface_visual.py b/glue_vispy_viewers/isosurface/isosurface_visual.py index <HASH>..<HASH> 100644 --- a/glue_vispy_viewers/isosurface/isosurface_visual.py +++ b/glue_vispy_viewers/isosurface/isosurface_visual.py @@ -1,9 +1,9 @@ -# This file implements a IsosurfaceVisual class that includes workarounds for +# This file implements a IsosurfaceVisual class that includes workarounds for # the following VisPy bugs: -# +# # https://github.com/vispy/vispy/pull/1179 # https://github.com/vispy/vispy/pull/1180 -# +# # It is derived from the original code for IsosurfaceVisual in # vispy.visuals.isosurface, which is released under a BSD license included here: # @@ -42,12 +42,20 @@ from __future__ import division +import numpy as np + from vispy.visuals.mesh import MeshVisual from vispy.geometry.isosurface import isosurface from vispy.color import Color from vispy.scene.visuals import create_visual_node +# Find out if we are using the original or new drawing API +from vispy.visuals.isosurface import IsosurfaceVisual as VispyIsosurfaceVisual +HAS_PREPARE_DRAW = hasattr(VispyIsosurfaceVisual, '_prepare_draw') +del VispyIsosurfaceVisual + + class IsosurfaceVisual(MeshVisual): """Displays an isosurface of a 3D scalar array. @@ -124,7 +132,7 @@ class IsosurfaceVisual(MeshVisual): self._update_meshvisual = True self.update() - def _prepare_draw(self, view): + def _update_mesh_visual(self): if self._data is None or self._level is None: return False @@ -144,6 +152,14 @@ class IsosurfaceVisual(MeshVisual): color=self._color) self._update_meshvisual = False - return MeshVisual._prepare_draw(self, view) + if HAS_PREPARE_DRAW: + def _prepare_draw(self, view): + self._update_mesh_visual() + return MeshVisual._prepare_draw(self, view) + else: + def draw(self, transforms): + self._update_mesh_visual() + return MeshVisual.draw(self, transforms) + Isosurface = create_visual_node(IsosurfaceVisual)
Make our Isosurface class compatible with Vispy <I>
glue-viz_glue-vispy-viewers
train
17dc90d9f8a8ed0fc91386ba9f4ec05c522ef099
diff --git a/src/main/java/io/nats/client/impl/PullStatusMessageManager.java b/src/main/java/io/nats/client/impl/PullStatusMessageManager.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/nats/client/impl/PullStatusMessageManager.java +++ b/src/main/java/io/nats/client/impl/PullStatusMessageManager.java @@ -21,7 +21,7 @@ import java.util.List; class PullStatusMessageManager extends MessageManager { - private static final List<Integer> PULL_KNOWN_STATUS_CODES = Arrays.asList(404, 408); + private static final List<Integer> PULL_KNOWN_STATUS_CODES = Arrays.asList(404, 408, 409); private int lastStatusCode = -1; diff --git a/src/main/java/io/nats/client/impl/PushStatusMessageManager.java b/src/main/java/io/nats/client/impl/PushStatusMessageManager.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/nats/client/impl/PushStatusMessageManager.java +++ b/src/main/java/io/nats/client/impl/PushStatusMessageManager.java @@ -20,6 +20,8 @@ import io.nats.client.SubscribeOptions; import io.nats.client.api.ConsumerConfiguration; import io.nats.client.support.Status; +import java.util.Arrays; +import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.atomic.AtomicLong; @@ -28,6 +30,8 @@ import static io.nats.client.support.NatsJetStreamConstants.CONSUMER_STALLED_HDR class PushStatusMessageManager extends MessageManager { + private static final List<Integer> PUSH_KNOWN_STATUS_CODES = Arrays.asList(409); + private static final int THRESHOLD = 3; private final NatsConnection conn; @@ -175,22 +179,20 @@ class PushStatusMessageManager extends MessageManager { if (fc) { _processFlowControl(msg.getReplyTo(), ErrorListener.FlowControlSource.FLOW_CONTROL); } - return true; } - - if (status.isHeartbeat()) { + else if (status.isHeartbeat()) { if (fc) { // status flowControlSubject is set in the beforeQueueProcessor _processFlowControl(extractFcSubject(msg), ErrorListener.FlowControlSource.HEARTBEAT); } - return true; } - - // this status is unknown to us, always use the error handler. - // If it's a sync call, also throw an exception - conn.getOptions().getErrorListener().unhandledStatus(conn, sub, status); - if (syncMode) { - throw new JetStreamStatusException(sub, status); + else if (!PUSH_KNOWN_STATUS_CODES.contains(status.getCode())) { + // If this status is unknown to us, always use the error handler. + // If it's a sync call, also throw an exception + conn.getOptions().getErrorListener().unhandledStatus(conn, sub, status); + if (syncMode) { + throw new JetStreamStatusException(sub, status); + } } return true; }
statuses with code <I> are known statuses (#<I>)
nats-io_java-nats
train
2dc5b4074c7c201afc9e0573a4402f3cc20ffd64
diff --git a/api/charmhub/data.go b/api/charmhub/data.go index <HASH>..<HASH> 100644 --- a/api/charmhub/data.go +++ b/api/charmhub/data.go @@ -96,7 +96,23 @@ func convertCharm(in interface{}) *Charm { func convertChannels(in map[string]params.Channel) map[string]Channel { out := make(map[string]Channel, len(in)) for k, v := range in { - out[k] = Channel(v) + out[k] = Channel{ + ReleasedAt: v.ReleasedAt, + Track: v.Track, + Risk: v.Risk, + Revision: v.Revision, + Size: v.Size, + Version: v.Version, + Platforms: convertPlatforms(v.Platforms), + } + } + return out +} + +func convertPlatforms(in []params.Platform) []Platform { + out := make([]Platform, len(in)) + for i, v := range in { + out[i] = Platform(v) } return out } @@ -132,12 +148,19 @@ type FindResponse struct { } type Channel struct { - ReleasedAt string `json:"released-at"` - Track string `json:"track"` - Risk string `json:"risk"` - Revision int `json:"revision"` - Size int `json:"size"` - Version string `json:"version"` + ReleasedAt string `json:"released-at"` + Track string `json:"track"` + Risk string `json:"risk"` + Revision int `json:"revision"` + Size int `json:"size"` + Version string `json:"version"` + Platforms []Platform `json:"platforms"` +} + +type Platform struct { + Architecture string `json:"architecture"` + OS string `json:"os"` + Series string `json:"series"` } // Charm matches a params.CharmHubCharm
Add Platforms to Channel of ChannelMaps, for info
juju_juju
train
812e4e14f77464ff1d7d7f96a74ad0cfa7baa4f8
diff --git a/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJobs.java b/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJobs.java index <HASH>..<HASH> 100644 --- a/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJobs.java +++ b/helios-testing/src/main/java/com/spotify/helios/testing/TemporaryJobs.java @@ -87,7 +87,7 @@ public class TemporaryJobs implements TestRule { private static final Prober DEFAULT_PROBER = new DefaultProber(); private static final String DEFAULT_LOCAL_HOST_FILTER = ".+"; private static final String DEFAULT_PREFIX_DIRECTORY = "/tmp/helios-temp-jobs"; - private static final String DEFAULT_TEST_REPORT_DIRECTORY = "./helios-test-reports"; + private static final String DEFAULT_TEST_REPORT_DIRECTORY = "./helios-reports/test"; private static final long JOB_HEALTH_CHECK_INTERVAL_MILLIS = SECONDS.toMillis(5); private static final long DEFAULT_DEPLOY_TIMEOUT_MILLIS = MINUTES.toMillis(10);
TemporaryJobs: Change the test report output dir. Instead of helios-test-reports/ use helios-reports/test/. We will also have other subdirs like helios-reports/deploy/ so this lets us collect everything in one place.
spotify_helios
train
4e192a69da721cb29fffb1339078dc8952d083e8
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -52,8 +52,10 @@ setup( 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ],
Update python classifiers (#<I>)
wooey_Wooey
train
e559a7ca265c83f351807d8c7bc35300b3e13b99
diff --git a/spec/mongoid/validatable/uniqueness_spec.rb b/spec/mongoid/validatable/uniqueness_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mongoid/validatable/uniqueness_spec.rb +++ b/spec/mongoid/validatable/uniqueness_spec.rb @@ -5,7 +5,7 @@ describe "Paranoia uniqueness scoped validator" do context "when the document is a root document" do context "when the document is paranoid" do before do - ParanoidPost.validates(:title, uniqueness: { conditions: -> { where(deleted_at: nil) } }) + ParanoidPost.validates(:title, uniqueness: { conditions: -> { ParanoidPost.where(deleted_at: nil) } }) end after do
Use model context in uniqueness validator spec.
simi_mongoid_paranoia
train
08eb4caade9a16f91f5e2da3be18e4e99483b4b8
diff --git a/account/Model/Activate.php b/account/Model/Activate.php index <HASH>..<HASH> 100644 --- a/account/Model/Activate.php +++ b/account/Model/Activate.php @@ -112,11 +112,13 @@ class Activate_Model extends Model ); } $email = Email::instance(); + if (!($data = $user->getArrayCopy())) { + $data = []; + } + $data['url'] = $uri; + $data += $_POST; $email->setSource("monolyth\\account\\$source") - ->setVariables([ - 'name' => $auth['name'], - 'url' => $uri, - ]) + ->setVariables($data) ->headers(['Reply-to' => "noreply@$website"]) ->send($auth['email']); return null;
just add data and hope for the best...
monolyth-php_frontal
train
fafd1a1327b2e9d475e44220f54cfdf555dec76a
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,10 +8,13 @@ language: python python: - '2.7' - '3.5' - - '3.6' + +env: + - NEO4J_VERSION=3.2.2 + - NEO4J_VERSION=latest before_install: - - docker pull neo4j + - docker pull neo4j:$NEO4J_VERSION install: python setup.py testing diff --git a/norduniclient/testing.py b/norduniclient/testing.py index <HASH>..<HASH> 100644 --- a/norduniclient/testing.py +++ b/norduniclient/testing.py @@ -9,6 +9,7 @@ import random import subprocess import base64 import json +from os import environ from socket import error as SocketError try: @@ -20,6 +21,9 @@ from norduniclient.core import init_db __author__ = 'lundberg' +# Run tests with different Neo4j docker image versions using environment variables +NEO4J_VERSION = environ.get('NEO4J_VERSION', 'latest') + class Neo4jTemporaryInstance(object): """ @@ -43,7 +47,7 @@ class Neo4jTemporaryInstance(object): atexit.register(cls._instance.shutdown) return cls._instance - def __init__(self): + def __init__(self, neo4j_version=NEO4J_VERSION): while self._http_port == self._bolt_port: self._http_port = random.randint(40000, 50000) self._bolt_port = random.randint(40000, 50000) @@ -51,7 +55,7 @@ class Neo4jTemporaryInstance(object): self._process = subprocess.Popen(['docker', 'run', '--rm', '--name', '{!s}'.format(self._docker_name), '-p', '{!s}:7474'.format(self.http_port), '-p', '{!s}:7687'.format(self.bolt_port), - 'neo4j:latest'], + 'neo4j:{}'.format(neo4j_version)], stdout=open('/tmp/neo4j-temp.log', 'wb'), stderr=subprocess.STDOUT) self._host = 'localhost' diff --git a/norduniclient/tests/test_models.py b/norduniclient/tests/test_models.py index <HASH>..<HASH> 100644 --- a/norduniclient/tests/test_models.py +++ b/norduniclient/tests/test_models.py @@ -656,7 +656,9 @@ class ModelsTests(Neo4jTestCase): def test_get_dependent_as_types_cable_model(self): cable1 = core.get_node_model(self.neo4jdb, handle_id='28') dependents = cable1.get_dependent_as_types() - self.assertEqual(dependents['links'][0]['name'], 'Optical Link1') + + for optical_link in dependents['links']: + self.assertTrue(optical_link['name'] in ['Optical Link1', 'Optical Link2']) self.assertEqual(dependents['oms'], []) self.assertEqual(dependents['paths'][0]['name'], 'Optical Path1') self.assertEqual(dependents['services'][0]['name'], 'Service2') @@ -708,3 +710,4 @@ class ModelsTests(Neo4jTestCase): customers = service2.get_customers() self.assertEqual(len(customers['customers']), 1) self.assertIsInstance(customers['customers'][0]['node'], models.CustomerModel) + diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,6 +4,7 @@ version = '1.0.9' requires = [ 'neo4j-driver>=1.0.0,<1.1.0', + 'six>=1.10.0', ] testing_requires = [ @@ -19,7 +20,7 @@ setup( license='Apache License, Version 2.0', author='Johan Lundberg', author_email='[email protected]', - description='Neo4j (>=3.0) database client using bolt for NORDUnet network inventory', + description='Neo4j (>=3.2.2) database client using bolt for NORDUnet network inventory', packages=find_packages(), zip_safe=False, install_requires=requires,
Updated travis config. Fixed a test that was sometimes failing due to lists being unordered.
NORDUnet_python-norduniclient
train
bd24c4c9c5619556034606bf2f0604cfe7e98304
diff --git a/src/Services/InputCalibrator.php b/src/Services/InputCalibrator.php index <HASH>..<HASH> 100644 --- a/src/Services/InputCalibrator.php +++ b/src/Services/InputCalibrator.php @@ -126,6 +126,6 @@ class InputCalibrator */ public function cleanString($string) { - return ucwords(str_replace('_', ' ', $string)); + return str_replace('_', ' ', $string); } } diff --git a/tests/InputMakerTest.php b/tests/InputMakerTest.php index <HASH>..<HASH> 100644 --- a/tests/InputMakerTest.php +++ b/tests/InputMakerTest.php @@ -105,7 +105,7 @@ class InputMakerTest extends TestCase ], $entry); $this->assertTrue(is_string($test)); - $this->assertEquals($test, '<input id="Meta[user[id]]" class="form-control" type="number" name="meta[user[id]]" value="1" placeholder="Meta User Id">'); + $this->assertEquals($test, '<input id="Meta[user[id]]" class="form-control" type="number" name="meta[user[id]]" value="1" placeholder="Meta user id">'); } public function testCreateSingleNestedString() @@ -120,7 +120,7 @@ class InputMakerTest extends TestCase ], $entry); $this->assertTrue(is_string($test)); - $this->assertEquals($test, '<input id="Meta[created_at]" class="form-control" type="text" name="meta[created_at]" value="1999-01-01 06:15:00" placeholder="Meta Created At">'); + $this->assertEquals($test, '<input id="Meta[created_at]" class="form-control" type="text" name="meta[created_at]" value="1999-01-01 06:15:00" placeholder="Meta created at">'); } public function testCreateSpecialString()
Fixes issue with html in form labels
GrafiteInc_FormMaker
train
710e0d7de90a32b1b4100aa41db17d3786e4a769
diff --git a/messaging/client_test.go b/messaging/client_test.go index <HASH>..<HASH> 100644 --- a/messaging/client_test.go +++ b/messaging/client_test.go @@ -98,6 +98,45 @@ func TestClient_Close(t *testing.T) { } } +// Ensure a client's Leader URL can be set correctly. +func TestClient_SetLeaderURL(t *testing.T) { + c := messaging.NewClient(100) + + // Nil shouldn't blow up. + var u *url.URL + c.SetLeaderURL(u) + + tests := []struct { + leader string + expected string + }{ + { + leader: "http://localhost", + expected: "http://localhost", + }, + { + leader: "https://localhost", + expected: "https://localhost", + }, + { + leader: "http://localhost:8045", + expected: "http://localhost:8045", + }, + { + leader: "http://127.0.0.1:46684/messaging/messages?replicaID=100", + expected: "http://127.0.0.1:46684", + }, + } + + for _, tt := range tests { + c.SetLeaderURL(MustParseURL(tt.leader)) + if c.LeaderURL().String() != tt.expected { + t.Errorf("Setting client leader URL failed, expected: %s, got: %s", tt.expected, c.LeaderURL().String()) + } + } + +} + // Ensure that a client can publish messages to the broker. func TestClient_Publish(t *testing.T) { c := OpenClient(1000) diff --git a/messaging/intg_test.go b/messaging/intg_test.go index <HASH>..<HASH> 100644 --- a/messaging/intg_test.go +++ b/messaging/intg_test.go @@ -88,6 +88,26 @@ func BenchmarkCluster_Publish(b *testing.B) { c.MustSync(index) } +// Ensure a client is properly redirected to the leader. +func TestClient_LeaderRedirect(t *testing.T) { + c := NewCluster(3) + defer c.Close() + + // Explicity tell the client to send its writes to a follower. + c.Leader().Broker().CreateReplica(100, &url.URL{Host: "localhost"}) + client := messaging.NewClient(100) + client.Open("", []*url.URL{c.Follower().Broker().URL()}) + + // Ensure after writing to leader, client has correct leader URL. + _, err := client.Publish(&messaging.Message{Type: 0, TopicID: 1, Data: make([]byte, 50)}) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + if client.LeaderURL().String() != c.URL().String() { + t.Fatalf("client not using expected leader, expected: %s, actual: %s", c.URL().String(), client.LeaderURL().String()) + } +} + // Cluster represents a set of joined Servers. type Cluster struct { Servers []*Server @@ -115,6 +135,16 @@ func NewCluster(n int) *Cluster { func (c *Cluster) Leader() *Server { return c.Servers[0] } func (c *Cluster) URL() *url.URL { return c.Leader().Broker().URL() } +// Follower returns a follower in the cluster -- if it exists -- and assuming +// that the first node is the leader. +func (c *Cluster) Follower() *Server { + if len(c.Servers) > 1 { + return c.Servers[1] + } else { + return nil + } +} + // MustSync runs sync against every server in the cluster. Panic on error. func (c *Cluster) MustSync(index uint64) { for i, s := range c.Servers {
Add unit test for client tracking leader URL
influxdata_influxdb
train
f5e731cc400ca5924974bc61bb4c04f0fab30887
diff --git a/lib/Doctrine/DBAL/Platforms/PostgreSqlPlatform.php b/lib/Doctrine/DBAL/Platforms/PostgreSqlPlatform.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/DBAL/Platforms/PostgreSqlPlatform.php +++ b/lib/Doctrine/DBAL/Platforms/PostgreSqlPlatform.php @@ -269,10 +269,11 @@ class PostgreSqlPlatform extends AbstractPlatform $whereClause = $namespaceAlias.".nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast') AND "; if (strpos($table, ".") !== false) { list($schema, $table) = explode(".", $table); - $whereClause .= "$classAlias.relname = '" . $table . "' AND $namespaceAlias.nspname = '" . $schema . "'"; + $schema = "'" . $schema . "'"; } else { - $whereClause .= "$classAlias.relname = '" . $table . "'"; + $schema = "ANY(string_to_array((select setting from pg_catalog.pg_settings where name = 'search_path'),','))"; } + $whereClause .= "$classAlias.relname = '" . $table . "' AND $namespaceAlias.nspname = $schema"; return $whereClause; }
[DBAL-<I>] Fix bug where elements in the default schema were not filtered for actually being in the default search path.
doctrine_dbal
train
6ee8cf332125c3c0ba0df6a1d5b3aa66ac6961d6
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -191,9 +191,9 @@ export class Map extends React.Component { const map = this.map; const {google} = this.props; - const maps = google.maps; if (!google) return; + const maps = google.maps; if (map) { let center = this.state.currentLocation;
Fix recentering when google.maps is not yet available
fullstackreact_google-maps-react
train
4ce3d184894f883fe12dd6b1408254c8aa64c2d7
diff --git a/ps_alchemy/__init__.py b/ps_alchemy/__init__.py index <HASH>..<HASH> 100644 --- a/ps_alchemy/__init__.py +++ b/ps_alchemy/__init__.py @@ -5,6 +5,7 @@ # Copyright © 2015 uralbash <[email protected]> # # Distributed under terms of the MIT license. +from sqlalchemy.ext.declarative.api import DeclarativeMeta from pyramid.events import ApplicationCreated from pyramid_sacrud import CONFIG_MODELS from pyramid_sacrud.resources import GroupResource @@ -19,7 +20,9 @@ def models_preparing(app): models = settings[CONFIG_MODELS] def wrapper(resource, parent): - return ListResource(resource, parent=parent) + if isinstance(resource, DeclarativeMeta): + return ListResource(resource, parent=parent) + return resource models = [(k, [wrapper(r, GroupResource(k, v)) for r in v]) for k, v in models] diff --git a/ps_alchemy/resources.py b/ps_alchemy/resources.py index <HASH>..<HASH> 100644 --- a/ps_alchemy/resources.py +++ b/ps_alchemy/resources.py @@ -9,7 +9,6 @@ """ Provide SQLAlchemy resource for pyramid_sacrud. """ -# from zope.interface import implementer from zope.sqlalchemy import ZopeTransactionExtension import sqlalchemy @@ -19,7 +18,6 @@ from sacrud_deform import SacrudForm from sqlalchemy.orm import sessionmaker, scoped_session from pyramid.location import lineage from pyramid.threadlocal import get_current_registry -# from pyramid_sacrud.interfaces import ISacrudResource class BaseResource(object): @@ -53,8 +51,6 @@ class BaseResource(object): self.column = column def value(self, row): - # if col.__class__.__name__ - # in ['Column', 'InstrumentedAttribute'] return getattr( row, get_attrname_by_colname(row, self.column.name) diff --git a/ps_alchemy/views.py b/ps_alchemy/views.py index <HASH>..<HASH> 100644 --- a/ps_alchemy/views.py +++ b/ps_alchemy/views.py @@ -62,10 +62,11 @@ class CRUD(object): permission=PYRAMID_SACRUD_VIEW ) def list_view(self): - items_per_page = getattr(self.context, 'items_per_page', 10) rows = self.context.crud.read() try: - paginator_attr = get_paginator(self.request, items_per_page - 1) + paginator_attr = get_paginator( + self.request, self.context.items_per_page - 1 + ) except ValueError: raise HTTPNotFound params = {
preparing only SQLAlchemy models
sacrud_ps_alchemy
train
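The wrapper in this commit is deliberately narrow: only classes built by SQLAlchemy's declarative machinery (whose metaclass is DeclarativeMeta) get wrapped in a ListResource; everything else in the models setting passes through unchanged. A minimal standalone sketch of that check, assuming a pre-1.4 SQLAlchemy where sqlalchemy.ext.declarative.api exposes DeclarativeMeta as in the diff (the Page model and wrap function are illustrative, not ps_alchemy's API):

from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative.api import DeclarativeMeta

Base = declarative_base()

class Page(Base):
    # A declarative model: type(Page) is DeclarativeMeta.
    __tablename__ = 'pages'
    id = Column(Integer, primary_key=True)

def wrap(resource, parent=None):
    # Wrap only declarative models; return everything else as-is.
    if isinstance(resource, DeclarativeMeta):
        return ('ListResource', resource, parent)
    return resource

print(wrap(Page))           # wrapped together with its parent
print(wrap('not-a-model'))  # passed through unchanged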
7180e5d0bd64a61c536e60d4050a778dd37a6189
diff --git a/Manager/IPWhiteListManager.php b/Manager/IPWhiteListManager.php index <HASH>..<HASH> 100644 --- a/Manager/IPWhiteListManager.php +++ b/Manager/IPWhiteListManager.php @@ -43,6 +43,10 @@ class IPWhiteListManager public function getIPs() { + if (!file_exists($this->ipFile)) { + touch($this->ipFile); + } + return file($this->ipFile, FILE_IGNORE_NEW_LINES); } @@ -50,4 +54,4 @@ class IPWhiteListManager { return in_array($ip, $this->getIPs()); } -} \ No newline at end of file +}
Create the ips file if it does not exist
claroline_CoreBundle
train
49e46f3232e94380336696b48a4a27672d580d2f
diff --git a/wallet/rescan.go b/wallet/rescan.go index <HASH>..<HASH> 100644 --- a/wallet/rescan.go +++ b/wallet/rescan.go @@ -5,6 +5,7 @@ package wallet import ( + "github.com/btcsuite/btcd/txscript" "github.com/btcsuite/btcd/wire" "github.com/btcsuite/btcutil" "github.com/btcsuite/btcwallet/chain" @@ -34,7 +35,7 @@ type RescanFinishedMsg struct { type RescanJob struct { InitialSync bool Addrs []btcutil.Address - OutPoints []*wire.OutPoint + OutPoints map[wire.OutPoint]btcutil.Address BlockStamp waddrmgr.BlockStamp err chan error } @@ -44,7 +45,7 @@ type RescanJob struct { type rescanBatch struct { initialSync bool addrs []btcutil.Address - outpoints []*wire.OutPoint + outpoints map[wire.OutPoint]btcutil.Address bs waddrmgr.BlockStamp errChans []chan error } @@ -78,7 +79,11 @@ func (b *rescanBatch) merge(job *RescanJob) { b.initialSync = true } b.addrs = append(b.addrs, job.Addrs...) - b.outpoints = append(b.outpoints, job.OutPoints...) + + for op, addr := range job.OutPoints { + b.outpoints[op] = addr + } + if job.BlockStamp.Height < b.bs.Height { b.bs = job.BlockStamp } @@ -240,9 +245,19 @@ out: // current best block in the main chain, and is considered an initial sync // rescan. func (w *Wallet) Rescan(addrs []btcutil.Address, unspent []wtxmgr.Credit) error { - outpoints := make([]*wire.OutPoint, len(unspent)) - for i, output := range unspent { - outpoints[i] = &output.OutPoint + outpoints := make(map[wire.OutPoint]btcutil.Address, len(unspent)) + for _, output := range unspent { + _, outputAddrs, _, err := txscript.ExtractPkScriptAddrs( + output.PkScript, w.chainParams, + ) + if err != nil { + return err + } + + outpoints[output.OutPoint] = outputAddrs[0] + if err != nil { + return err + } } job := &RescanJob{
wallet: update rescan to take addrs along with their outpoints
btcsuite_btcwallet
train
bcb5cffb5aaadb70fa457f80cb369faa75147fa8
diff --git a/lib/lanes/api/routing.rb b/lib/lanes/api/routing.rb index <HASH>..<HASH> 100644 --- a/lib/lanes/api/routing.rb +++ b/lib/lanes/api/routing.rb @@ -9,7 +9,7 @@ module Lanes end def root(&block) - API::Root.get('/', &block) + API::Root.get('/*', &block) end [:get, :post, :put, :patch, :delete].each do | method_name |
Use splat to match URLs for HTML5 matching
argosity_hippo
train
5126c3aeb5298a9ccd1e2ac6b51a2b6a4e40f867
diff --git a/zinnia/__init__.py b/zinnia/__init__.py index <HASH>..<HASH> 100644 --- a/zinnia/__init__.py +++ b/zinnia/__init__.py @@ -1,5 +1,5 @@ """Zinnia""" -__version__ = '0.18.dev0' +__version__ = '0.18' __license__ = 'BSD License' __author__ = 'Fantomas42'
Bumping to version <I>
Fantomas42_django-blog-zinnia
train
8aa7da0d42a2fad9cd86f2b015172710fa6a2770
diff --git a/tools/fontbakery-build-fontmetadata.py b/tools/fontbakery-build-fontmetadata.py index <HASH>..<HASH> 100755 --- a/tools/fontbakery-build-fontmetadata.py +++ b/tools/fontbakery-build-fontmetadata.py @@ -99,6 +99,32 @@ BLACKLIST = [ "AdobeBlank", # Testing font, gives ZeroDivisionError: float division by zero ] +# nameID definitions for the name table: +NAMEID_COPYRIGHT_NOTICE = 0 +NAMEID_FONT_FAMILY_NAME = 1 +NAMEID_FONT_SUBFAMILY_NAME = 2 +NAMEID_UNIQUE_FONT_IDENTIFIER = 3 +NAMEID_FULL_FONT_NAME = 4 +NAMEID_VERSION_STRING = 5 +NAMEID_POSTSCRIPT_NAME = 6 +NAMEID_TRADEMARK = 7 +NAMEID_MANUFACTURER_NAME = 8 +NAMEID_DESIGNER = 9 +NAMEID_DESCRIPTION = 10 +NAMEID_VENDOR_URL = 11 +NAMEID_DESIGNER_URL = 12 +NAMEID_LICENSE_DESCRIPTION = 13 +NAMEID_LICENSE_INFO_URL = 14 +# Name ID 15 is RESERVED +NAMEID_TYPOGRAPHIC_FAMILY_NAME = 16 +NAMEID_TYPOGRAPHIC_SUBFAMILY_NAME = 17 +NAMEID_COMPATIBLE_FULL_MACONLY = 18 +NAMEID_SAMPLE_TEXT = 19 +NAMEID_POSTSCRIPT_CID_NAME = 20 +NAMEID_WWS_FAMILY_NAME = 21 +NAMEID_WWS_SUBFAMILY_NAME = 22 +NAMEID_LIGHT_BACKGROUND_PALETTE = 23 +NAMEID_DARK_BACKGROUD_PALETTE = 24 def generate_italic_angle_images(): from PIL import Image, ImageDraw @@ -493,11 +519,31 @@ def get_gfn(fontfile): if fontfname in fontfile: gfn = "{}:{}:{}".format(family, style, weight) break - #except ParseError, KeyError: except: - print ("Failed to detect GFN value for '{}'. Defaults to 'unknown'.".format(fontfile)) pass + if gfn == 'unknown': + #This font lacks a METADATA.pb file and also failed + # to auto-detect the GFN value. As a last resort + # we'll try to extract the info from the NAME table entries. + try: + ttfont = TTFont(fontfile) + for entry in ttfont['name'].names: + if entry.nameID == NAMEID_FONT_FAMILY_NAME: + family = entry.string.decode(entry.getEncoding()).encode('ascii', 'ignore').strip() + if entry.nameID == NAMEID_FONT_SUBFAMILY_NAME: + style, weight = StyleWeight(entry.string.decode(entry.getEncoding()).encode('ascii', 'ignore').strip()) + ttfont.close() + if family != "": #avoid empty string in cases of misbehaved family names in the name table + gfn = "{}:{}:{}".format(family, style, weight) + print ("Detected GFN from name table entries: '{}' (file='{}')".format(gfn, fontfile)) + except: + #This seems to be a really bad font file... + pass + + if gfn == 'unknown': + print ("Failed to detect GFN value for '{}'. Defaults to 'unknown'.".format(fontfile)) + return gfn # Returns fontinfo dict
detect GFN from name table (issue #<I>) As a last resort in cases where font filename does not follow naming guidelines and METADATA.pb is missing.
googlefonts_fontbakery
train
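The fallback added here reads nameIDs 1 and 2 (family and subfamily) straight out of the font's name table with fontTools. A minimal sketch of just that lookup, mirroring the decode-by-declared-encoding used in the diff (the font path is a placeholder):

from fontTools.ttLib import TTFont

NAMEID_FONT_FAMILY_NAME = 1     # per the OpenType name table spec
NAMEID_FONT_SUBFAMILY_NAME = 2

font = TTFont('SomeFont-Regular.ttf')  # hypothetical file
family = subfamily = None
for entry in font['name'].names:
    value = entry.string.decode(entry.getEncoding()).strip()
    if entry.nameID == NAMEID_FONT_FAMILY_NAME:
        family = value
    elif entry.nameID == NAMEID_FONT_SUBFAMILY_NAME:
        subfamily = value
font.close()
print(family, subfamily)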
b1f01cacc92d90122db42fcad59f707a31dc4105
diff --git a/optaplanner-core/src/main/java/org/optaplanner/core/impl/heuristic/selector/common/nearby/BetaDistributionNearbyRandom.java b/optaplanner-core/src/main/java/org/optaplanner/core/impl/heuristic/selector/common/nearby/BetaDistributionNearbyRandom.java index <HASH>..<HASH> 100644 --- a/optaplanner-core/src/main/java/org/optaplanner/core/impl/heuristic/selector/common/nearby/BetaDistributionNearbyRandom.java +++ b/optaplanner-core/src/main/java/org/optaplanner/core/impl/heuristic/selector/common/nearby/BetaDistributionNearbyRandom.java @@ -27,11 +27,11 @@ public class BetaDistributionNearbyRandom implements NearbyRandom { public BetaDistributionNearbyRandom(double betaDistributionAlpha, double betaDistributionBeta) { if (betaDistributionAlpha <= 0) { throw new IllegalArgumentException("The betaDistributionAlpha (" + betaDistributionAlpha - + ") must be positive."); + + ") must be greater than 0."); } if (betaDistributionBeta <= 0) { throw new IllegalArgumentException("The betaDistributionBeta (" + betaDistributionBeta - + ") must be positive."); + + ") must be greater than 0."); } betaDistribution = new BetaDistribution(betaDistributionAlpha, betaDistributionBeta); }
fix error message: 0 is positive, but not strictly positive.
kiegroup_optaplanner
train
2808c47b6b89b1e94db1fa12033c471ef3a48c49
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1048,6 +1048,13 @@ * */ 'watch': function(prop, callback){ this.watcher.watch(this.data, prop, callback); + }, + + /** + * Unwatches the property in the shared data associated with the callback function + * */ + 'unwatch': function(prop, callback){ + this.watcher.unwatch(this.data, prop, callback); } },
Added unwatch function to sharedData Simple unwatch function for the sharedData object, based on watchJS. It is more coherent to use watch and unwatch in the same way than to access the watcher object directly to unwatch a property.
TamkeenLMS_electron-window-manager
train
2b816af9a8769c4c8043acbcde0c66572272f063
diff --git a/lib/modules/blockchain_process/proxy.js b/lib/modules/blockchain_process/proxy.js index <HASH>..<HASH> 100644 --- a/lib/modules/blockchain_process/proxy.js +++ b/lib/modules/blockchain_process/proxy.js @@ -65,7 +65,7 @@ const parseResponse = function (ipc, resBody) { }; exports.serve = async function (ipc, host, port, ws, origin) { - const _origin = origin ? origin.split(',')[0] : void 0; + const _origin = origin ? origin.split(',')[0] : undefined; const start = Date.now(); function awaitTarget() {
eslint doesn't like `void 0` here
embark-framework_embark
train
ac74ce6d8595af55a5fd2763db1e06cfabcca942
diff --git a/client/js/Dialogs/Projects/ProjectsDialog.js b/client/js/Dialogs/Projects/ProjectsDialog.js index <HASH>..<HASH> 100644 --- a/client/js/Dialogs/Projects/ProjectsDialog.js +++ b/client/js/Dialogs/Projects/ProjectsDialog.js @@ -72,16 +72,29 @@ define(['logManager', } }); + var openProject = function (projId) { + self._client.selectProjectAsync(projId,function(){ + self._dialog.modal('hide'); + }); + }; + + this._ul.on("dblclick", "a", function (event) { + selectedId = $(this).attr("data-id"); + + event.stopPropagation(); + event.preventDefault(); + + openProject(selectedId); + }); + this._btnOpen.on('click', function (event) { self._btnOpen.addClass("disabled"); self._btnDelete.addClass("disabled"); - self._client.selectProjectAsync(selectedId,function(){ - self._dialog.modal('hide'); - }); - event.stopPropagation(); event.preventDefault(); + + openProject(selectedId); }); this._btnDelete.on('click', function (event) {
open selected project on double-click Former-commit-id: a9ccd5cebb<I>dc<I>bb8cc0d<I>
webgme_webgme-engine
train
ced5e6f647abbdd3d54d0f32ed18770ff7b8a25d
diff --git a/blockstack_client/backend/nameops.py b/blockstack_client/backend/nameops.py index <HASH>..<HASH> 100644 --- a/blockstack_client/backend/nameops.py +++ b/blockstack_client/backend/nameops.py @@ -56,6 +56,7 @@ from ..operations import fees_update, fees_transfer, fees_revoke, fees_registrat from .safety import * from ..logger import get_logger +from ..utxo import get_unspents import virtualchain from virtualchain.lib.ecdsalib import ecdsa_private_key @@ -69,6 +70,7 @@ class UTXOWrapper(object): Also, cache unspents we fetch upstream. """ def __init__(self, utxo_client): + assert not isinstance(utxo_client, UTXOWrapper) self.utxos = {} self.utxo_client = utxo_client @@ -87,16 +89,16 @@ class UTXOWrapper(object): if self.utxos.has_key(addr): return self.utxos[addr] - unspents = self.utxo_client.get_unspents(addr) + unspents = get_unspents(addr, self.utxo_client) return unspents def __getattr__(self, name): - try: - return getattr(self, name) - except: - return getattr(self.utxo_client, name) + if name == 'get_unspents': + return self.get_unspents + else: + return getattr(self.utxo_client, name) def estimate_dust_fee( tx, fee_estimator ):
fix stack overflow on getattr() with UTXOWrapper
blockstack_blockstack-core
train
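The overflow fixed here is a classic __getattr__ trap: Python only calls __getattr__ for names that normal attribute lookup failed to find, so calling getattr(self, name) inside it for such a name just re-enters __getattr__ until the recursion limit. A standalone illustration of the pitfall and the delegate-to-the-wrapped-object fix, independent of the blockstack code:

class BadWrapper(object):
    def __getattr__(self, name):
        # BUG: 'name' was already missed by normal lookup, so this
        # getattr re-triggers __getattr__ -> RecursionError.
        return getattr(self, name)

class GoodWrapper(object):
    def __init__(self, inner):
        self.inner = inner

    def __getattr__(self, name):
        # Runs only for names not found on self, so delegate
        # straight to the wrapped object instead.
        return getattr(self.inner, name)

class Client(object):
    def get_unspents(self, addr):
        return []

print(GoodWrapper(Client()).get_unspents('addr'))  # [] -- delegated
# BadWrapper().anything would raise RecursionError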
66e28f56b69c7cf24594b728563bf344b2c5df46
diff --git a/cli.js b/cli.js index <HASH>..<HASH> 100755 --- a/cli.js +++ b/cli.js @@ -67,6 +67,7 @@ commander .option('--canvas-capture-mode [type]', '(experimental) Switches to canvas mode, capturing the canvas selected by --selector as image type (default: png)') .option('-q, --quiet', 'Suppress console logging') .option('--executable-path <path>', 'Uses Chromium/Chrome application at specified path for puppeteer') + .option('--remote-url <path>', 'Connect to remote Chromium/Chrome instance using puppeteer.connect()') .option('-L, --launch-arguments <arguments>', 'Custom launch arguments for Puppeteer browser', function (str) { // TODO: make a more sophisticated parser for options that can handle quote marks return str.split(' '); diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -35,7 +35,7 @@ const path = require('path'); const defaultDuration = 5; const defaultFPS = 60; const { overwriteRandom } = require('./lib/overwrite-random'); -const { promiseLoop, getBrowserFrames } = require('./lib/utils'); +const { promiseLoop, getBrowserFrames, getBorwser } = require('./lib/utils'); const initializePageUtils = require('./lib/page-utils'); const initializeMediaTimeHandler = require('./lib/media-time-handler'); @@ -104,7 +104,8 @@ module.exports = function (config) { args: config.launchArguments || [] }; - return puppeteer.launch(launchOptions).then(function (browser) { + + return getBorwser(config.remoteUrl,launchOptions).then(function (browser) { return browser.newPage().then(function (page) { config = Object.assign({ log, diff --git a/lib/utils.js b/lib/utils.js index <HASH>..<HASH> 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -29,7 +29,7 @@ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ - +const puppeteer = require('puppeteer'); const fs = require('fs'); const path = require('path'); const sprintf = require('sprintf-js').sprintf; @@ -119,11 +119,23 @@ const makeFileDirectoryIfNeeded = function (filepath) { } }; + +const getBorwser = async function(url, launchOptions) { + if(url){ + var queryString = Object.keys(launchOptions).map(key => key + '=' + launchOptions[key]).join('&'); + let remote = url+"?"+queryString + return await puppeteer.connect({ browserWSEndpoint: remote }) + }else{ + return await puppeteer.launch(launchOptions) + } +}; + module.exports = { promiseLoop, getBrowserFrames, getSelectorDimensions, writeFile, makeFilePathConverter, - makeFileDirectoryIfNeeded + makeFileDirectoryIfNeeded, + getBorwser }; \ No newline at end of file
Connect to remote Chromium/Chrome instance using puppeteer.connect()
tungs_timesnap
train
cca6b70f1e569f48c4d73db5121524fea8504f7e
diff --git a/lib/Image/Operation/Resize.php b/lib/Image/Operation/Resize.php index <HASH>..<HASH> 100644 --- a/lib/Image/Operation/Resize.php +++ b/lib/Image/Operation/Resize.php @@ -7,14 +7,14 @@ use Timber\ImageHelper; use Timber\Image\Operation as ImageOperation; /** - * Changes image to new size, by shrinking/enlarging - * then cropping to respect new ratio. - * - * Arguments: - * - width of new image - * - height of new image - * - crop method - */ + * Changes image to new size, by shrinking/enlarging + * then cropping to respect new ratio. + * + * Arguments: + * - width of new image + * - height of new image + * - crop method + */ class Resize extends ImageOperation { private $w, $h, $crop; diff --git a/lib/ImageHelper.php b/lib/ImageHelper.php index <HASH>..<HASH> 100644 --- a/lib/ImageHelper.php +++ b/lib/ImageHelper.php @@ -111,7 +111,7 @@ class ImageHelper { * @return boolean true if it's an animated gif, false if not */ public static function is_animated_gif( $file ) { - if ( strpos(strtolower($file), '.gif') === FALSE ) { + if ( strpos(strtolower($file), '.gif') === false ) { //doesn't have .gif, bail return false; } diff --git a/lib/Loader.php b/lib/Loader.php index <HASH>..<HASH> 100644 --- a/lib/Loader.php +++ b/lib/Loader.php @@ -123,7 +123,7 @@ class Loader { */ public function get_loader() { $open_basedir = ini_get('open_basedir'); - $paths = array_merge($this->locations, array( $open_basedir ? ABSPATH : '/')); + $paths = array_merge($this->locations, array($open_basedir ? ABSPATH : '/')); $paths = apply_filters('timber/loader/paths', $paths); $rootPath = '/'; @@ -175,25 +175,32 @@ class Loader { $object_cache = true; } $cache_mode = $this->_get_cache_mode($cache_mode); - if ( self::CACHE_TRANSIENT === $cache_mode || self::CACHE_SITE_TRANSIENT === $cache_mode) { - global $wpdb; - $query = $wpdb->prepare("DELETE FROM $wpdb->options WHERE option_name LIKE '%s'", '_transient_timberloader_%'); - $wpdb->query($query); - return true; + if ( self::CACHE_TRANSIENT === $cache_mode || self::CACHE_SITE_TRANSIENT === $cache_mode ) { + return self::clear_cache_timber_database(); } else if ( self::CACHE_OBJECT === $cache_mode && $object_cache ) { - global $wp_object_cache; - if ( isset($wp_object_cache->cache[self::CACHEGROUP]) ) { - $items = $wp_object_cache->cache[self::CACHEGROUP]; - foreach( $items as $key => $value) { - if (is_multisite() ) { - $key = preg_replace('/^(.*?):/', '', $key); - } - wp_cache_delete($key, self::CACHEGROUP); + return self::clear_cache_timber_object(); + } + return false; + } + + protected static function clear_cache_timber_database() { + global $wpdb; + $query = $wpdb->prepare("DELETE FROM $wpdb->options WHERE option_name LIKE '%s'", '_transient_timberloader_%'); + return $wpdb->query($query); + } + + protected static function clear_cache_timber_object() { + global $wp_object_cache; + if ( isset($wp_object_cache->cache[self::CACHEGROUP]) ) { + $items = $wp_object_cache->cache[self::CACHEGROUP]; + foreach( $items as $key => $value ) { + if ( is_multisite() ) { + $key = preg_replace('/^(.*?):/', '', $key); } - return true; + wp_cache_delete($key, self::CACHEGROUP); } + return true; } - return false; } public function clear_cache_twig() {
ref #<I> -- some things found by Scrutinizer
timber_timber
train
de3404dbc608e598b3ccdbf862972e65198efcf4
diff --git a/src/main/java/org/junit/experimental/theories/Theories.java b/src/main/java/org/junit/experimental/theories/Theories.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/junit/experimental/theories/Theories.java +++ b/src/main/java/org/junit/experimental/theories/Theories.java @@ -73,6 +73,11 @@ public class Theories extends BlockJUnit4ClassRunner { super(klass); } + /** @since 4.13 */ + protected Theories(TestClass testClass) throws InitializationError { + super(testClass); + } + @Override protected void collectInitializationErrors(List<Throwable> errors) { super.collectInitializationErrors(errors); @@ -215,7 +220,7 @@ public class Theories extends BlockJUnit4ClassRunner { protected void runWithCompleteAssignment(final Assignments complete) throws Throwable { - new BlockJUnit4ClassRunner(getTestClass().getJavaClass()) { + new BlockJUnit4ClassRunner(getTestClass()) { @Override protected void collectInitializationErrors( List<Throwable> errors) {
Update Theories to pass the TestClass to BlockJUnit4ClassRunner. This avoids repeated class scanning.
junit-team_junit4
train
850a3ae1c3d59823335b3c0406de1a878af53870
diff --git a/testsuite.py b/testsuite.py index <HASH>..<HASH> 100644 --- a/testsuite.py +++ b/testsuite.py @@ -35,7 +35,8 @@ # # The tests have been roughly arranged in order of time needed. # -# All tests should pass. Report regressions to [email protected] +# All tests should pass. Report regressions to ulfalizer a.t Google's email +# service. import kconfiglib import os
Change email in testsuite.py.
ulfalizer_Kconfiglib
train
91aaaecd40d74607e44ab6f8854dfd717efc986f
diff --git a/lib/pdf/info.rb b/lib/pdf/info.rb index <HASH>..<HASH> 100644 --- a/lib/pdf/info.rb +++ b/lib/pdf/info.rb @@ -1,5 +1,5 @@ +require 'date' unless defined? DateTime require 'pdf/info/exceptions' -require 'date' module PDF class Info @@ -52,6 +52,8 @@ module PDF metadata = {} rows.each do |row| pair = row.split(':', 2) + pair.map!(&:strip) + case pair.first when "Pages" metadata[:page_count] = pair.last.to_i @@ -64,9 +66,11 @@ module PDF when "PDF version" metadata[:version] = pair.last.to_f when "CreationDate" - metadata[:creation_date] = ::DateTime.parse(pair.last) + creation_date = parse_datetime(pair.last) + metadata[:creation_date] = creation_date if creation_date when "ModDate" - metadata[:modification_date] = ::DateTime.parse(pair.last) + modification_date = parse_datetime(pair.last) + metadata[:modification_date] = modification_date if modification_date when /^Page.*size$/ metadata[:pages] ||= [] metadata[:pages] << pair.last.scan(/[\d.]+/).map(&:to_f) @@ -79,5 +83,17 @@ module PDF metadata end + private + + def parse_datetime(value) + DateTime.parse(value) + rescue + begin + DateTime.strptime(value, '%m/%d/%Y %k:%M:%S') + rescue + nil + end + end + end end diff --git a/spec/pdf_info_spec.rb b/spec/pdf_info_spec.rb index <HASH>..<HASH> 100644 --- a/spec/pdf_info_spec.rb +++ b/spec/pdf_info_spec.rb @@ -87,6 +87,58 @@ describe PDF::Info do end end + describe ".process_output" do + subject do + PDF::Info.new('test.pdf') + end + + it "symbolizes all keys" do + output = "a:foo\nb:bar\nc:baz" + [:a, :b, :c].each do |key| + expect(subject.process_output(output)).to have_key key + expect(subject.process_output(output)).to_not have_key key.to_s + end + end + + it "downcases key" do + output = "I AM ALL CAPITAL:I STAY ALL CAPITAL" + expected = {:'i_am_all_capital' => 'I STAY ALL CAPITAL'} + expect(subject.process_output(output)).to include expected + end + + it "replaces whitespace in key with underscore" do + output = "key with space:value without underscore" + expected = {:'key_with_space' => 'value without underscore'} + expect(subject.process_output(output)).to include expected + end + + it "strips whitespace from metadata pair" do + output = " key with space :value without space\nkey without space: value with space " + expected = {:'key_with_space' => 'value without space', :'key_without_space' => 'value with space'} + expect(subject.process_output(output)).to include expected + end + end + + describe ".parse_datetime" do + subject do + pdf_info = PDF::Info.new('test.pdf') + pdf_info.stub!(:command).and_return(output('successful.txt')) + pdf_info + end + + it 'parse standard datetime format' do + expect(subject.send(:parse_datetime, '2001-02-03T04:05:06+07:00')).to be_kind_of DateTime + end + + it 'parse american datetime format' do + expect(subject.send(:parse_datetime, '4/23/2004 18:37:34')).to be_kind_of DateTime + end + + it 'return nil if string can not be parsed' do + expect(subject.send(:parse_datetime, 'asdf')).to be_nil + end + end + describe "running on sample.pdf" do subject do PDF::Info.command_path = "pdfinfo"
Squashed commit of the following: commit fcf<I>a1fbe<I>e1bac<I>fa7de<I>c5d<I>cfc2c3
newspaperclub_pdf_info
train
8c32c6c784006ce11a641a8dbcc135a4eac900d2
diff --git a/lib/seahorse/client/request_context.rb b/lib/seahorse/client/request_context.rb index <HASH>..<HASH> 100644 --- a/lib/seahorse/client/request_context.rb +++ b/lib/seahorse/client/request_context.rb @@ -35,10 +35,6 @@ module Seahorse # @return [String] Name of the API operation called. attr_accessor :operation_name - def operation - @operation ||= config.api.operations[operation_name] - end - # @return [Hash] The hash of request parameters. attr_accessor :params @@ -67,6 +63,11 @@ module Seahorse @metadata[key] = value end + # @return [Model::Operation] + def operation + @operation ||= config.api.operations[operation_name] + end + end end end diff --git a/spec/seahorse/client/base_spec.rb b/spec/seahorse/client/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/seahorse/client/base_spec.rb +++ b/spec/seahorse/client/base_spec.rb @@ -246,7 +246,6 @@ module Seahorse Plugins::Endpoint, Plugins::NetHttp, Plugins::OperationMethods, - Plugins::ValidateParams, ]) end
Moved a method down below the list of attribute accessors.
aws_aws-sdk-ruby
train
7364139118eb2dbd4380fbd3dfb174b843a0923e
diff --git a/src/main/java/org/sahagin/runlib/runresultsgen/HookMethodManager.java b/src/main/java/org/sahagin/runlib/runresultsgen/HookMethodManager.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/sahagin/runlib/runresultsgen/HookMethodManager.java +++ b/src/main/java/org/sahagin/runlib/runresultsgen/HookMethodManager.java @@ -44,7 +44,6 @@ public class HookMethodManager { private TestMethod methodCache = null; // used in getTestMethod method private long startMethodTime; private LinkedHashMap<String, Long> startTimeMap = new LinkedHashMap<>(); - private long latestStartTime; public HookMethodManager(SrcTree srcTree, Config config) { if (srcTree == null) { @@ -256,7 +255,6 @@ public class HookMethodManager { } logger.info(String.format("putting codeLineKey: %s", codeLineKey)); startTimeMap.put(codeLineKey, System.currentTimeMillis()); - latestStartTime = startTimeMap.get(codeLineKey); logger.info(String.format("beforeCodeLineHook: start: %s: %d(%d)", hookedMethodSimpleName, hookedLine, actualHookedLine)); @@ -287,9 +285,8 @@ public class HookMethodManager { hookedArgClassesStr, hookedLine); Long startTime = startTimeMap.get(codeLineKey); if (startTime == null) { - // TODO Reach here when local variable is stored with annotated submethods - logger.info("codeLineKey not found: " + codeLineKey); - startTime = latestStartTime; + // maybe beforeHook for codeLineKey has not been called unexpectedly + throw new RuntimeException("codeLineKey not found: " + codeLineKey); } int executionTime = (int) (System.currentTimeMillis() - startTime); startTimeMap.remove(codeLineKey); diff --git a/src/main/java/org/sahagin/runlib/runresultsgen/RunResultsGenerateHookSetter.java b/src/main/java/org/sahagin/runlib/runresultsgen/RunResultsGenerateHookSetter.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/sahagin/runlib/runresultsgen/RunResultsGenerateHookSetter.java +++ b/src/main/java/org/sahagin/runlib/runresultsgen/RunResultsGenerateHookSetter.java @@ -170,22 +170,15 @@ public class RunResultsGenerateHookSetter implements ClassFileTransformer { String initializeSrc = hookInitializeSrc(); boolean transformed = false; - // iterate code body in the inverse order + // iterate code body in the inverse order, // so that beforeHook is always inserted after the afterHook of the previous line // even if target line of these two hooks are the same for (int i = method.getCodeBody().size() - 1; i >= 0; i--) { int hookedLine = method.getCodeBody().get(i).getStartLine(); - int beforeHookInsertedLine = beforeHookInsertLine(method, i); - if (beforeHookInsertedLine != -1) { - int actualBeforeHookInsertedLine = ctMethod.insertAt(beforeHookInsertedLine, false, null); - ctMethod.insertAt(beforeHookInsertedLine, - String.format("%s%s.beforeCodeLineHook(\"%s\",\"%s\",\"%s\",\"%s\",%d, %d);", - initializeSrc, hookClassName, classQualifiedName, - methodSimpleName, methodSimpleName, - methodArgClassesStr, hookedLine, actualBeforeHookInsertedLine)); - transformed = true; - } + // insert afterHook first and beforeHook second in each iteration, + // so that beforeHook is always inserted before the afterHook + // even if actual inserted lines for these two hooks are the same int afterHookInsertedLine = afterHookInsertLine(method, i); if (afterHookInsertedLine != -1) { @@ -197,6 +190,17 @@ public class RunResultsGenerateHookSetter implements ClassFileTransformer { methodArgClassesStr, hookedLine, actualAfterHookInsertedLine)); transformed = true; } + + int beforeHookInsertedLine = beforeHookInsertLine(method, i); + if 
(beforeHookInsertedLine != -1) { + int actualBeforeHookInsertedLine = ctMethod.insertAt(beforeHookInsertedLine, false, null); + ctMethod.insertAt(beforeHookInsertedLine, + String.format("%s%s.beforeCodeLineHook(\"%s\",\"%s\",\"%s\",\"%s\",%d, %d);", + initializeSrc, hookClassName, classQualifiedName, + methodSimpleName, methodSimpleName, + methodArgClassesStr, hookedLine, actualBeforeHookInsertedLine)); + transformed = true; + } } return transformed;
Sometimes beforeHook was called after afterHook
SahaginOrg_sahagin-java
train
334bfc0e382669c4ac1a98ec14d29cd070e3ea93
diff --git a/lib/vlc-client/connection.rb b/lib/vlc-client/connection.rb index <HASH>..<HASH> 100644 --- a/lib/vlc-client/connection.rb +++ b/lib/vlc-client/connection.rb @@ -19,7 +19,7 @@ module VLC # Connects to VLC RC interface on Client#host and Client#port def connect @socket = TCPSocket.new(@host, @port) - 2.times { read(0.4) } #Clean the reading channel + 2.times { read(0.4) rescue nil } #Clean the reading channel true rescue Errno::ECONNREFUSED => e raise VLC::ConnectionRefused, "Could not connect to #{@host}:#{@port}: #{e}"
Allow reading channel cleanup to timeout silently
mguinada_vlc-client
train
c454717fa67df71b4a3c2f16cd0f6bc186148a04
diff --git a/core/chain_indexer_test.go b/core/chain_indexer_test.go index <HASH>..<HASH> 100644 --- a/core/chain_indexer_test.go +++ b/core/chain_indexer_test.go @@ -18,6 +18,7 @@ package core import ( "context" + "errors" "fmt" "math/big" "math/rand" @@ -224,7 +225,10 @@ func (b *testChainIndexBackend) Process(ctx context.Context, header *types.Heade //t.processCh <- header.Number.Uint64() select { case <-time.After(10 * time.Second): - b.t.Fatal("Unexpected call to Process") + b.t.Error("Unexpected call to Process") + // Can't use Fatal since this is not the test's goroutine. + // Returning error stops the chainIndexer's updateLoop + return errors.New("Unexpected call to Process") case b.processCh <- header.Number.Uint64(): } return nil
core: fix potential race in chainIndexerTest (#<I>)
ethereum_go-ethereum
train
e7ee71d4a3534c477e307b025701143ad055fb41
diff --git a/lib/websearch_templates.py b/lib/websearch_templates.py index <HASH>..<HASH> 100644 --- a/lib/websearch_templates.py +++ b/lib/websearch_templates.py @@ -2066,12 +2066,12 @@ class Template: out += '</table>' if row.has_key ('viewsimilarity'): - out += '<p>&nbsp' + out += '<p>&nbsp;' out += self.tmpl_print_record_list_for_similarity_boxen ( _("People who viewed this page also viewed:"), row ['viewsimilarity'], ln) if row.has_key ('reviews'): - out += '<p>&nbsp' + out += '<p>&nbsp;' out += row['reviews'] if row.has_key ('comments'):
Fixed &nbsp; typos, thanks to Ferran Jorba.
inveniosoftware_invenio-records
train
2a26c7600650e5146bf847d95ca00108c9c2b2ca
diff --git a/src/java/com/threerings/media/util/BackgroundTiler.java b/src/java/com/threerings/media/util/BackgroundTiler.java index <HASH>..<HASH> 100644 --- a/src/java/com/threerings/media/util/BackgroundTiler.java +++ b/src/java/com/threerings/media/util/BackgroundTiler.java @@ -1,5 +1,5 @@ // -// $Id: BackgroundTiler.java,v 1.3 2002/09/25 21:49:53 shaper Exp $ +// $Id: BackgroundTiler.java,v 1.4 2002/10/22 02:02:40 shaper Exp $ package com.threerings.media.util; @@ -36,6 +36,13 @@ public class BackgroundTiler _h3 = _height/3; _ch3 = _height-2*_h3; + // make sure the image suits our minimum useful dimensions + if (_w3 <= 0 || _cw3 <= 0 || _h3 <= 0 || _ch3 <= 0) { + Log.warning("Backgrounder given source image of insufficient " + + "size for tiling [src=" + src + "]."); + return; + } + // create our sub-divided images _tiles = new BufferedImage[9]; int[] sy = { 0, _h3, _h3+_ch3 };
Deal gracefully with images that we can't usefully tile (which includes the default 1x1 transparent image we're given if the image manager failed to load an image successfully.) git-svn-id: svn+ssh://src.earth.threerings.net/narya/trunk@<I> <I>f4-<I>e9-<I>-aa3c-eee0fc<I>fb1
threerings_narya
train
27b480ba552adb256d5ee1338da981bb3a60dc62
diff --git a/oauthlib/oauth2/rfc6749/tokens.py b/oauthlib/oauth2/rfc6749/tokens.py index <HASH>..<HASH> 100644 --- a/oauthlib/oauth2/rfc6749/tokens.py +++ b/oauthlib/oauth2/rfc6749/tokens.py @@ -180,11 +180,18 @@ class BearerToken(TokenBase): def create_token(self, request, refresh_token=False): """Create a BearerToken, by default without refresh token.""" + + if callable(self.expires_in): + expires_in = self.expires_in(request) + else: + expires_in = self.expires_in + token = { 'access_token': self.token_generator(request), - 'expires_in': self.expires_in, + 'expires_in': expires_in, 'token_type': 'Bearer', } + if request.scopes is not None: token['scope'] = ' '.join(request.scopes)
The ability to vary expires_in per request.
oauthlib_oauthlib
train
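The value-or-callable pattern introduced above keeps a fixed lifetime trivial while letting callers compute one per request. A minimal sketch outside oauthlib (the Request class and names here are illustrative, not oauthlib's API):

def resolve_expires_in(expires_in, request):
    # Accept either a plain number or a callable taking the request.
    if callable(expires_in):
        return expires_in(request)
    return expires_in

class Request(object):
    def __init__(self, client_id):
        self.client_id = client_id

per_client = lambda req: 60 if req.client_id == 'untrusted' else 3600

print(resolve_expires_in(3600, Request('any')))              # 3600
print(resolve_expires_in(per_client, Request('untrusted')))  # 60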
d69278229396a7f89a2bdf4b36f97b92054521b3
diff --git a/lib/makewcs.py b/lib/makewcs.py index <HASH>..<HASH> 100644 --- a/lib/makewcs.py +++ b/lib/makewcs.py @@ -60,9 +60,8 @@ import string,types, os.path import pyfits from pydrizzle import drutil -from pydrizzle.distortion import models +from pydrizzle.distortion import models,mutil from pytools import fileutil, wcsutil, parseinput -from pydrizzle.distortion import mutil import numpy as N yes = True @@ -79,7 +78,7 @@ PARITY = {'WFC':[[1.0,0.0],[0.0,-1.0]],'HRC':[[-1.0,0.0],[0.0,1.0]], NUM_PER_EXTN = {'ACS':3,'WFPC2':1,'STIS':3,'NICMOS':5, 'WFC3':3} -__version__ = '0.8.2dev (4 March 2008)' +__version__ = '0.8.3dev (8 May 2008)' def run(input,quiet=yes,restore=no,prepend='O'): print "+ MAKEWCS Version %s" % __version__ @@ -325,6 +324,7 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= fy = idcmodel.cy refpix = idcmodel.refpix order = idcmodel.norder + # # Look for any subarray offset # @@ -352,6 +352,13 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= if ltv1 != 0. or ltv2 != 0.: fx,fy = idcmodel.shift(idcmodel.cx,idcmodel.cy,offsetx,offsety) + if hdr.has_key('WFCTDD') and hdr['WFCTDD'] == 'T': + # Implement time-dependent correction here + alpha,beta = mutil.compute_wfc_tdd_coeffs(hdr['date-obs']) + else: + alpha= 0.0 + beta = 0.0 + # Extract the appropriate information for reference chip rfx,rfy,rrefpix,rorder=mutil.readIDCtab(idctab,chip=Nrefchip, direction='forward', filter1=filter1,filter2=filter2,offtab=offtab, @@ -402,7 +409,7 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= R.cd12=parity[0][0] * -sin(pv*pi/180.0)*R_scale R.cd21=parity[1][1] * sin(pv*pi/180.0)*R_scale R.cd22=parity[1][1] * cos(pv*pi/180.0)*R_scale - + if not quiet: print " Reference Chip Scale (arcsec/pix): ",rrefpix['PSCALE'] @@ -462,7 +469,7 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= delYX=fy[1,1]/R_scale/3600. delXY=fx[1,0]/R_scale/3600. delYY=fy[1,0]/R_scale/3600. - + # Convert to radians rr=dtheta*pi/180.0 @@ -499,8 +506,9 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= New.cd11 = New.cd11*VA_fac New.cd12 = New.cd12*VA_fac New.cd21 = New.cd21*VA_fac - New.cd22 = New.cd22*VA_fac - + New.cd22 = New.cd22*VA_fac + + # Store new one # archive=yes specifies to also write out archived WCS keywords # overwrite=no specifies do not overwrite any pre-existing archived keywords @@ -545,7 +553,7 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= _new_extn.header.update(Akey,Aval) _new_extn.header.update(Bkey,Bval) - + # Update the SIP flag keywords as well #iraf.hedit(image,"CTYPE1","RA---TAN-SIP",verify=no,show=no) #iraf.hedit(image,"CTYPE2","DEC--TAN-SIP",verify=no,show=no) @@ -557,12 +565,26 @@ def _update(image,idctab,nimsets,quiet=None,instrument=None,prepend=None,nrchip= #iraf.hedit(image,"B_ORDER","%d" % order,add=yes,verify=no,show=no) _new_extn.header.update("A_ORDER",order) _new_extn.header.update("B_ORDER",order) + + # Update header with additional keywords required for proper + # interpretation of SIP coefficients by PyDrizzle. 
+ _new_extn.header.update("TDDALPHA",alpha) + _new_extn.header.update("TDDBETA",beta) + _new_extn.header.update("SICSPS",refpix['PSCALE']) + _new_extn.header.update("SICS1POS",refpix['V2REF']) + _new_extn.header.update("SICS2POS",refpix['V3REF']) + _new_extn.header.update("SICSROT",refpix['THETA']) + _new_extn.header.update("OCX10",fx[1][0]) + _new_extn.header.update("OCX11",fx[1][1]) + _new_extn.header.update("OCY10",fy[1][0]) + _new_extn.header.update("OCY11",fy[1][1]) + + # Close image now fimg.close() del fimg - def diff_angles(a,b): """ Perform angle subtraction a-b taking into account small-angle differences across 360degree line. """
Updated 'makewcs' to <I>dev to report additional keywords to the SCI extension header in order to allow full interpretation of the SIP coeffs by PyDrizzle (convert SIP into IDCTAB coeffs). The computation of TDD alpha and beta for ACS/WFC was also added, but their values are simply recorded as keywords and not applied directly anywhere in this code. WJH git-svn-id: <URL>
spacetelescope_stsci.tools
train
80e736a7a78dbbe7890b6436228cacb2e0927922
diff --git a/openquake/calculators/views.py b/openquake/calculators/views.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/views.py +++ b/openquake/calculators/views.py @@ -1013,7 +1013,7 @@ def view_delta_loss(token, dstore): Estimate the stocastic error on the loss curve by splitting the events in odd and even. Example: - $ oq show delta_loss:1 # consider the second loss type + $ oq show delta_loss # consider the first loss type """ if ':' in token: _, li = token.split(':')
Improved comment [ci skip]
gem_oq-engine
train
35f32db70e7d130dcb882971a88c7a8f746827b1
diff --git a/packages/game-ui/lib/player-box/index.js b/packages/game-ui/lib/player-box/index.js index <HASH>..<HASH> 100644 --- a/packages/game-ui/lib/player-box/index.js +++ b/packages/game-ui/lib/player-box/index.js @@ -5,6 +5,7 @@ var BOX_WIDTH = 600, BOX_HEIGHT_OTHERS = 300; var q = require("q"), + _ = require("lodash"), PlayerDcCards = require("./player-dc-cards"), PlayerCars = require("./player-cars"), AvatarDisplay = require("./avatar-display"); @@ -13,6 +14,8 @@ function PlayerBox(user, isMe, debugMode, callbacks) { this.Container_constructor(); this._setup(user, isMe, debugMode, callbacks); + + this._disabledDcCards = {}; } var p = createjs.extend(PlayerBox, createjs.Container); @@ -116,6 +119,9 @@ p._dcCardClick = function(e) { if(!this.defPlayedCardId) return; + if(this._disabledDcCards[e.cardIndex]) + return; + var card = this._player.dcCards[e.cardIndex]; this._callbacks.canPlayDcCard(card.id) .done(function(canPlay) { @@ -241,8 +247,10 @@ p.removeCar = function(carIdx, transitionTime) { }; }; -p.removeDcCard = function(carIdx, transitionTime) { - let cardDisp = this._playerDcCards.removeCard(carIdx, transitionTime); +p.removeDcCard = function(cardIdx, transitionTime) { + this._disabledDcCards[cardIdx] = false; + + let cardDisp = this._playerDcCards.removeCard(cardIdx, transitionTime); let coords = this._playerDcCards.localToLocal(cardDisp.x, cardDisp.y, this); @@ -255,8 +263,18 @@ p.removeDcCard = function(carIdx, transitionTime) { }; }; +p.disableDcCard = function(cardIdx) { + this._disabledDcCards[cardIdx] = true; +}; + p.removeRandomDcCard = function(transitionTime) { - let cardIdx = Math.floor(Math.random() * this._player.dcCards.length); + let possibleIndices = []; + for(let i = 0; i < this._player.dcCards.length; ++i) { + if(!this._disabledDcCards[i]) + possibleIndices.push(i); + } + + let cardIdx = _.sample(possibleIndices); return this.removeDcCard(cardIdx, transitionTime); }; @@ -290,15 +308,25 @@ p.stopAskingForCar = function() { this.defCarId = null; }; -p._highlightDcCardIfCanPlay = function(idx, canPlay) { +p._getDcCardIdxFromId = function(cardId) { + return _.findIndex(this._player.dcCards, { id: cardId }); +}; + +p._highlightDcCardIfCanPlay = function(cardId, canPlay) { + let idx = this._getDcCardIdxFromId(cardId); + if(canPlay) this._playerDcCards.highlightCard(idx); }; p._highlightPlayableCards = function() { - for(var i = 0; i < this._player.dcCards.length; ++i) { - this._callbacks.canPlayDcCard(this._player.dcCards[i].id) - .done(this._highlightDcCardIfCanPlay.bind(this, i)); + for(let i = 0; i < this._player.dcCards.length; ++i) { + if(this._disabledDcCards[i]) + continue; + + let id = this._player.dcCards[i].id; + this._callbacks.canPlayDcCard(id) + .done(this._highlightDcCardIfCanPlay.bind(this, id)); } }; diff --git a/packages/game-ui/lib/player-hand.js b/packages/game-ui/lib/player-hand.js index <HASH>..<HASH> 100644 --- a/packages/game-ui/lib/player-hand.js +++ b/packages/game-ui/lib/player-hand.js @@ -1,6 +1,8 @@ // Abstract class representing a displayable container of cards "use strict"; +const q = require("q"); + function PlayerHand(cardsData, noRandomize) { this.Container_constructor(); @@ -112,8 +114,9 @@ p.removeCard = function(cardIdx, transitionTime) { }; p.highlightCard = function(cardIndex) { - this._cardSlots[cardIndex].highlight(); - this._cardSlots[cardIndex].cursor = "pointer"; + let cardDisp = this._cardSlots[cardIndex]; + cardDisp.highlight(); + cardDisp.cursor = "pointer"; }; p.unhighlightCard = function(cardIndex) {
Make highlighting for second card work properly
ryb73_dealers-choice-meta
train
3814512d39d9d3d22f1a2cc2d2ec96dc27ea3c16
diff --git a/lib/ruby_bugzilla.rb b/lib/ruby_bugzilla.rb index <HASH>..<HASH> 100644 --- a/lib/ruby_bugzilla.rb +++ b/lib/ruby_bugzilla.rb @@ -3,9 +3,9 @@ require 'linux_admin' class RubyBugzilla - CMD = '/usr/bin/bugzilla' - COOKIES_FILE = File.expand_path('~') + '/.bugzillacookies' - CREDS_FILE = File.expand_path('~') + '/.bugzilla_credentials.yaml' + CMD = `which bugzilla`.chomp + COOKIES_FILE = File.expand_path('~/.bugzillacookies') + CREDS_FILE = File.expand_path('~/.bugzilla_credentials.yaml') def self.username=(un) @username = un
Removed hardcoding of python-bugzilla path in favor of one on the PATH.
ManageIQ_active_bugzilla
train
044a1707d14de688bf815e4093fb0af49dc29f92
diff --git a/gwpy/frequencyseries/frequencyseries.py b/gwpy/frequencyseries/frequencyseries.py index <HASH>..<HASH> 100644 --- a/gwpy/frequencyseries/frequencyseries.py +++ b/gwpy/frequencyseries/frequencyseries.py @@ -437,8 +437,8 @@ class FrequencySeries(Series): epoch = self.epoch.gps if self.f0.to('Hz').value: raise ValueError( - f"Cannot convert FrequencySeries to PyCBC with f0 = {self.f0}. " - "Starting frequency must be equal to 0 Hz." + f"Cannot convert FrequencySeries to PyCBC with f0 = {self.f0}." + " Starting frequency must be equal to 0 Hz." ) return types.FrequencySeries(self.value, delta_f=self.df.to('Hz').value,
Move space in to_pycbc() string to later line
gwpy_gwpy
train
919c16194535565aefd874e2e64307d87b9a9f1d
diff --git a/godotenv_test.go b/godotenv_test.go index <HASH>..<HASH> 100644 --- a/godotenv_test.go +++ b/godotenv_test.go @@ -5,6 +5,13 @@ import ( "testing" ) +func parseAndCompare(t *testing.T, rawEnvLine string, expectedKey string, expectedValue string) { + key, value, _ := parseLine(rawEnvLine) + if key != expectedKey || value != expectedValue { + t.Errorf("Expected '%v' to parse as '%v' => '%v', got '%v' => '%v' instead", rawEnvLine, expectedKey, expectedValue, key, value) + } +} + func loadEnvAndCompareValues(t *testing.T, envFileName string, expectedValues map[string]string) { err := Load(envFileName) if err != nil { @@ -39,3 +46,8 @@ func TestLoadPlainEnv(t *testing.T) { loadEnvAndCompareValues(t, envFileName, plainValues) } + +func TestParsing(t *testing.T) { + // unquoted values + parseAndCompare(t, "FOO=bar", "FOO", "bar") +}
Start porting over parser tests.
joho_godotenv
train
1cdaf2c263072635aedcf33cfad9b2589fd2eb51
diff --git a/org.jenetics/src/main/java/org/jenetics/stat/Histogram.java b/org.jenetics/src/main/java/org/jenetics/stat/Histogram.java index <HASH>..<HASH> 100644 --- a/org.jenetics/src/main/java/org/jenetics/stat/Histogram.java +++ b/org.jenetics/src/main/java/org/jenetics/stat/Histogram.java @@ -64,7 +64,7 @@ import org.jenetics.internal.util.Hash; * * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a> * @since 1.0 - * @version 2.0 &mdash; <em>$Date: 2014-07-10 $</em> + * @version 2.0 &mdash; <em>$Date: 2014-07-16 $</em> */ public class Histogram<C> implements Consumer<C> { @@ -364,17 +364,9 @@ public class Histogram<C> implements Consumer<C> { public static <C extends Comparable<? super C>> Histogram<C> of( final C... separators ) { - return new Histogram<C>(COMPARATOR, separators); + return new Histogram<C>((o1, o2) -> o1.compareTo(o2), separators); } - @SuppressWarnings({"rawtypes", "unchecked"}) - private static final Comparator COMPARATOR = new Comparator() { - @Override - public int compare(final Object o1, final Object o2) { - return ((Comparable)o1).compareTo(o2); - } - }; - /** * Return a <i>histogram</i> for {@link Double} values. The <i>histogram</i> * array of the returned {@link Histogram} will look like this:
Replace inner class with lambda expression.
jenetics_jenetics
train
87d87ef9a8ddd2ae442b0aced22d5b497c7e1415
diff --git a/lib/trooper/dsl/bundler.rb b/lib/trooper/dsl/bundler.rb index <HASH>..<HASH> 100644 --- a/lib/trooper/dsl/bundler.rb +++ b/lib/trooper/dsl/bundler.rb @@ -20,6 +20,10 @@ module Trooper def using_bundler? File.exists? "Gemfile" end + + def ruby_bin_path + config[:ruby_bin_path] || "" + end end end diff --git a/lib/trooper/dsl/rake.rb b/lib/trooper/dsl/rake.rb index <HASH>..<HASH> 100644 --- a/lib/trooper/dsl/rake.rb +++ b/lib/trooper/dsl/rake.rb @@ -5,6 +5,12 @@ module Trooper def rake(command) run "#{ruby_bin_path}rake #{command}" end + + private + + def ruby_bin_path + config[:ruby_bin_path] || "" + end end end
fixed a problem with ruby_bin_path not being available in the module
madwire_trooper
train
be7c8b013ec7cc68711011cbdd17edf801b3df07
diff --git a/lib/codebot/core.rb b/lib/codebot/core.rb index <HASH>..<HASH> 100644 --- a/lib/codebot/core.rb +++ b/lib/codebot/core.rb @@ -51,10 +51,9 @@ module Codebot def join ipc = Thread.new { @ipc_server.join && stop } web = Thread.new { @web_server.join && stop } - irc = Thread.new { @irc_client.join && stop } ipc.join web.join - irc.join + @irc_client.join end # Requests that the running threads migrate to an updated configuration.
Allow starting when no networks are set up
olabini_codebot
train
0708535c4ebd17dc6fe6e840ca6b785320e70dd0
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -3,7 +3,7 @@ const fs = require('fs'); const path = require('path'); const posthtml = require('posthtml'); -const parser = require('posthtml-parser').default; +const parser = require('posthtml-parser').parser; const {match} = require('posthtml/lib/api'); const expressions = require('posthtml-expressions');
Update posthtml-parser require Align the posthtml-parser require with a change introduced to posthtml-parser: <URL>
posthtml_posthtml-include
train
dccdb6ba89cbd1b52fdcb97cbb6054008983e3e6
diff --git a/lib/retina_tag/engine.rb b/lib/retina_tag/engine.rb index <HASH>..<HASH> 100644 --- a/lib/retina_tag/engine.rb +++ b/lib/retina_tag/engine.rb @@ -15,7 +15,7 @@ module RetinaTag retina_els.slice!(-1) retina_path = "#{retina_els.join('.')}@2x.#{extension}" - if !Rails.application.assets.find_asset(retina_path).nil? + if retina_asset_present?(retina_path) hidpi_asset_path = asset_path(retina_path) end rescue @@ -35,6 +35,15 @@ module RetinaTag image_tag_without_retina(source, options_default) end + def retina_asset_present?(path) + if Rails.application.assets.present? + Rails.application.assets.find_asset(path).present? + else + Rails.application.assets_manifest.files.values.any? do |asset| + asset["logical_path"] == path + end + end + end end
Fix for sprockets 3 in production
davydotcom_retina_tag
train
66f8b461a270108f4bcdf9decd974faba655bf89
diff --git a/bgen_reader/_genotype.py b/bgen_reader/_genotype.py index <HASH>..<HASH> 100644 --- a/bgen_reader/_genotype.py +++ b/bgen_reader/_genotype.py @@ -71,8 +71,13 @@ def read_genotype_partition( ncombs = lib.bgen_ncombs(vg) p = full((nsamples, ncombs), nan, dtype=float64) lib.bgen_read_genotype(bgen, vg, ffi.cast("double *", p.ctypes.data)) + phased = lib.bgen_phased(vg) + ploidy = [lib.bgen_ploidy(vg, i) for i in range(nsamples)] + missing = [lib.bgen_missing(vg, i) for i in range(nsamples)] lib.bgen_close_genotype(vg) - genotypes.append({"probs": p, "phased": -1, "ploidy": [], "missing": []}) + genotypes.append( + {"probs": p, "phased": phased, "ploidy": ploidy, "missing": missing} + ) return genotypes diff --git a/bgen_reader/test/test_bgen_reader.py b/bgen_reader/test/test_bgen_reader.py index <HASH>..<HASH> 100644 --- a/bgen_reader/test/test_bgen_reader.py +++ b/bgen_reader/test/test_bgen_reader.py @@ -2,16 +2,16 @@ from __future__ import unicode_literals import os import stat -import sys from contextlib import contextmanager -from dask.delayed import Delayed +import dask.dataframe as dd import pytest +from dask.delayed import Delayed from numpy import isnan from numpy.testing import assert_, assert_allclose, assert_equal from pandas import Series -from bgen_reader import create_metafile, example_files, read_bgen +from bgen_reader import example_files, read_bgen try: FileNotFoundError @@ -109,13 +109,12 @@ def noread_permission(path): os.chmod(path, perm) -def test_bgen_reader_delayed(): +def test_bgen_reader_lazy_types(): with example_files("haplotypes.bgen") as filepath: bgen = read_bgen(filepath, verbose=False) - variants = bgen["variants"] samples = bgen["samples"] - genotype = bgen["genotype"] - assert_(isinstance(genotype[0], Delayed)) + assert_(isinstance(bgen["genotype"][0], Delayed)) + assert_(isinstance(bgen["variants"], dd.DataFrame)) def test_bgen_reader_phased_genotype(): @@ -337,20 +336,14 @@ def test_bgen_reader_complex(): g = bgen["genotype"][-1].compute()["probs"][-1] assert_allclose(g[:5], [0, 0, 0, 1, 0]) + ploidy = bgen["genotype"][0].compute()["ploidy"] + assert_allclose(ploidy, [1, 2, 2, 2]) + ploidy = bgen["genotype"][-1].compute()["ploidy"] + assert_allclose(ploidy, [4, 4, 4, 4]) -# X = bgen["X"] - -# assert_allclose(X[0].compute().sel(data="ploidy"), [1, 2, 2, 2]) -# assert_allclose(X[-1].compute().sel(data="ploidy"), [4, 4, 4, 4]) - -# assert_allclose( -# X[:, 0].compute().sel(data="phased"), [0, 1, 1, 0, 1, 1, 1, 1, 0, 0] -# ) - -# X = X.compute() - -# x = X.sel(sample=0, data="phased") -# assert_allclose(x.where(x == 1, drop=True).variant.values, [1, 2, 4, 5, 6, 7]) + nvariants = len(variants) + phased = [bgen["genotype"][i].compute()["phased"] for i in range(nvariants)] + assert_allclose(phased, [0, 1, 1, 0, 1, 1, 1, 1, 0, 0]) def test_bgen_reader_complex_sample_file(): @@ -384,3 +377,12 @@ def test_bgen_reader_complex_sample_file(): assert_equal(samples.loc[0], "sample_0") assert_equal(samples.loc[3], "sample_3") + + + ploidy = bgen["genotype"][2].compute()["ploidy"] + missing = bgen["genotype"][2].compute()["missing"] + nvariants = len(variants) + phased = [bgen["genotype"][i].compute()["phased"] for i in range(nvariants)] + assert_allclose(ploidy, [1, 2, 2, 2]) + assert_allclose(missing, [0, 0, 0, 0]) + assert_allclose(phased, [0, 1, 1, 0, 1, 1, 1, 1, 0, 0])
missing, ploidy, phased
limix_bgen-reader-py
train
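With this change each element of bgen['genotype'] computes to a dict that carries the probabilities plus the new phased/ploidy/missing fields, exactly as the updated tests read them. A short usage sketch (the .bgen path is a placeholder):

from bgen_reader import read_bgen

bgen = read_bgen('example.bgen', verbose=False)  # hypothetical file
geno = bgen['genotype'][0].compute()             # dask Delayed -> dict

print(geno['probs'].shape)   # (nsamples, ncombs) float64 array
print(geno['phased'])        # 1 if this variant is phased, else 0
print(geno['ploidy'][:4])    # per-sample ploidy
print(geno['missing'][:4])   # per-sample missing flags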
953e27f9f1d1fb3a7951d08a8bb9f7b5be05fb83
diff --git a/spec/bolt/cli_spec.rb b/spec/bolt/cli_spec.rb index <HASH>..<HASH> 100644 --- a/spec/bolt/cli_spec.rb +++ b/spec/bolt/cli_spec.rb @@ -112,4 +112,46 @@ describe "Bolt::CLI" do cli = Bolt::CLI.new(%w[script --nodes foo]) expect(cli.parse).to include(mode: 'script') end + + describe "execute" do + let(:executor) { double('executor') } + let(:cli) { Bolt::CLI.new({}) } + + before :each do + allow(Bolt::Executor).to receive(:new).and_return(executor) + end + + it "executes the 'whoami' command" do + expect(executor).to receive(:execute).with('whoami').and_return({}) + + options = { + nodes: ['foo'], mode: 'exec', task_options: { 'command' => 'whoami' } + } + cli.execute(options) + end + + it "runs a script" do + expect(executor).to receive(:run_script).with('bar.sh').and_return({}) + + options = { + nodes: ['foo'], mode: 'script', task_options: { 'script' => 'bar.sh' } + } + cli.execute(options) + end + + it "runs a task" do + task_path = '/path/to/task' + task_params = { 'name' => 'apache', 'action' => 'restart' } + expect(executor) + .to receive(:run_task).with(task_path, task_params).and_return({}) + + options = { + nodes: ['foo'], + mode: 'run', + leftovers: [task_path], + task_options: task_params + } + cli.execute(options) + end + end end
(TASKS-<I>) Add unit tests for CLI#execute Add unit tests for exec, script and run.
puppetlabs_bolt
train
113b28394cf721d9c83a0d79e7077e87c6832ab0
diff --git a/PySimpleGUIWx/PySimpleGUIWx.py b/PySimpleGUIWx/PySimpleGUIWx.py index <HASH>..<HASH> 100644 --- a/PySimpleGUIWx/PySimpleGUIWx.py +++ b/PySimpleGUIWx/PySimpleGUIWx.py @@ -1,5 +1,5 @@ #!/usr/bin/python3 -version = __version__ = "0.17.0 Released 6-Jun-2020" +version = __version__ = "0.17.0.1 Unreleased\n Multiline update changed to SetValue" port = 'PySimpleGUIWx' @@ -990,7 +990,7 @@ class Multiline(Element): def Update(self, value=None, disabled=None, append=False, background_color=None, text_color=None, font=None, visible=None): try: # added in case the widget has already been deleted for some readon. if value is not None and not append: - self.WxTextCtrl.SetLabel(value) + self.WxTextCtrl.SetValue(value) elif value is not None and append: self.WxTextCtrl.AppendText(value) if background_color is not None:
Multiline.update changed to use SetValue just like when initially created.
PySimpleGUI_PySimpleGUI
train
8f34f4d5c9f0b8eeb90299d2046c7762cc338096
diff --git a/galpy/orbit_src/Orbit.py b/galpy/orbit_src/Orbit.py index <HASH>..<HASH> 100644 --- a/galpy/orbit_src/Orbit.py +++ b/galpy/orbit_src/Orbit.py @@ -622,6 +622,46 @@ class Orbit: """ self._orb.plotvy(*args,**kwargs) + def toPlanar(self): + """ + NAME: + toPlanar + PURPOSE: + convert a 3D orbit into a 2D orbit + INPUT: + (none) + OUTPUT: + planarOrbit + HISTORY: + 2010-11-30 - Written - Bovy (NYU) + """ + if len(self.vxvv) == 6: + vxvv= [self.vxvv[0],self.vxvv[1],self.vxvv[2],self.vxvv[5]] + elif len(self.vxvv) == 5: + vxvv= [self.vxvv[0],self.vxvv[1],self.vxvv[2]] + else: + raise AttributeError("planar or linear Orbits do not have the toPlanar attribute") + return Orbit(vxvv=vxvv) + + def toLinear(self): + """ + NAME: + toLinear + PURPOSE: + convert a 3D orbit into a 1D orbit (z) + INPUT: + (none) + OUTPUT: + linearOrbit + HISTORY: + 2010-11-30 - Written - Bovy (NYU) + """ + if len(self.vxvv) == 6 or len(self.vxvv) == 5: + vxvv= [self.vxvv[3],self.vxvv[4]] + else: + raise AttributeError("planar or linear Orbits do not have the toPlanar attribute") + return Orbit(vxvv=vxvv) + def __add__(self,linOrb): """ NAME:
toPlanar and toLinear for orbits
jobovy_galpy
train
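A quick usage sketch of the two conversions added above, with arbitrary initial conditions in galpy's [R, vR, vT, z, vz, phi] ordering (the same ordering the diff indexes into):

from galpy.orbit import Orbit

o = Orbit(vxvv=[1.0, 0.1, 1.1, 0.1, 0.02, 0.5])  # full 3D orbit

op = o.toPlanar()  # planar orbit: keeps [R, vR, vT, phi]
ol = o.toLinear()  # vertical 1D orbit: keeps [z, vz]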
16671ec90553e60b61ca84d881dadb136b4353da
diff --git a/Lib/extractor/formats/opentype.py b/Lib/extractor/formats/opentype.py index <HASH>..<HASH> 100644 --- a/Lib/extractor/formats/opentype.py +++ b/Lib/extractor/formats/opentype.py @@ -361,6 +361,8 @@ def extractOpenTypeKerning(source, destination): elif "kern" in source: kerning = _extractOpenTypeKerningFromKern(source) groups = {} + for name, group in groups.items(): + groups[name] = list(sorted(group)) return kerning, groups def _extractOpenTypeKerningFromGPOS(source):
Groups must be a list.
robotools_extractor
train
b7b2d7848f6b358d49f8f21df3a39415c1ada7ad
diff --git a/datascience/tables.py b/datascience/tables.py index <HASH>..<HASH> 100644 --- a/datascience/tables.py +++ b/datascience/tables.py @@ -3693,8 +3693,12 @@ class Table(collections.abc.MutableMapping): x = values_dict[k][0], y = np.zeros_like(values_dict[k][0]), mode = "markers", - marker_symbol="line-ns", - marker_color="black" + marker = dict( + symbol = "line-ns-open", + color = "black", + size = 10, + opacity = 1, + ) )) fig.update_yaxes( @@ -3755,6 +3759,20 @@ class Table(collections.abc.MutableMapping): marker_symbol="line-ns", marker_color="black" ), row = i + 1, col = 1) + if rug: + fig.append_trace(go.Scatter( + x = values_dict[k][0], + y = np.zeros_like(values_dict[k][0]), + mode = "markers", + marker = dict( + symbol = "line-ns-open", + color = "black", + size = 10, + opacity = 1, + )), + row = i + 1, col = 1 + ) + fig.update_yaxes( title_text = "".join([
finished rug plots for ihist
data-8_datascience
train
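The rug marks above are ordinary plotly scatter traces pinned to y = 0 and drawn with the 'line-ns-open' tick symbol. A standalone version of that trace, outside the Table class (the sample data is made up):

import numpy as np
import plotly.graph_objects as go

values = np.random.normal(size=200)

fig = go.Figure()
fig.add_trace(go.Scatter(
    x=values,
    y=np.zeros_like(values),  # pin every tick to the baseline
    mode='markers',
    marker=dict(symbol='line-ns-open', color='black', size=10, opacity=1),
))
fig.show()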
3b140f31ed5816d3a198039b72a08c0b9ab1370a
diff --git a/labsuite/compilers/pfusx.py b/labsuite/compilers/pfusx.py index <HASH>..<HASH> 100644 --- a/labsuite/compilers/pfusx.py +++ b/labsuite/compilers/pfusx.py @@ -325,6 +325,11 @@ def compile(*sequences, output=None): # For printing an output map. well_map[well] = s + # Nicely formatted well map for the description. + output_map = [] + for well, seq in well_map.items(): + output_map.append("{}: {}".format(well, seq)) + # Take our three transfer groups and make them into a consolidated # transfer list. instructions = [] @@ -338,11 +343,6 @@ def compile(*sequences, output=None): object_pairs_hook=OrderedDict ).decode(data.read()) - # Nicely formatted well map for the description. - output_map = [] - for well, seq in well_map.items(): - output_map.append("{}: {}".format(well, seq)) - protocol['instructions'][0]['groups'] = instructions protocol['info']['create-date'] = str(datetime.date.today()) protocol['info']['description'] = "; ".join(output_map)
pFusX: Code reorder for readability.
Opentrons_opentrons
train
4cf8bf7312a124cb96e615923ebb373380fb60bd
diff --git a/activerecord/CHANGELOG b/activerecord/CHANGELOG index <HASH>..<HASH> 100644 --- a/activerecord/CHANGELOG +++ b/activerecord/CHANGELOG @@ -1,5 +1,7 @@ *SVN* +* Pushing a record on an association collection doesn't unnecessarily load all the associated records. [Obie Fernandez, Jeremy Kemper] + * Oracle: fix connection reset failure. #6846 [leonlleslie] * Subclass instantiation doesn't try to explicitly require the corresponding subclass. #6840 [leei, Jeremy Kemper] diff --git a/activerecord/lib/active_record/associations/association_collection.rb b/activerecord/lib/active_record/associations/association_collection.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/associations/association_collection.rb +++ b/activerecord/lib/active_record/associations/association_collection.rb @@ -7,7 +7,7 @@ module ActiveRecord load_target @target.to_ary end - + def reset reset_target! @loaded = false @@ -17,7 +17,6 @@ module ActiveRecord # Since << flattens its argument list and inserts each record, +push+ and +concat+ behave identically. def <<(*records) result = true - load_target @owner.transaction do flatten_deeper(records).each do |record| @@ -34,7 +33,7 @@ module ActiveRecord alias_method :push, :<< alias_method :concat, :<< - + # Remove all records from this association def delete_all load_target @@ -103,7 +102,7 @@ module ActiveRecord # calling collection.size if it has. If it's more likely than not that the collection does have a size larger than zero # and you need to fetch that collection afterwards, it'll take one less SELECT query if you use length. def size - if loaded? && [email protected][:uniq] + if @owner.new_record? || (loaded? && [email protected][:uniq]) @target.size elsif !loaded? && [email protected][:uniq] && @target.is_a?(Array) unsaved_records = Array(@target.detect { |r| r.new_record? }) diff --git a/activerecord/test/associations_test.rb b/activerecord/test/associations_test.rb index <HASH>..<HASH> 100755 --- a/activerecord/test/associations_test.rb +++ b/activerecord/test/associations_test.rb @@ -637,12 +637,15 @@ class HasManyAssociationsTest < Test::Unit::TestCase def test_adding_before_save no_of_firms = Firm.count no_of_clients = Client.count + new_firm = Firm.new("name" => "A New Firm, Inc") + c = Client.new("name" => "Apple") + new_firm.clients_of_firm.push Client.new("name" => "Natural Company") - new_firm.clients_of_firm << (c = Client.new("name" => "Apple")) - assert new_firm.new_record? - assert c.new_record? + assert_equal 1, new_firm.clients_of_firm.size + new_firm.clients_of_firm << c assert_equal 2, new_firm.clients_of_firm.size + assert_equal no_of_firms, Firm.count # Firm was not saved to database. assert_equal no_of_clients, Client.count # Clients were not saved to database. assert new_firm.save @@ -651,6 +654,7 @@ class HasManyAssociationsTest < Test::Unit::TestCase assert_equal new_firm, c.firm assert_equal no_of_firms+1, Firm.count # Firm was saved to database. assert_equal no_of_clients+2, Client.count # Clients were saved to database. 
+ assert_equal 2, new_firm.clients_of_firm.size assert_equal 2, new_firm.clients_of_firm(true).size end diff --git a/activerecord/test/base_test.rb b/activerecord/test/base_test.rb index <HASH>..<HASH> 100755 --- a/activerecord/test/base_test.rb +++ b/activerecord/test/base_test.rb @@ -1288,12 +1288,12 @@ class BasicsTest < Test::Unit::TestCase client_new = Client.new client_new.name = "The Joneses" clients = [ client_stored, client_new ] - + firm.clients << clients + assert_equal clients.map(&:name).to_set, firm.clients.map(&:name).to_set firm.clear_association_cache - - assert_equal firm.clients.collect{ |x| x.name }.sort, clients.collect{ |x| x.name }.sort + assert_equal clients.map(&:name).to_set, firm.clients.map(&:name).to_set end def test_interpolate_sql
Pushing a record on an association collection doesn't unnecessarily load all the associated records. git-svn-id: <URL>
rails_rails
train
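The ActiveRecord change above hinges on two behaviors: appending no longer forces a load of the already-persisted collection, and size() on an unsaved owner can be answered from in-memory records alone. A minimal sketch of that idea, with all class and attribute names hypothetical:

```python
class AssociationProxy:
    """Hypothetical association proxy: << does not eagerly load, and
    size() on an unsaved owner counts only in-memory records."""

    def __init__(self, owner, fetch):
        self.owner = owner      # object with a boolean .new_record attribute
        self.fetch = fetch      # callable returning the persisted records
        self.pending = []
        self.loaded = None

    def append(self, record):
        # The fix: record the new object without loading the collection.
        self.pending.append(record)

    def size(self):
        # An unsaved owner has no persisted rows; answer from memory.
        if self.owner.new_record:
            return len(self.pending) + len(self.loaded or [])
        if self.loaded is None:
            self.loaded = self.fetch()
        return len(self.loaded) + len(self.pending)
```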
c242934f00d0e3d966f7a99d9510d4500f779549
diff --git a/libandroid-navigation/src/main/java/com/mapbox/services/android/navigation/v5/navigation/NavigationRoute.java b/libandroid-navigation/src/main/java/com/mapbox/services/android/navigation/v5/navigation/NavigationRoute.java index <HASH>..<HASH> 100644 --- a/libandroid-navigation/src/main/java/com/mapbox/services/android/navigation/v5/navigation/NavigationRoute.java +++ b/libandroid-navigation/src/main/java/com/mapbox/services/android/navigation/v5/navigation/NavigationRoute.java @@ -6,7 +6,9 @@ import android.support.annotation.Nullable; import com.mapbox.directions.v5.DirectionsCriteria; import com.mapbox.directions.v5.DirectionsCriteria.AnnotationCriteria; +import com.mapbox.directions.v5.DirectionsCriteria.ExcludeCriteria; import com.mapbox.directions.v5.DirectionsCriteria.ProfileCriteria; +import com.mapbox.directions.v5.DirectionsCriteria.VoiceUnitCriteria; import com.mapbox.directions.v5.MapboxDirections; import com.mapbox.directions.v5.models.DirectionsResponse; import com.mapbox.directions.v5.models.DirectionsRoute; @@ -355,6 +357,31 @@ public final class NavigationRoute { } /** + * Change the units used for voice announcements, this does not change the units provided in + * other fields outside of the {@link com.mapbox.directions.v5.models.VoiceInstructions} object. + * + * @param voiceUnits one of the values found inside the {@link VoiceUnitCriteria} + * @return this builder for chaining options together + * @since 0.8.0 + */ + public Builder voiceUnits(@Nullable @VoiceUnitCriteria String voiceUnits) { + directionsBuilder.voiceUnits(voiceUnits); + return this; + } + + /** + * Exclude specific road classes such as highways, tolls, and more. + * + * @param exclude one of the values found inside the {@link ExcludeCriteria} + * @return this builder for chaining options together + * @since 0.8.0 + */ + public Builder exclude(@Nullable @ExcludeCriteria String exclude) { + directionsBuilder.exclude(exclude); + return this; + } + + /** * Base package name or other simple string identifier. Used inside the calls user agent header. * * @param clientAppName base package name or other simple string identifier @@ -407,6 +434,7 @@ public final class NavigationRoute { .geometries(DirectionsCriteria.GEOMETRY_POLYLINE6) .overview(DirectionsCriteria.OVERVIEW_FULL) .voiceInstructions(true) + .bannerInstructions(true) .roundaboutExits(true); return new NavigationRoute(directionsBuilder.build()); }
Adds exclude and voiceUnits to dir request (#<I>)
mapbox_mapbox-navigation-android
train
e19e2cd24bef833f72b8cc95fde6749fcfad1f7e
diff --git a/pysolr.py b/pysolr.py index <HASH>..<HASH> 100644 --- a/pysolr.py +++ b/pysolr.py @@ -144,7 +144,7 @@ except NameError: __author__ = 'Joseph Kocherhans, Jacob Kaplan-Moss, Daniel Lindsley' __all__ = ['Solr'] -__version__ = (2, 0, 8) +__version__ = (2, 0, 9) def get_version(): return "%s.%s.%s" % __version__ @@ -343,6 +343,13 @@ class Solr(object): response = self._mlt(params) result = self.decoder.decode(response) + + if result['response'] is None: + result['response'] = { + 'docs': [], + 'numFound': 0, + } + return Results(result['response']['docs'], result['response']['numFound']) def add(self, docs, commit=True):
If MLT was enabled but no reindexing was performed, Solr returns null instead of no docs. Handle this slightly more gracefully.
django-haystack_pysolr
train
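The pysolr fix guards against Solr returning `"response": null` when more-like-this is enabled but the index was never rebuilt. A sketch of the same normalization in isolation (the helper name is invented for illustration):

```python
import json

def parse_mlt_response(raw):
    """Decode a Solr MLT payload, tolerating a null `response` body."""
    result = json.loads(raw)
    if result.get("response") is None:
        # Mirror the pysolr guard: substitute an empty result set.
        result["response"] = {"docs": [], "numFound": 0}
    return result["response"]["docs"], result["response"]["numFound"]

docs, num_found = parse_mlt_response('{"response": null}')
assert docs == [] and num_found == 0
```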
e0df1bb6d1982bffd7a53899709a2e0bbc8a428a
diff --git a/web/concrete/elements/files/search.php b/web/concrete/elements/files/search.php index <HASH>..<HASH> 100644 --- a/web/concrete/elements/files/search.php +++ b/web/concrete/elements/files/search.php @@ -38,7 +38,7 @@ $req = $flr->getSearchRequest(); <?php $fp = FilePermissions::getGlobal(); if ($fp->canAddFile()) { ?> - <li id="ccm-file-manager-upload"><a href="#"><?php echo t('Upload Files')?><input type="file" name="files[]" multiple="multiple" /></a></li> + <li id="ccm-file-manager-upload"><a href="javascript:void"><?php echo t('Upload Files')?><input type="file" name="files[]" multiple="multiple" /></a></li> <?php } ?> </ul> diff --git a/web/concrete/js/build/core/file-manager/search.js b/web/concrete/js/build/core/file-manager/search.js index <HASH>..<HASH> 100644 --- a/web/concrete/js/build/core/file-manager/search.js +++ b/web/concrete/js/build/core/file-manager/search.js @@ -82,13 +82,6 @@ } }; - $fileUploader.on('click', function() { - $(this).find('input').trigger('click'); - return false; - }).children('a').click(function() { - $(this).parent().find('input').trigger('click'); - return false; - }); $fileUploader.fileupload(args); };
Remove js bindings (that never would've worked) and replace them with a fallthrough. Former-commit-id: <I>c6acd3af<I>f<I>ec<I>a1aba<I>
concrete5_concrete5
train
ad9598c75895c7cd3a984044ac52dfabcadc7899
diff --git a/backup/util/dbops/restore_dbops.class.php b/backup/util/dbops/restore_dbops.class.php index <HASH>..<HASH> 100644 --- a/backup/util/dbops/restore_dbops.class.php +++ b/backup/util/dbops/restore_dbops.class.php @@ -1078,8 +1078,8 @@ abstract class restore_dbops { $localpath = $filesystem->get_local_path_from_storedfile($storedfile); $fs->create_file_from_pathname($file, $localpath); } else if ($filesystem->is_file_readable_remotely_by_storedfile($storedfile)) { - $url = $filesystem->get_remote_path_from_storedfile($storedfile); - $fs->create_file_from_url($file, $url); + $remotepath = $filesystem->get_remote_path_from_storedfile($storedfile); + $fs->create_file_from_pathname($file, $remotepath); } else if ($filesystem->is_file_readable_locally_by_storedfile($storedfile, true)) { $localpath = $filesystem->get_local_path_from_storedfile($storedfile, true); $fs->create_file_from_pathname($file, $localpath);
MDL-<I> restore: Fix remote file recovery
moodle_moodle
train
c0cd3e9e8eec51792c2ca509f45f2822b26f3488
diff --git a/extension/rsb/com/src/main/java/org/dc/jul/extension/rsb/com/AbstractExecutableController.java b/extension/rsb/com/src/main/java/org/dc/jul/extension/rsb/com/AbstractExecutableController.java index <HASH>..<HASH> 100644 --- a/extension/rsb/com/src/main/java/org/dc/jul/extension/rsb/com/AbstractExecutableController.java +++ b/extension/rsb/com/src/main/java/org/dc/jul/extension/rsb/com/AbstractExecutableController.java @@ -21,7 +21,7 @@ package org.dc.jul.extension.rsb.com; * <http://www.gnu.org/licenses/lgpl-3.0.html>. * #L% */ - +import com.google.protobuf.Descriptors; import com.google.protobuf.GeneratedMessage; import org.dc.jul.exception.CouldNotPerformException; import org.dc.jul.exception.InitializationException; @@ -32,10 +32,12 @@ import rst.homeautomation.state.ActivationStateType; import rst.homeautomation.state.ActivationStateType.ActivationState; import org.dc.jul.exception.InstantiationException; import org.dc.jul.exception.NotAvailableException; +import org.dc.jul.extension.protobuf.ClosableDataBuilder; /** * - * @author * @author <a href="mailto:[email protected]">Divine Threepwood</a> + * @author * @author <a href="mailto:[email protected]">Divine + * Threepwood</a> */ public abstract class AbstractExecutableController<M extends GeneratedMessage, MB extends M.Builder<MB>, CONFIG extends GeneratedMessage> extends AbstractEnableableConfigurableController<M, MB, CONFIG> implements Enableable { @@ -68,13 +70,22 @@ public abstract class AbstractExecutableController<M extends GeneratedMessage, M return; } - - try { - setField(ACTIVATION_STATE, activation); + try (ClosableDataBuilder<MB> dataBuilder = getDataBuilder(this)) { + Descriptors.FieldDescriptor findFieldByName = dataBuilder.getInternalBuilder().getDescriptorForType().findFieldByName(ACTIVATION_STATE); + if (findFieldByName == null) { + throw new NotAvailableException("Field[" + ACTIVATION_STATE + "] does not exist for type " + dataBuilder.getClass().getName()); + } + dataBuilder.getInternalBuilder().setField(findFieldByName, activation); } catch (Exception ex) { throw new CouldNotPerformException("Could not apply data change!", ex); } +// try { +// setField(ACTIVATION_STATE, activation); +// } catch (Exception ex) { +// throw new CouldNotPerformException("Could not apply data change!", ex); +// } + try { if (activation.getValue().equals(ActivationState.State.ACTIVE)) { if (!executing) {
Try to fix the AbstractExecutableController not notifying observers when it receives a new value
openbase_jul
train
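The jul fix looks a protobuf field up by name and fails loudly when it is absent, instead of setting it blindly. A rough Python-protobuf analog of that pattern; `builder` stands for any generated message instance:

```python
def set_field_by_name(builder, name, value):
    """Set a scalar protobuf field located by name, raising if missing."""
    field = builder.DESCRIPTOR.fields_by_name.get(name)
    if field is None:
        raise KeyError("Field[%s] does not exist for type %s"
                       % (name, type(builder).__name__))
    # setattr covers scalar fields; message-typed fields would need
    # getattr(builder, name).CopyFrom(value) instead.
    setattr(builder, name, value)
```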
48ccd6e3ef3e8ab7a65060e0ab919d080570e78a
diff --git a/lib/apivore/rspec_builder.rb b/lib/apivore/rspec_builder.rb index <HASH>..<HASH> 100644 --- a/lib/apivore/rspec_builder.rb +++ b/lib/apivore/rspec_builder.rb @@ -16,7 +16,8 @@ module Apivore end def get_apivore_setup(path, method, response) - @@setups[path + method + response].try(:call) || {} + setup = @@setups[path + method + response] + (instance_eval &setup if setup) || {} end def apivore_build_path(path, data) @@ -55,8 +56,8 @@ module Apivore full_path = apivore_build_path(swagger.base_path + path, setup_data) # e.g., get(full_path) - if setup_data.is_a?(Hash) && setup_data['_data'] - send(method, full_path, setup_data['_data']) + if setup_data.is_a?(Hash) + send(method, full_path, setup_data['_data'] || {}, setup_data['_headers'] || {}) else send(method, full_path) end
support rspec `let` blocks within `apivore_setup`
westfieldlabs_apivore
train
6bdb5cbc1ada2669cf898c8e49c010001cd88b9b
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,9 +2,6 @@ language: python cache: pip -addons: - postgresql: '9.6' - services: - postgresql diff --git a/aiohttp_devtools/version.py b/aiohttp_devtools/version.py index <HASH>..<HASH> 100644 --- a/aiohttp_devtools/version.py +++ b/aiohttp_devtools/version.py @@ -2,4 +2,4 @@ from distutils.version import StrictVersion __all__ = ['VERSION'] -VERSION = StrictVersion('0.3.1') +VERSION = StrictVersion('0.3.2') diff --git a/tests/test_start.py b/tests/test_start.py index <HASH>..<HASH> 100644 --- a/tests/test_start.py +++ b/tests/test_start.py @@ -118,7 +118,7 @@ async def test_all_options(tmpdir, test_client, loop, template_engine, session, assert '<title>foobar</title>' in text -# @if_boxed +@if_boxed @slow async def test_db_creation(tmpdir, test_client, loop): StartProject(
remove postgres version constraint from travis
aio-libs_aiohttp-devtools
train
4ed5d9304ca2f509a715bdae72fc62d9a79b6a24
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,24 +1,18 @@ +import re +from setuptools import setup -from dateparser import __version__ - - -try: - from setuptools import setup -except ImportError: - from distutils.core import setup - +(__version__, ) = re.findall("__version__.*\s*=\s*[']([^']+)[']", + open('dateparser/__init__.py').read()) readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.. :changelog:', '') -def load_reqs(fname): - return open(fname).read().splitlines() - - -requirements = load_reqs('requirements-build.txt') + load_reqs('requirements.txt') +requirements = [ + 'python-dateutil >= 2.2', +] -test_requirements = load_reqs('tests/requirements.txt') +test_requirements = open('tests/requirements.txt').read().splitlines() setup( name='dateparser',
dropped distutils, parsing version number with regex to avoid import, removed wheel dependency from install_requires
scrapinghub_dateparser
train
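The setup.py above avoids importing the package at install time, which would drag in its runtime dependencies, by scraping `__version__` with a regex. The same pattern in isolation, assuming the usual single-quoted assignment:

```python
import re

with open("dateparser/__init__.py") as f:
    match = re.search(r"__version__\s*=\s*'([^']+)'", f.read())
if match is None:
    raise RuntimeError("could not locate __version__")
version = match.group(1)
```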
3dabe2c3af64751b246a50d369aabb0913f5c775
diff --git a/html5lib/tests/test_treewalkers.py b/html5lib/tests/test_treewalkers.py index <HASH>..<HASH> 100644 --- a/html5lib/tests/test_treewalkers.py +++ b/html5lib/tests/test_treewalkers.py @@ -4,6 +4,7 @@ import os import sys import unittest import warnings +from difflib import unified_diff try: unittest.TestCase.assertEqual @@ -280,10 +281,14 @@ def runTreewalkerTest(innerHTML, input, expected, errors, treeClass): output = convertTokens(treeClass["walker"](document)) output = attrlist.sub(sortattrs, output) expected = attrlist.sub(sortattrs, convertExpected(expected)) + diff = "".join(unified_diff([line + "\n" for line in expected.splitlines()], + [line + "\n" for line in output.splitlines()], + "Expected", "Received")) assert expected == output, "\n".join([ "", "Input:", input, "", "Expected:", expected, - "", "Received:", output + "", "Received:", output, + "", "Diff:", diff, ]) except NotImplementedError: pass # Amnesty for those that confess...
Add diff to error messages from treewalker tests. I've spent too long straining to see subtle differences. This helps.
html5lib_html5lib-python
train
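The test change builds a unified diff into the assertion message so subtle mismatches are visible at a glance. The core of that helper, extracted as a standalone function:

```python
from difflib import unified_diff

def assert_equal_with_diff(expected, received):
    """Raise AssertionError with an embedded unified diff on mismatch."""
    if expected == received:
        return
    diff = "".join(unified_diff(
        [line + "\n" for line in expected.splitlines()],
        [line + "\n" for line in received.splitlines()],
        "Expected", "Received"))
    raise AssertionError("Outputs differ:\n" + diff)

assert_equal_with_diff("a\nb", "a\nb")  # passes silently
```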
de69c50f8c76a49561e88ce9912c72864bd5925b
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,15 +1,5 @@ # -*- coding: utf-8 -*- -# -# alot documentation build configuration file, created by -# sphinx-quickstart on Tue Jan 10 16:45:54 2012. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. +# alot documentation build configuration file import sys, os @@ -82,7 +72,7 @@ copyright = u'2012, Patrick Totzke' # The short X.Y version. version = __version__ # The full version, including alpha/beta/rc tags. -release = '0.21' +release = version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages.
docs: don't use hardcoded version string in conf.py
pazz_alot
train
5253396420ebd8cfedf0adbc8d12bf9ea2b269bf
diff --git a/sources/scalac/ast/TreeGen.java b/sources/scalac/ast/TreeGen.java index <HASH>..<HASH> 100644 --- a/sources/scalac/ast/TreeGen.java +++ b/sources/scalac/ast/TreeGen.java @@ -267,10 +267,20 @@ public class TreeGen implements Kinds, Modifiers { /** Build parameter */ public ValDef Param(int pos, Symbol sym) { + global.log("use of obsolete Param method in TreeGen"); return (ValDef)ValDef(pos, sym, Tree.Empty); } public ValDef Param(Symbol sym) { + global.log("use of obsolete Param method in TreeGen"); + return Param(sym.pos, sym); + } + + public ValDef ValDef(int pos, Symbol sym) { + return (ValDef)ValDef(pos, sym, Tree.Empty); + } + + public ValDef ValDef(Symbol sym) { return Param(sym.pos, sym); }
- made Param obsolete, and introduced ValDef as alias (to reflect the change in Tree.java)
scala_scala
train
f1adab45999d4e67e092e913b9895f3b177f1e9d
diff --git a/pyfnnd/_fnndeconv.py b/pyfnnd/_fnndeconv.py index <HASH>..<HASH> 100644 --- a/pyfnnd/_fnndeconv.py +++ b/pyfnnd/_fnndeconv.py @@ -157,19 +157,25 @@ def deconvolve(F, C0=None, theta0=None, dt=0.02, learn_theta=(0, 0, 0, 0, 0), if theta0 is None: theta = _init_theta(F, dt, hz=0.3, tau=0.5) else: - theta = theta0 + sigma, alpha, beta, lamb, gamma = theta0 + # beta absorbs the offset + beta = beta - offset + theta = sigma, alpha, beta, lamb, gamma sigma, alpha, beta, lamb, gamma = theta if C0 is None: + # smooth the raw fluorescence with a boxcar filter + Fsmooth = _boxcar(F.mean(0), dt=dt, avg_win=1.0) + # initial estimate of the calcium concentration, based on the alpha and - # beta params - C0 = (1. / alpha).dot(F - beta[:, None]) / npix + # beta params (an average of the baseline-subtracted fluorescence, + # weighted by the reciprocal of the pixel mask) + # C0 = (1. / alpha).dot(Fsmooth - beta[:, None]) / npix # C0 = ((F - beta[:, None]) / alpha[:, None]).sum(0) # equivalent - # we smooth this with a boxcar filter - C0 = _boxcar(C0, dt=dt, avg_win=1.0) + C0 = Fsmooth # if we're not learning the parameters, this step is all we need to do n_hat, C_hat, LL = _estimate_MAP_spikes( @@ -201,7 +207,7 @@ def deconvolve(F, C0=None, theta0=None, dt=0.02, learn_theta=(0, 0, 0, 0, 0), delta_LL = -((LL1 - LL) / LL) if verbosity >= 1: - print('Params: iter=%3i; LL=%12.2f; delta_LL= %8.4g' + print('params: iter=%3i; LL=%12.2f; delta_LL= %8.4g' % (nloop_params, LL1, delta_LL)) # if the LL improved or stayed the same, keep these parameters @@ -267,13 +273,20 @@ def _estimate_MAP_spikes(F, C_hat, theta, dt, tol=1E-6, maxiter=100, verbosity=0 sigma, alpha, beta, lamb, gamma = theta # project the background-subtracted fluorescence movie onto the spatial - # filter to get the estimated 'calcium concentration'. by reducing the - # [npix, nt] movie to a single [nt,] vector we speed up the processing a - # lot! - C = (1. / alpha).dot(F - beta[:, None]) / npix - # C = ((F - beta[:, None]) / alpha[:, None]).sum(0) # equivalent + # filter to get the estimated 'calcium concentration' (an average of the + # baseline-subtracted fluorescence, weighted by the reciprocal of the pixel + # mask) + recip_alpha = (1. / alpha) + C = recip_alpha.dot(F - beta[:, None]) / npix + # C = ((F - beta[:, None]) / alpha[:, None]).mean(0) # equivalent + ipdb.set_trace() + # we apply a correction factor to the sigma parameter as well + sigma_fac = np.sqrt(recip_alpha.dot(recip_alpha)) / npix + # sigma_fac = np.sqrt(np.sum(1./(alpha ** 2))) / npix + sigma = sigma_fac * sigma + # used for computing the LL and gradient scale_var = 1. / (2 * sigma ** 2) lD = lamb * dt @@ -295,6 +308,7 @@ def _estimate_MAP_spikes(F, C_hat, theta, dt, tol=1E-6, maxiter=100, verbosity=0 # compute initial posterior log-likelihood of the fluorescence LL = _post_LL(n_hat, res, scale_var, lD, z) + # ipdb.set_trace() nloop1 = 0 LL_prev = LL
implemented sigma correction factor, still broken
alimuldal_PyFNND
train
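The PyFNND diff projects the baseline-subtracted movie onto the reciprocal pixel mask and then rescales sigma by ||1/alpha||_2 / npix so the noise level matches the projected 1-D trace. A NumPy sketch of just that arithmetic; the commit message itself says the result was still broken, so treat this as the intended math rather than a verified fix:

```python
import numpy as np

def project_and_rescale(F, alpha, beta, sigma):
    """F: (npix, nt) movie; alpha, beta: per-pixel mask and baseline."""
    npix = F.shape[0]
    recip_alpha = 1.0 / alpha
    # Weighted average of the baseline-subtracted fluorescence.
    C = recip_alpha.dot(F - beta[:, None]) / npix
    # Correction factor applied to sigma: ||1/alpha||_2 / npix.
    sigma_fac = np.sqrt(recip_alpha.dot(recip_alpha)) / npix
    return C, sigma * sigma_fac
```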
a8a04e9d6ba7b1250ef6c4566ff72e309b0ab683
diff --git a/test/integration/09-cluster-broker.js b/test/integration/09-cluster-broker.js index <HASH>..<HASH> 100644 --- a/test/integration/09-cluster-broker.js +++ b/test/integration/09-cluster-broker.js @@ -254,7 +254,6 @@ describe('09 - integration - broker', function() { return users.allowMethod(localInstance, 'username', 'remoteComponent', 'brokeredMethod1'); }) .then(function(){ - console.log('pausing...'); return new Promise(function(resolve){ setTimeout(resolve, 5000); }); @@ -288,7 +287,6 @@ describe('09 - integration - broker', function() { return users.allowMethod(localInstance, 'username', 'remoteComponent', 'brokeredMethod3'); }) .then(function(){ - console.log('pausing...'); return new Promise(function(resolve){ setTimeout(resolve, 5000); });
chore: removed console.log of pause
happner_happner-cluster
train
ead3ff8245fc8ed7e59186fd356a13cee99c8a2e
diff --git a/src/main/java/hex/gbm/DTree.java b/src/main/java/hex/gbm/DTree.java index <HASH>..<HASH> 100644 --- a/src/main/java/hex/gbm/DTree.java +++ b/src/main/java/hex/gbm/DTree.java @@ -11,7 +11,6 @@ import water.*; import water.api.DocGen; import water.api.Request.API; import water.fvec.Chunk; -import water.fvec.Frame; import water.util.*; /** @@ -846,27 +845,51 @@ public class DTree extends Iced { } // Convert Tree model to Java @Override protected void toJavaPredictBody( final SB sb, final SB afterBodySb) { - String[] cnames = classNames(); + final int maxfsize = 100; // maximal number of trees in forest + int fidx = 0; // forest index + int treesInForest = 0; + SB forest = new SB(); + // divide trees into small forests per 100 trees sb.indent().p("java.util.Arrays.fill(preds,0f);\n"); - for( int i=0; i < treeBits.length; i++ ) { - CompressedTree cts[] = treeBits[i]; - for( int c=0; c<cts.length; c++ ) { + for( int c=0; c<nclasses(); c++ ) { + toJavaForestBegin(sb, forest, c, fidx++); + for( int i=0; i < treeBits.length; i++ ) { + CompressedTree cts[] = treeBits[i]; if( cts[c] == null ) continue; - sb.indent().p("// Tree ").p(i); - if( cnames != null ) sb.p(", class=").p(cnames[c]); - sb.p("\n"); - sb.indent().p("preds[").p(c+1).p("] +=").p(" Tree_").p(i).p("_class_").p(c).p(".predict(data);").nl(); - // append body of tree predictor function + forest.indent().p("pred").p(" +=").p(" Tree_").p(i).p("_class_").p(c).p(".predict(data);").nl(); + // append representation of tree predictor toJavaTreePredictFct(afterBodySb, cts[c], i, c); + if (++treesInForest > maxfsize) { + toJavaForestEnd(sb, forest, c, fidx); + toJavaForestBegin(sb, forest, c, fidx++); + treesInForest = 0; + } } + toJavaForestEnd(sb, forest, c, fidx); + treesInForest = 0; } + afterBodySb.p(forest); + } + + private void toJavaForestBegin(SB predictBody, SB forest, int c, int fidx) { + predictBody.indent().p("// Call forest predicting class ").p(c).nl(); + predictBody.indent().p("preds[").p(c+1).p("] +=").p(" Forest_").p(fidx).p("_class_").p(c).p(".predict(data);").nl(); + forest.indent().p("// Forest representing a subset of trees scoring class ").p(c).nl(); + forest.indent().p("public static class Forest_").p(fidx).p("_class_").p(c).p(" {").nl().ii(1); + forest.indent().p("public static float predict(double[] data) {").nl().ii(1); + forest.indent().p("float pred = 0;").nl(); + } + private void toJavaForestEnd(SB predictBody, SB forest, int c, int fidx) { + forest.indent().p("return pred;").nl(); + forest.indent().p("}").di(1).nl(); // end of function + forest.indent().p("}").di(1).nl(); // end of forest classs } // Produce prediction code for one tree protected void toJavaTreePredictFct(final SB sb, final CompressedTree cts, int tidx, int c) { sb.indent().p("// Tree predictor for ").p(tidx).p("-tree and ").p(c).p("-class").nl(); sb.indent().p("static class Tree_").p(tidx).p("_class_").p(c).p(" {").nl().ii(1); - sb.indent().p("static final float predict(double[] data) {").nl().ii(1); + sb.indent().p("static final float predict(double[] data) {").nl().ii(1); // predict method for one tree sb.indent().p("float pred = "); new TreeVisitor<RuntimeException>(this,cts) { byte _bits[] = new byte[100];
Generating Forest classes into resulting code.
h2oai_h2o-2
train
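The H2O change partitions trees into generated Forest classes of at most 100 members so no single emitted method grows past compiler size limits. The partitioning logic on its own:

```python
def chunk_trees(trees, max_forest_size=100):
    """Yield forests of at most max_forest_size trees each."""
    for start in range(0, len(trees), max_forest_size):
        yield trees[start:start + max_forest_size]

# e.g. 250 trees -> forests of 100, 100 and 50
assert [len(f) for f in chunk_trees(list(range(250)))] == [100, 100, 50]
```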
16e0fb4a5792caf70b1b1fbce2b7b594f00bd858
diff --git a/makephar.php b/makephar.php index <HASH>..<HASH> 100755 --- a/makephar.php +++ b/makephar.php @@ -29,9 +29,7 @@ if (basename($backtrace[0]["file"]) === "phar.php") { chdir(dirname($backtrace[1]["file"])); if (!isset($phar_debug)) file_put_contents($backtrace[0]["file"], file_get_contents("https://phar.madelineproto.xyz/phar.php?v=new")); } -if ((new Phar(__FILE__))[".git/refs/heads/master"]->getContent() !== file_get_contents("https://phar.madelineproto.xyz/release?v=new") && !isset($phar_debug)) { - file_put_contents(__FILE__, file_get_contents("https://phar.madelineproto.xyz/madeline.phar?v=new")); -} + Phar::interceptFileFuncs(); Phar::mapPhar("'.$argv[2].'"); require_once "phar://'.$argv[2].'/vendor/autoload.php"; diff --git a/phar.php b/phar.php index <HASH>..<HASH> 100644 --- a/phar.php +++ b/phar.php @@ -1,10 +1,8 @@ <?php -if (!file_exists('madeline.phar')) { +if (!file_exists('madeline.phar') || !file_exists('madeline.phar.version') || file_get_contents('madeline.phar.version') !== file_get_contents('https://phar.madelineproto.xyz/release?v=new')) { file_put_contents('madeline.phar', file_get_contents('https://phar.madelineproto.xyz/madeline.phar?v=new')); + file_put_contents('madeline.phar.version', file_get_contents('https://phar.madelineproto.xyz/release?v=new')); } -require 'madeline.phar'; -if (trim(file_get_contents('phar://madeline.phar/.git/refs/heads/master')) !== trim(file_get_contents('https://phar.madelineproto.xyz/release?v=new'))) { - file_put_contents('madeline.phar', file_get_contents('https://phar.madelineproto.xyz/madeline.phar?v=new')); -} +require 'madeline.phar'; diff --git a/tests/makephar.sh b/tests/makephar.sh index <HASH>..<HASH> 100755 --- a/tests/makephar.sh +++ b/tests/makephar.sh @@ -41,6 +41,7 @@ ssh-add madeline_rsa git clone [email protected]:danog/MadelineProtoPhar cd MadelineProtoPhar cp ../madeline.phar . +cp ../phar.php . echo -n $TRAVIS_COMMIT > release git add -A git commit -am "Release $TRAVIS_COMMIT"
Improve phar self-update mechanism
danog_MadelineProto
train
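The phar.php change keeps a version sidecar file next to the cached artifact and only re-downloads when the published release identifier changes. The same caching scheme sketched in Python; URLs and paths here are placeholders:

```python
import os
import urllib.request

def fetch_if_stale(artifact_url, version_url, path):
    """Re-download an artifact only when its published version changed."""
    remote = urllib.request.urlopen(version_url).read()
    sidecar = path + ".version"
    local = open(sidecar, "rb").read() if os.path.exists(sidecar) else None
    if not os.path.exists(path) or local != remote:
        with open(path, "wb") as f:
            f.write(urllib.request.urlopen(artifact_url).read())
        with open(sidecar, "wb") as f:
            f.write(remote)
```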
ecfb547b05efe48fd55ee9690d8a86f0792b082e
diff --git a/lib/train/transports/ssh_connection.rb b/lib/train/transports/ssh_connection.rb index <HASH>..<HASH> 100644 --- a/lib/train/transports/ssh_connection.rb +++ b/lib/train/transports/ssh_connection.rb @@ -73,7 +73,7 @@ class Train::Transports::SSH def run_command(cmd) stdout = stderr = '' exit_status = nil - cmd.force_encoding('binary') if cmd.respond_to?(:force_encoding) + cmd.dup.force_encoding('binary') if cmd.respond_to?(:force_encoding) logger.debug("[SSH] #{self} (#{cmd})") session.open_channel do |channel| diff --git a/test/integration/tests/run_command_test.rb b/test/integration/tests/run_command_test.rb index <HASH>..<HASH> 100644 --- a/test/integration/tests/run_command_test.rb +++ b/test/integration/tests/run_command_test.rb @@ -10,6 +10,13 @@ describe 'run_command' do res.exit_status.must_equal(0) end + it 'can run frozen commands' do + res = backend.run_command('echo hello world'.freeze) + res.stdout.must_equal("hello world\n") + res.stderr.must_equal('') + res.exit_status.must_equal(0) + end + it 'can echo commands to stderr' do # TODO: Specinfra often fails on this test. # Fix and re-enable it.
bugfix: run frozen string commands via ssh
inspec_train
train
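The train fix works because Ruby's force_encoding mutates its receiver, which raises on frozen strings; duplicating first leaves the caller's object untouched. The transferable lesson is to normalize arguments into a new value rather than in place. A loose Python rendering, with the function shape invented for illustration:

```python
import subprocess

def run_command(cmd):
    # Build a normalized copy instead of mutating the argument in place.
    normalized = cmd.decode("utf-8", "replace") if isinstance(cmd, bytes) else str(cmd)
    proc = subprocess.run(normalized, shell=True, capture_output=True, text=True)
    return proc.stdout, proc.stderr, proc.returncode
```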
64f8ecd452e573863fb05e4959532566b036109c
diff --git a/src/ComplexType.php b/src/ComplexType.php index <HASH>..<HASH> 100644 --- a/src/ComplexType.php +++ b/src/ComplexType.php @@ -98,7 +98,11 @@ class ComplexType extends Type if (!$member->getNullable()) { if ($type == '\DateTime') { - $constructorSource .= ' $this->' . $name . ' = $' . $name . '->format(\DateTime::ATOM);' . PHP_EOL; + if ($this->config->get('constructorParamsDefaultToNull')) { + $constructorSource .= ' $this->' . $name . ' = $' . $name . ' ? $' . $name . '->format(\DateTime::ATOM) : null;' . PHP_EOL; + } else { + $constructorSource .= ' $this->' . $name . ' = $' . $name . '->format(\DateTime::ATOM);' . PHP_EOL; + } } else { $constructorSource .= ' $this->' . $name . ' = $' . $name . ';' . PHP_EOL; }
Fix creating constructor for DateTime and constructorParamsDefaultToNull to prevent a null pointer exception
wsdl2phpgenerator_wsdl2phpgenerator
train
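The generator fix only calls the DateTime formatter when a value was actually supplied, assigning null otherwise. The same guard distilled:

```python
from datetime import datetime

def format_optional(dt):
    """Return an ISO-8601 string for a datetime, or None if dt is None."""
    return dt.isoformat() if dt is not None else None

assert format_optional(None) is None
assert format_optional(datetime(2015, 1, 1)) == "2015-01-01T00:00:00"
```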
28d6c35d534ffc97950d087efd829223f4aabeba
diff --git a/src/Utility/Text.php b/src/Utility/Text.php index <HASH>..<HASH> 100644 --- a/src/Utility/Text.php +++ b/src/Utility/Text.php @@ -35,29 +35,34 @@ class Text * Warning: This method should not be used as a random seed for any cryptographic operations. * Instead you should use the openssl or mcrypt extensions. * + * It should also not be used to create identifiers that have security implications, such as + * 'unguessable' URL identifiers. Instead you should use `Security::randomBytes()` for that. + * * @see https://www.ietf.org/rfc/rfc4122.txt * @return string RFC 4122 UUID * @copyright Matt Farina MIT License https://github.com/lootils/uuid/blob/master/LICENSE */ public static function uuid() { + $random = function_exists('random_int') ? 'random_int' : 'mt_rand'; + return sprintf( '%04x%04x-%04x-%04x-%04x-%04x%04x%04x', // 32 bits for "time_low" - mt_rand(0, 65535), - mt_rand(0, 65535), + $random(0, 65535), + $random(0, 65535), // 16 bits for "time_mid" - mt_rand(0, 65535), + $random(0, 65535), // 12 bits before the 0100 of (version) 4 for "time_hi_and_version" - mt_rand(0, 4095) | 0x4000, + $random(0, 4095) | 0x4000, // 16 bits, 8 bits for "clk_seq_hi_res", // 8 bits for "clk_seq_low", // two most significant bits holds zero and one for variant DCE1.1 - mt_rand(0, 0x3fff) | 0x8000, + $random(0, 0x3fff) | 0x8000, // 48 bits for "node" - mt_rand(0, 65535), - mt_rand(0, 65535), - mt_rand(0, 65535) + $random(0, 65535), + $random(0, 65535), + $random(0, 65535) ); }
Improve warnings around Text::uuid() This function should not be used to generate 'secure' identifiers. While UUID v4 has a low chance of collisions it is not as foolproof as <I> fully random bytes. I've also opted to use a more secure source of random integers in PHP7 than mersenne twister, to reduce the risk if these UUIDs are used in a security-related context.
cakephp_cakephp
train
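The CakePHP patch makes two points: prefer a cryptographically secure RNG where available, and do not lean on UUIDs for unguessable tokens at all. Both expressed in Python terms:

```python
import secrets
import uuid

# A version-4 UUID built from a cryptographically secure source
# (the constructor fixes up the version and variant bits).
secure_uuid = uuid.UUID(bytes=secrets.token_bytes(16), version=4)

# For unguessable URL identifiers, prefer plain random bytes over a UUID.
url_token = secrets.token_urlsafe(32)
```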
555ba35178c3cce56640fbc4561c15a65b64a1dc
diff --git a/android/src/main/java/com/dieam/reactnativepushnotification/modules/RNPushNotificationListenerService.java b/android/src/main/java/com/dieam/reactnativepushnotification/modules/RNPushNotificationListenerService.java index <HASH>..<HASH> 100644 --- a/android/src/main/java/com/dieam/reactnativepushnotification/modules/RNPushNotificationListenerService.java +++ b/android/src/main/java/com/dieam/reactnativepushnotification/modules/RNPushNotificationListenerService.java @@ -27,6 +27,11 @@ public class RNPushNotificationListenerService extends GcmListenerService { @Override public void onMessageReceived(String from, final Bundle bundle) { JSONObject data = getPushData(bundle.getString("data")); + // Copy `twi_body` to `message` to support Twilio + if (bundle.containsKey("twi_body")) { + bundle.putString("message", bundle.getString("twi_body")); + } + if (data != null) { if (!bundle.containsKey("message")) { bundle.putString("message", data.optString("alert", "Notification received"));
Copy `twi_body` to `message` to support Twilio.
zo0r_react-native-push-notification
train
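The Twilio shim simply maps a provider-specific payload key onto the generic one the rest of the pipeline reads. The same normalization over a plain dict:

```python
def normalize_payload(bundle):
    # Copy the Twilio body onto the generic key downstream code expects.
    if "twi_body" in bundle:
        bundle["message"] = bundle["twi_body"]
    return bundle

assert normalize_payload({"twi_body": "hi"})["message"] == "hi"
```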
a3937dfd0eb4f71f28074bcd7ce7ff158334d491
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,8 @@ * Add `--color` and `--no-color` options * Add `::content` and `::shadow` to list of elements recognized by `PseudoElement` +* Fix `UnnecessaryParentReference` to not report selectors with multiple parent + references ## 0.43.2 diff --git a/lib/scss_lint/linter/unnecessary_parent_reference.rb b/lib/scss_lint/linter/unnecessary_parent_reference.rb index <HASH>..<HASH> 100644 --- a/lib/scss_lint/linter/unnecessary_parent_reference.rb +++ b/lib/scss_lint/linter/unnecessary_parent_reference.rb @@ -22,7 +22,7 @@ module SCSSLint # element { # & + & { ... } # } - return if sequence.members[1..-1].any? { |ss| sequence_starts_with_parent?(ss) } + return if sequence.members[1..-1].any? { |ss| sequence_contains_parent_reference?(ss) } # Special case: allow an isolated parent to appear if it is part of a # comma sequence of more than one sequence, as this could be used to DRY @@ -45,5 +45,10 @@ module SCSSLint first.is_a?(Sass::Selector::Parent) && first.suffix.nil? # Ignore concatenated selectors, like `&-something` end + + def sequence_contains_parent_reference?(simple_sequence) + return unless simple_sequence.is_a?(Sass::Selector::SimpleSequence) + simple_sequence.members.any? { |s| s.is_a?(Sass::Selector::Parent) } + end end end diff --git a/spec/scss_lint/linter/unnecessary_parent_reference_spec.rb b/spec/scss_lint/linter/unnecessary_parent_reference_spec.rb index <HASH>..<HASH> 100644 --- a/spec/scss_lint/linter/unnecessary_parent_reference_spec.rb +++ b/spec/scss_lint/linter/unnecessary_parent_reference_spec.rb @@ -95,4 +95,14 @@ describe SCSSLint::Linter::UnnecessaryParentReference do it { should_not report_lint } end + + context 'when an ampersand is used in concatentation following an ampersand' do + let(:scss) { <<-SCSS } + .icon { + & &-small {} + } + SCSS + + it { should_not report_lint } + end end
Fix UnnecessaryParentReference bug with multiple parent refs The linter would erroneously report an error for selectors with multiple parent references, e.g. .form_field { & &_input.qdn_input { ... } } Fixes #<I>
sds_scss-lint
train
17e751e1a4206a5c8c25b9e70a545c36f82f8931
diff --git a/wandb/config.py b/wandb/config.py index <HASH>..<HASH> 100644 --- a/wandb/config.py +++ b/wandb/config.py @@ -153,22 +153,25 @@ class Config(object): def update(self, params): if not isinstance(params, dict): - # handle tensorflow flags - if not hasattr(params, '__dict__'): - raise TypeError("config must be a dict or have a __dict__ attribute.") + # Handle some cases where params is not a dictionary + # by trying to convert it into a dictionary - # for older tensorflow flags (pre 1.4) - if "__flags" in dir(params): - if not params.__dict__['__parsed']: + if not hasattr(params, '__dict__'): + raise TypeError( + "config must be a dict or have a __dict__ attribute.") + if "__flags" in vars(params): + # for older tensorflow flags (pre 1.4) + if not '__parsed' in vars(params): params._parse_flags() - params = params.__dict__['__flags'] + params = vars(params)['__flags'] + elif "__wrapped" in vars(params): + # newer tensorflow flags (post 1.4) uses absl.flags in a wrapper + params = {name: params[name].value for name in dir(params)} else: - # newer tensorflow (post 1.4) uses absl.flags - params = dict(params.__dict__) - # the following was giving me errors that "Namespace is not subscriptable" - # so I replaced it with the above. - # {name: params[name].value for name in dir(params)} - + # params is a Namespace object (argparse) + # or something else + params = vars(params) + if not isinstance(params, dict): raise Error('Expected dict but received %s' % params) for key, val in params.items():
Fix the case where tensorflow args post tensorflow <I> is passed in to config object
wandb_client
train
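The wandb fix dispatches on the shape of the incoming object: old TensorFlow flags hide values under `__flags`, absl-wrapped flags expose `.value` attributes, and an argparse Namespace just needs vars(). The dispatch, mirroring the diff (the extra `_parse_flags()` call for unparsed old-style flags is omitted here):

```python
def params_to_dict(params):
    """Best-effort conversion of a flags/namespace object to a plain dict."""
    if isinstance(params, dict):
        return params
    if not hasattr(params, "__dict__"):
        raise TypeError("params must be a dict or have a __dict__ attribute")
    attrs = vars(params)
    if "__flags" in attrs:        # pre-1.4 tensorflow flags
        return attrs["__flags"]
    if "__wrapped" in attrs:      # absl-backed tensorflow flags
        return {name: params[name].value for name in dir(params)}
    return attrs                  # argparse.Namespace and similar
```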
20800d4391a1c5313d6f5c1d505d0cbf3f9d6ab8
diff --git a/gwpy/timeseries/timeseries.py b/gwpy/timeseries/timeseries.py index <HASH>..<HASH> 100644 --- a/gwpy/timeseries/timeseries.py +++ b/gwpy/timeseries/timeseries.py @@ -476,49 +476,59 @@ class TimeSeries(TimeSeriesBase): fftlength=fftlength, overlap=overlap, **kwargs) - def fftgram(self, stride): - """Calculate the Fourier-gram of this `TimeSeries`. - At every ``stride``, a single, complex FFT is calculated. + def fftgram(self, fftlength, overlap=0, window='hann', **kwargs): + """Calculate the Fourier-gram of this `TimeSeries`. - Parameters - ---------- - stride : `float` - number of seconds in single PSD (column of spectrogram) + At every ``stride``, a single, complex FFT is calculated. - Returns - ------- - fftgram : `~gwpy.spectrogram.Spectrogram` - a Fourier-gram - """ - from ..spectrogram import Spectrogram + Parameters + ---------- + fftlength : `float` + number of seconds in single FFT. - fftlength = stride - dt = stride - df = 1/fftlength - stride *= self.sample_rate.value - # get size of Spectrogram - nsteps = int(self.size // stride) - # get number of frequencies - nfreqs = int(fftlength*self.sample_rate.value) - - # generate output spectrogram - dtype = numpy.complex - out = Spectrogram(numpy.zeros((nsteps, nfreqs), dtype=dtype), - name=self.name, t0=self.t0, f0=0, df=df, - dt=dt, copy=False, unit=self.unit, dtype=dtype) - # stride through TimeSeries, recording FFTs as columns of Spectrogram - for step in range(nsteps): - # find step TimeSeries - idx = stride * step - idx_end = idx + stride - stepseries = self[idx:idx_end] - # calculated FFT and stack - stepfft = stepseries.fft() - out[step] = stepfft.value - if step == 0: - out.frequencies = stepfft.frequencies - return out + overlap : `float`, optional + number of seconds of overlap between FFTs, defaults to the + recommended overlap for the given window (if given), or 0 + + window : `str`, `numpy.ndarray`, optional + window function to apply to timeseries prior to FFT, + see :func:`scipy.signal.get_window` for details on acceptable + + + Returns + ------- + a Fourier-gram + """ + from ..spectrogram import Spectrogram + from scipy.signal import spectrogram + + noverlap = int(overlap * self.sample_rate.value) + nfft = int(fftlength * self.sample_rate.value) + nstride = nfft - noverlap + + # get size of Spectrogram + ntimes = int((self.size - nstride) / nstride) + nfreqs = int(nfft / 2 + 1) + + # generate output spectrogram + dtype = numpy.complex + out = Spectrogram(numpy.zeros((ntimes, nfreqs), dtype=dtype), + name=self.name, t0=self.t0, f0=0, df=1./fftlength, + dt=overlap, copy=False, unit=self.unit, dtype=dtype) + + [frequencies, times, values] = spectrogram(self, + fs=self.sample_rate.value, + window=window, + nperseg=nfft, + noverlap=noverlap, + mode='complex') + + # The shape is incorrect for the Spectrogram object + for freq_idx in range(frequencies.size): + out[:, freq_idx] = values[freq_idx, :] + + return out @_update_doc_with_fft_methods def spectral_variance(self, stride, fftlength=None, overlap=None,
Update to fftgram: it allows passing overlap and window, and uses the scipy.signal.spectrogram method
gwpy_gwpy
train
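The gwpy rewrite delegates the complex STFT to scipy.signal.spectrogram and then transposes, since scipy returns shape (nfreqs, ntimes) while the Spectrogram object stores (ntimes, nfreqs). The scipy call on synthetic data:

```python
import numpy as np
from scipy.signal import spectrogram

fs, fftlength, overlap = 256.0, 1.0, 0.5
data = np.random.randn(int(60 * fs))
nperseg = int(fftlength * fs)
noverlap = int(overlap * fs)
freqs, times, values = spectrogram(
    data, fs=fs, window="hann", nperseg=nperseg,
    noverlap=noverlap, mode="complex")
out = values.T  # one complex FFT per stride, shape (ntimes, nfreqs)
```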
914bdf1e25781b4d67ac0bfb98cc788e1cf50150
diff --git a/src/de/lmu/ifi/dbs/elki/evaluation/AbstractClassifierEvaluation.java b/src/de/lmu/ifi/dbs/elki/evaluation/AbstractClassifierEvaluation.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/evaluation/AbstractClassifierEvaluation.java +++ b/src/de/lmu/ifi/dbs/elki/evaluation/AbstractClassifierEvaluation.java @@ -13,6 +13,7 @@ import java.io.File; import java.io.FileDescriptor; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.IOException; import java.io.PrintStream; import java.util.List; @@ -46,9 +47,10 @@ public abstract class AbstractClassifierEvaluation<O extends DatabaseObject, L e /** * @param normalization normalization is unused + * @throws IOException */ @Override - public final void output(File out, Normalization<O> normalization, List<AttributeSettings> settings) throws UnableToComplyException { + public final void output(File out, Normalization<O> normalization, List<AttributeSettings> settings) throws UnableToComplyException, IOException { PrintStream output; try { out.getParentFile().mkdirs(); @@ -65,7 +67,7 @@ public abstract class AbstractClassifierEvaluation<O extends DatabaseObject, L e output(output, normalization, settings); } - public void output(PrintStream outStream, Normalization<O> normalization, List<AttributeSettings> settings) throws UnableToComplyException { + public void output(PrintStream outStream, Normalization<O> normalization, List<AttributeSettings> settings) throws UnableToComplyException, IOException { writeHeader(outStream, settings, null); outStream.print("Evaluating "); outStream.println(classifier.getClass().getName()); diff --git a/src/de/lmu/ifi/dbs/elki/evaluation/ConfusionMatrixBasedEvaluation.java b/src/de/lmu/ifi/dbs/elki/evaluation/ConfusionMatrixBasedEvaluation.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/evaluation/ConfusionMatrixBasedEvaluation.java +++ b/src/de/lmu/ifi/dbs/elki/evaluation/ConfusionMatrixBasedEvaluation.java @@ -25,7 +25,7 @@ public class ConfusionMatrixBasedEvaluation<O extends DatabaseObject, L extends /** * Holds the used EvaluationProcedure. */ - private EvaluationProcedure<O, C, L> evaluationProcedure; + private EvaluationProcedure<O, L, C> evaluationProcedure; /** * Provides an evaluation based on the given confusion matrix. 
@@ -37,7 +37,7 @@ public class ConfusionMatrixBasedEvaluation<O extends DatabaseObject, L extends * @param testset the test set this evaluation is based on * @param evaluationProcedure the evaluation procedure used */ - public ConfusionMatrixBasedEvaluation(ConfusionMatrix confusionmatrix, C classifier, Database<O> database, Database<O> testset, EvaluationProcedure<O, C, L> evaluationProcedure) { + public ConfusionMatrixBasedEvaluation(ConfusionMatrix confusionmatrix, C classifier, Database<O> database, Database<O> testset, EvaluationProcedure<O, L, C> evaluationProcedure) { super(database, testset, classifier); this.confusionmatrix = confusionmatrix; this.evaluationProcedure = evaluationProcedure; diff --git a/src/de/lmu/ifi/dbs/elki/evaluation/procedure/ClassifierEvaluationProcedure.java b/src/de/lmu/ifi/dbs/elki/evaluation/procedure/ClassifierEvaluationProcedure.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/evaluation/procedure/ClassifierEvaluationProcedure.java +++ b/src/de/lmu/ifi/dbs/elki/evaluation/procedure/ClassifierEvaluationProcedure.java @@ -28,10 +28,10 @@ import java.util.SortedSet; * * @author Arthur Zimek */ -public class ClassifierEvaluationProcedure<O extends DatabaseObject, L extends ClassLabel, C extends Classifier<O, L>> extends AbstractParameterizable implements EvaluationProcedure<O, C, L> { +public class ClassifierEvaluationProcedure<O extends DatabaseObject, L extends ClassLabel, C extends Classifier<O, L>> extends AbstractParameterizable implements EvaluationProcedure<O, L, C> { /** - * Holds whether a test set hs been provided. + * Holds whether a test set has been provided. */ private boolean testSetProvided = false; diff --git a/src/de/lmu/ifi/dbs/elki/evaluation/procedure/EvaluationProcedure.java b/src/de/lmu/ifi/dbs/elki/evaluation/procedure/EvaluationProcedure.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/evaluation/procedure/EvaluationProcedure.java +++ b/src/de/lmu/ifi/dbs/elki/evaluation/procedure/EvaluationProcedure.java @@ -16,7 +16,7 @@ import de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizable; * * @author Arthur Zimek */ -public interface EvaluationProcedure<O extends DatabaseObject,A extends Algorithm<O>,L extends ClassLabel> extends Parameterizable +public interface EvaluationProcedure<O extends DatabaseObject, L extends ClassLabel, A extends Algorithm<O>> extends Parameterizable { /** * Message to indicate failure to call either {@link #set(Database, Database) set(trainingset, testset)}
Small API clean up by using the same generics ordering.
elki-project_elki
train