column   type           values
hash     stringlengths  40 .. 40
diff     stringlengths  131 .. 114k
message  stringlengths  7 .. 980
project  stringlengths  5 .. 67
split    stringclasses  1 value
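Each record pairs a 40-character commit hash with its unified diff, commit message, project slug, and split label; the rows below repeat in that column order. As a minimal sketch of how such a table could be loaded and inspected, assuming it is published through the Hugging Face datasets library (the dataset path used here is a hypothetical placeholder, not a real name):

# Sketch only: assumes the rows below ship as a Hugging Face dataset;
# "example/commit-diffs" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("example/commit-diffs", split="train")  # 'split' holds the single value "train"

for row in ds.select(range(3)):
    assert len(row["hash"]) == 40               # full 40-char git SHA-1
    title = row["message"].splitlines()[0]      # first line of the commit message
    print(row["project"], "-", title[:72])
    # row["diff"] contains the raw unified diff text (131 chars up to ~114k)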
620c95599f5a8e500439ef546861868de2d34807
diff --git a/src/main/java/org/thymeleaf/standard/expression/OGNLVariableExpressionEvaluator.java b/src/main/java/org/thymeleaf/standard/expression/OGNLVariableExpressionEvaluator.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/thymeleaf/standard/expression/OGNLVariableExpressionEvaluator.java +++ b/src/main/java/org/thymeleaf/standard/expression/OGNLVariableExpressionEvaluator.java @@ -164,8 +164,9 @@ public final class OGNLVariableExpressionEvaluator // The root object on which we will evaluate expressions will depend on whether a selection target is // active or not... - final IVariablesMap variablesMap = processingContext.getVariablesMap(); - final Object evaluationRoot = (useSelectionAsRoot? variablesMap.getSelectionTarget() : variablesMap); + final IVariablesMap variablesMap = processingContext.getVariables(); + final Object evaluationRoot = + (useSelectionAsRoot && variablesMap.hasSelectionTarget()? variablesMap.getSelectionTarget() : variablesMap); // Execute the expression! final Object result;
Fixed bad specification of selection target in OGNL evaluator
thymeleaf_thymeleaf
train
f8338149322994d501ba82121c83d37b192aab6e
diff --git a/module/__init__.py b/module/__init__.py index <HASH>..<HASH> 100644 --- a/module/__init__.py +++ b/module/__init__.py @@ -25,8 +25,8 @@ XCB_CONN_CLOSED_EXT_NOTSUPPORTED = C.XCB_CONN_CLOSED_EXT_NOTSUPPORTED XCB_CONN_CLOSED_MEM_INSUFFICIENT = C.XCB_CONN_CLOSED_MEM_INSUFFICIENT XCB_CONN_CLOSED_REQ_LEN_EXCEED = C.XCB_CONN_CLOSED_REQ_LEN_EXCEED XCB_CONN_CLOSED_PARSE_ERR = C.XCB_CONN_CLOSED_PARSE_ERR -XCB_CONN_CLOSED_INVALID_SCREEN = C.XCB_CONN_CLOSED_INVALID_SCREEN -XCB_CONN_CLOSED_FDPASSING_FAILED = C.XCB_CONN_CLOSED_FDPASSING_FAILED +# XCB_CONN_CLOSED_INVALID_SCREEN = C.XCB_CONN_CLOSED_INVALID_SCREEN +# XCB_CONN_CLOSED_FDPASSING_FAILED = C.XCB_CONN_CLOSED_FDPASSING_FAILED def popcount(n): return bin(n).count('1') @@ -50,11 +50,11 @@ class ConnectionException(XcffibException): 'accepts.'), XCB_CONN_CLOSED_PARSE_ERR: ( 'Connection closed, error during parsing display string.'), - XCB_CONN_CLOSED_INVALID_SCREEN: ( - 'Connection closed because the server does not have a screen ' - 'matching the display.'), - XCB_CONN_CLOSED_FDPASSING_FAILED: ( - 'Connection closed because some FD passing operation failed'), +# XCB_CONN_CLOSED_INVALID_SCREEN: ( +# 'Connection closed because the server does not have a screen ' +# 'matching the display.'), +# XCB_CONN_CLOSED_FDPASSING_FAILED: ( +# 'Connection closed because some FD passing operation failed'), } def __init__(self, err): diff --git a/module/ffi.py b/module/ffi.py index <HASH>..<HASH> 100644 --- a/module/ffi.py +++ b/module/ffi.py @@ -20,8 +20,8 @@ CONSTANTS = [ "XCB_CONN_CLOSED_MEM_INSUFFICIENT", "XCB_CONN_CLOSED_REQ_LEN_EXCEED", "XCB_CONN_CLOSED_PARSE_ERR", - "XCB_CONN_CLOSED_INVALID_SCREEN", - "XCB_CONN_CLOSED_FDPASSING_FAILED", +# "XCB_CONN_CLOSED_INVALID_SCREEN", +# "XCB_CONN_CLOSED_FDPASSING_FAILED", ] diff --git a/setup.cfg b/setup.cfg index <HASH>..<HASH> 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,5 +2,6 @@ # E302: number of blank lines # E501: line too long # F401: module imported but not used +# E122: commented out code for supporting xcb == 1.8 [flake8] -ignore = E301,E302,E501,F401 +ignore = E301,E302,E501,F401,E122
be xcb == <I> compatible for now. travis-ci doesn't have a version of trusty yet; this can be reverted when <URL>
tych0_xcffib
train
a07fa8541b3d644732e0dd517e5d3803d6933eb8
diff --git a/lib/OpenStackExtract/MiqOpenStackVm/MiqOpenStackImage.rb b/lib/OpenStackExtract/MiqOpenStackVm/MiqOpenStackImage.rb index <HASH>..<HASH> 100644 --- a/lib/OpenStackExtract/MiqOpenStackVm/MiqOpenStackImage.rb +++ b/lib/OpenStackExtract/MiqOpenStackVm/MiqOpenStackImage.rb @@ -1,5 +1,4 @@ require 'miq_tempfile' -require_relative '../../MiqVm/MiqVm' require_relative 'MiqOpenStackCommon' class MiqOpenStackImage
Remove require for MiqVm to avoid circular require.
ManageIQ_manageiq-smartstate
train
bfa46f47ed700c3cb4afd56af5e4f6568dc9d0d4
diff --git a/telethon/utils.py b/telethon/utils.py index <HASH>..<HASH> 100644 --- a/telethon/utils.py +++ b/telethon/utils.py @@ -51,6 +51,8 @@ mimetypes.add_type('audio/aac', '.aac') mimetypes.add_type('audio/ogg', '.ogg') mimetypes.add_type('audio/flac', '.flac') +mimetypes.add_type('application/x-tgsticker', '.tgs') + USERNAME_RE = re.compile( r'@|(?:https?://)?(?:www\.)?(?:telegram\.(?:me|dog)|t\.me)/(@|joinchat/)?' )
Register application/x-tgsticker to mimetypes
LonamiWebs_Telethon
train
3847ecb00ededdb93eb9b9555cbae0b74ca81bff
diff --git a/lib/node_modules/@stdlib/utils/find/lib/find.js b/lib/node_modules/@stdlib/utils/find/lib/find.js index <HASH>..<HASH> 100644 --- a/lib/node_modules/@stdlib/utils/find/lib/find.js +++ b/lib/node_modules/@stdlib/utils/find/lib/find.js @@ -146,7 +146,7 @@ function find( arr, options, clbk ) { // eslint-disable-line no-redeclare // Search moving from begin-to-end [0,1,...]: for ( i = 0; i < len; i++ ) { v = arr[ i ]; - if ( cb( v, i, arr ) ) { + if ( cb( v, i, arr ) ) { // eslint-disable-line callback-return if ( mode === 2 ) { out.push( [ i, v ] ); } else if ( mode === 1 ) { @@ -166,7 +166,7 @@ function find( arr, options, clbk ) { // eslint-disable-line no-redeclare k = -k; for ( i = len-1; i >= 0; i-- ) { v = arr[ i ]; - if ( cb( v, i, arr ) ) { + if ( cb( v, i, arr ) ) { // eslint-disable-line callback-return if ( mode === 2 ) { out.push( [ i, v ] ); } else if ( mode === 1 ) {
Disable callback-return lint rule
stdlib-js_stdlib
train
db84cc07bfcc2d183c9b285c320274932931df05
diff --git a/ipydex/core.py b/ipydex/core.py index <HASH>..<HASH> 100644 --- a/ipydex/core.py +++ b/ipydex/core.py @@ -373,15 +373,16 @@ class TBPrinter(object): self.TB = ultratb.FormattedTB(mode="Context", color_scheme='Linux', call_pdb=False) - def printout(self, end_offset=0): + def printout(self, end_offset=0, prefix="\n"): """ + :param prefix: string which is printed befor the actual TB (default: "\n") :param end_offset: 0 means print all, 1 means print parts[:-1] etc :return: """ # note that the kwarg `tb_offset` of the FormattedTB constructor is refers to the start of the list tb_parts = self.TB.structured_traceback(self.excType, self.excValue, self.traceback) - text = "\n".join(tb_parts[:len(tb_parts)-1-end_offset]+[tb_parts[-1]]) + text = "\n".join([prefix]+tb_parts[:len(tb_parts)-1-end_offset]+[tb_parts[-1]]) print(text) @@ -740,14 +741,7 @@ def get_whole_assignment_expression(line, varname, seq_type): :return: """ - if sys.version_info[0] >= 3: - # in python3 line is already unicode - uline = line - else: - # uses sys.getdefaultencoding( - uline = line.decode() - - tokens = list(tk.generate_tokens(io.StringIO(uline).readline)) + tokens = line_to_token_list(line) if issubclass(seq_type, tuple): L, R = "()" @@ -766,10 +760,6 @@ def get_whole_assignment_expression(line, varname, seq_type): i_start, i_end = None, None - # for python2-compatibility explicitly convert to named tuple - TokenInfo = collections.namedtuple("TokenInfo", ["type", "string", "start", "end", "line"]) - tokens = [TokenInfo(*t) for t in tokens] - for i, t in enumerate(tokens): if t.type == tk.NAME and t.string == varname: search_mode = 1 @@ -844,6 +834,24 @@ def get_carg_vars_from_frame(frame, seq_type): return results +def line_to_token_list(line): + + if sys.version_info[0] >= 3: + # in python3 line is already unicode + uline = line + else: + # uses sys.getdefaultencoding( + uline = line.decode() + + tokens = list(tk.generate_tokens(io.StringIO(uline).readline)) + + # for python2-compatibility explicitly convert to named tuple + TokenInfo = collections.namedtuple("TokenInfo", ["type", "string", "start", "end", "line"]) + tokens = [TokenInfo(*t) for t in tokens] + + return tokens + +
small refactoring (line_to_token_list)
cknoll_ipydex
train
553661cb69073087abc063e630119c88a6388552
diff --git a/src/com/google/javascript/jscomp/gwt/client/GwtRunner.java b/src/com/google/javascript/jscomp/gwt/client/GwtRunner.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/gwt/client/GwtRunner.java +++ b/src/com/google/javascript/jscomp/gwt/client/GwtRunner.java @@ -45,6 +45,7 @@ import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -79,6 +80,7 @@ public final class GwtRunner implements EntryPoint { String[] entryPoint; String env; boolean exportLocalPropertyDefinitions; + String[] extraAnnotationNames; boolean generateExports; String languageIn; String languageOut; @@ -117,6 +119,7 @@ public final class GwtRunner implements EntryPoint { defaultFlags.entryPoint = null; defaultFlags.env = "BROWSER"; defaultFlags.exportLocalPropertyDefinitions = false; + defaultFlags.extraAnnotationNames = null; defaultFlags.generateExports = false; defaultFlags.languageIn = "ECMASCRIPT6"; defaultFlags.languageOut = "ECMASCRIPT5"; @@ -367,6 +370,10 @@ public final class GwtRunner implements EntryPoint { options.setDefineReplacements(flags.defines.asMap()); } + if (flags.extraAnnotationNames != null) { + options.setExtraAnnotationNames(Arrays.asList(flags.extraAnnotationNames)); + } + options.setAngularPass(flags.angularPass); options.setApplyInputSourceMaps(flags.applyInputSourceMaps); options.setChecksOnly(flags.checksOnly);
Add extraAnnotationNames to Closure's GWT runner. Required by Chrome, so that it can use Closure-js to compress its built-in JavaScript resources. ------------- Created by MOE: <URL>
google_closure-compiler
train
9c8b9f37dcb2a69ea578d251a2680c815163db80
diff --git a/src/Offer/IriOfferIdentifierFactory.php b/src/Offer/IriOfferIdentifierFactory.php index <HASH>..<HASH> 100644 --- a/src/Offer/IriOfferIdentifierFactory.php +++ b/src/Offer/IriOfferIdentifierFactory.php @@ -2,7 +2,7 @@ namespace CultuurNet\UDB3\Offer; -use CultuurNet\UDB3\Variations\Command\ValidationException; +use RuntimeException; use ValueObjects\Web\Url; class IriOfferIdentifierFactory implements IriOfferIdentifierFactoryInterface @@ -46,10 +46,8 @@ class IriOfferIdentifierFactory implements IriOfferIdentifierFactoryInterface ); if (0 === $match) { - throw new ValidationException( - [ - 'The given URL can not be used. It might not be a cultural event, or no integration is provided with the system the cultural event is located at.', - ] + throw new RuntimeException( + 'The given URL can not be used. It might not be a cultural event, or no integration is provided with the system the cultural event is located at.' ); } diff --git a/test/Offer/IriOfferIdentifierFactoryTest.php b/test/Offer/IriOfferIdentifierFactoryTest.php index <HASH>..<HASH> 100644 --- a/test/Offer/IriOfferIdentifierFactoryTest.php +++ b/test/Offer/IriOfferIdentifierFactoryTest.php @@ -2,8 +2,8 @@ namespace CultuurNet\UDB3\Offer; -use CultuurNet\UDB3\Variations\Command\ValidationException; use PHPUnit\Framework\TestCase; +use RuntimeException; use ValueObjects\Web\Url; class IriOfferIdentifierFactoryTest extends TestCase @@ -31,10 +31,7 @@ class IriOfferIdentifierFactoryTest extends TestCase */ public function it_throws_an_error_when_using_a_malformed_url() { - $this->expectException( - ValidationException::class, - 'Invalid data' - ); + $this->expectException(RuntimeException::class); $this->iriOfferIdentifierFactory->fromIri( Url::fromNative('https://du.de/sweet') @@ -46,10 +43,7 @@ class IriOfferIdentifierFactoryTest extends TestCase */ public function it_throws_an_error_when_using_an_unsupported_offer_type() { - $this->expectException( - ValidationException::class, - 'Invalid data' - ); + $this->expectException(RuntimeException::class); $this->iriOfferIdentifierFactory->fromIri( Url::fromNative('https://culudb-silex.dev:8080/kwiet/foo-bar')
Replace wrongly namespaced validation exception with simple runtime exception
cultuurnet_udb3-php
train
3df1c1072488ef5c32a905456ddff6d4fb5f3959
diff --git a/openquake/engine/engine.py b/openquake/engine/engine.py index <HASH>..<HASH> 100644 --- a/openquake/engine/engine.py +++ b/openquake/engine/engine.py @@ -244,7 +244,7 @@ def job_from_file(job_ini, job_id, username, **kw): try: oq = readinput.get_oqparam(job_ini, hc_id=hc_id) except Exception: - logs.dbcmd('finish', job_id, 'failed') + logs.dbcmd('finish', job_id, 'deleted') raise if 'calculation_mode' in kw: oq.calculation_mode = kw.pop('calculation_mode') diff --git a/openquake/server/db/actions.py b/openquake/server/db/actions.py index <HASH>..<HASH> 100644 --- a/openquake/server/db/actions.py +++ b/openquake/server/db/actions.py @@ -352,8 +352,8 @@ def del_calc(db, job_id, user, force=False): return {"error": 'Cannot delete calculation %d:' ' ID does not exist' % job_id} - deleted = db("UPDATE job SET status='deleted' WHERE id=?x AND user_name=?x", - job_id, user).rowcount + deleted = db("UPDATE job SET status='deleted' WHERE id=?x AND " + "user_name=?x", job_id, user).rowcount if not deleted: return {"error": 'Cannot delete calculation %d: it belongs to ' '%s and you are %s' % (job_id, owner, user)}
Changed the state from failed to deleted [skip hazardlib][demos] Former-commit-id: <I>a<I>f<I>e4dfd<I>bba<I>b<I>e<I>
gem_oq-engine
train
e8bad25a0177f62a1f6b74f3471bee447221f666
diff --git a/public/js/editors/panel.js b/public/js/editors/panel.js index <HASH>..<HASH> 100644 --- a/public/js/editors/panel.js +++ b/public/js/editors/panel.js @@ -12,7 +12,7 @@ var editorModes = { processing: 'text/x-csrc' }; -var badChars = new RegExp('\u200B', 'g'); +var badChars = new RegExp('[\u200B\u0080-\u00a0]', 'g'); if (jsbin.settings.editor.tabMode === 'default') { CodeMirror.keyMap.basic.Tab = undefined; @@ -369,7 +369,7 @@ Panel.prototype = { if (content === undefined) content = ''; this.controlButton.toggleClass('hasContent', !!content.trim().length); this.codeSet = true; - this.editor.setCode(content); + this.editor.setCode(content.replace(badChars, '')); } }, codeSet: false,
fixed #<I>. Remove dodgy spaces that cause jshint barfage
jsbin_jsbin
train
d0038e60178f3097d99868e20793d7f912bb236e
diff --git a/lib/rrule.js b/lib/rrule.js index <HASH>..<HASH> 100644 --- a/lib/rrule.js +++ b/lib/rrule.js @@ -183,8 +183,8 @@ RRule.prototype.next = function(after) { for(var i=0; i < this.exceptions.length; i++) { var exdate = this.exceptions[i]; if((exdate.valueOf() == nextInLocal.valueOf()) - || (exdate.date_only && y(exdate) == y(nextInLocal) - && m(exdate) == m(nextInLocal) && d(exdate) == d(nextInLocal))) { + || (exdate.date_only && y(to_utc_date(exdate)) == y(nextInLocal) + && m(to_utc_date(exdate)) == m(nextInLocal) && d(to_utc_date(exdate)) == d(nextInLocal))) { after = next; continue NextOccurs; } @@ -204,11 +204,11 @@ RRule.prototype.next = function(after) { this.count_end = this.nextOccurences(this.rule.COUNT).pop(); } - if(next > this.count_end) + if(next > to_utc_date(this.count_end)) return null; } - if(this.rule.UNTIL && next > this.rule.UNTIL) + if(this.rule.UNTIL && next > to_utc_date(this.rule.UNTIL)) return null; return from_utc_date(next);
fix timezone issues in rrule
tritech_node-icalendar
train
b21c438513640d722d839a0a04a667d651b883a7
diff --git a/src/main/java/org/minimalj/frontend/form/Form.java b/src/main/java/org/minimalj/frontend/form/Form.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/minimalj/frontend/form/Form.java +++ b/src/main/java/org/minimalj/frontend/form/Form.java @@ -427,6 +427,7 @@ public class Form<T> { if (formElementPath.equals(dependedPath) || formElementPath.startsWith(dependedPath) && formElementPath.charAt(dependedPath.length()) == '.') { Object newDependedValue = formElement.getProperty().getValue(object); formElement.setValue(newDependedValue); + changed(formElement); } } }
Form: make chains of Updater possible
BrunoEberhard_minimal-j
train
5a17d0ada01559524f363fce607caaddea338e15
diff --git a/npm-graph.js b/npm-graph.js index <HASH>..<HASH> 100644 --- a/npm-graph.js +++ b/npm-graph.js @@ -15,40 +15,37 @@ var isNotBuiltin = function (id) { module.exports = function (file, name, cb, opts) { var allDeps = {}; opts = opts || {}; - try { - mdeps(file, { filter: isNotBuiltin }).on('data', function (o) { - allDeps[o.id] = o.deps; - }).on('error', function (err) { - // hopefully we can get these in the future without 'end' not happening - console.warn(err.message); - }).on('end', function () { - //console.log(JSON.stringify(allDeps, null, '\t')); - // build a dependency tree from the flat mdeps list by recursing - var topTree = { name: name }; - var traverse = function (depObj, loc) { - loc.deps = loc.deps || {}; - Object.keys(depObj).sort().forEach(function (key) { - if (!opts.showLocal && ['\\', '/', '.'].indexOf(key[0]) >= 0) { - // keep local deps private, but keep inspecting them for modules - traverse(allDeps[depObj[key]] || {}, loc); - } - else { - // put new modules under new headers under current location - loc.deps[key] = { name: key }; - traverse(allDeps[depObj[key]] || {}, loc.deps[key]); - } - }); - }; - traverse(allDeps[file], topTree); + mdeps(file, { filter: isNotBuiltin }).on('data', function (o) { + allDeps[o.id] = o.deps; + }).on('error', function (err) { + // hopefully we can get these in the future without 'end' not happening + console.warn(err.message); + }).on('end', function () { + //console.log(JSON.stringify(allDeps, null, '\t')); + + // build a dependency tree from the flat mdeps list by recursing + var topTree = { name: name }; + var traverse = function (depObj, loc) { + loc.deps = loc.deps || {}; + Object.keys(depObj).sort().forEach(function (key) { + if (!opts.showLocal && ['\\', '/', '.'].indexOf(key[0]) >= 0) { + // keep local deps private, but keep inspecting them for modules + traverse(allDeps[depObj[key]] || {}, loc); + } + else { + // put new modules under new headers under current location + loc.deps[key] = { name: key }; + traverse(allDeps[depObj[key]] || {}, loc.deps[key]); + } + }); + }; + traverse(allDeps[file], topTree); + + var filterFn = function (o) { + return opts.showBuiltins || isNotBuiltin(o.name); + }; + cb(null, topiary(topTree, 'deps', shapeFn, filterFn)); + }); - var filterFn = function (o) { - return opts.showBuiltins || isNotBuiltin(o.name); - }; - cb(null, topiary(topTree, 'deps', shapeFn, filterFn)); - }); - } - catch (e) { - cb(e); - } };
no need to try-catch wrap anymore, all errors that we expect are stream errors from failed resolves
clux_npm-graph
train
f78fcce8b79cbf5b4be07c951e7a2ff7d2d40135
diff --git a/helpers/TbHtml.php b/helpers/TbHtml.php index <HASH>..<HASH> 100644 --- a/helpers/TbHtml.php +++ b/helpers/TbHtml.php @@ -1945,6 +1945,7 @@ EOD; $htmlOptions = self::addClassName('navbar-' . $style, $htmlOptions); $innerOptions = self::popOption('innerOptions', $htmlOptions, array()); $innerOptions = self::addClassName('navbar-inner', $innerOptions); + ob_start(); echo parent::openTag('div', $htmlOptions) . PHP_EOL; echo parent::tag('div', $innerOptions, $content) . PHP_EOL; echo '</div>' . PHP_EOL; @@ -2947,4 +2948,4 @@ EOD; { return 'tb' . self::$_counter++; } -} \ No newline at end of file +}
Forgot the ob_start() in the navbar helper method
crisu83_yiistrap
train
86bcf04b18565b05f0ba1630a180fb69b3978465
diff --git a/test/unit/model/sharedStimulus/export/SharedStimulusCSSExporterTest.php b/test/unit/model/sharedStimulus/export/SharedStimulusCSSExporterTest.php index <HASH>..<HASH> 100644 --- a/test/unit/model/sharedStimulus/export/SharedStimulusCSSExporterTest.php +++ b/test/unit/model/sharedStimulus/export/SharedStimulusCSSExporterTest.php @@ -78,15 +78,8 @@ class SharedStimulusCSSExporterTest extends TestCase $sharedStimulusCSSExporterService = $this->getPreparedServiceInstance($fileSystemMock); $sharedStimulusCSSExporterService->pack(new core_kernel_classes_Resource("dummyUri"), $link, $this->zipArchive); - if (!count($fileNames)) { - $this->assertEquals(0, $this->zipArchive->numFiles); - } - - if (count($fileNames)) { - $zippedFiles = $this->getZippedFilesList($this->zipArchive); - - $this->assertEquals($expectedZippedFiles, $zippedFiles); - } + $zippedFiles = $this->getZippedFilesList($this->zipArchive); + $this->assertEquals($expectedZippedFiles, $zippedFiles); } public function packTestDataProvider(): array
test: refactor SharedStimulusCSSExporterTest
oat-sa_extension-tao-mediamanager
train
e86e8c88b0f1e03b6b530c8995f8dc22a3b5c5bd
diff --git a/ryu/lib/packet/bfd.py b/ryu/lib/packet/bfd.py index <HASH>..<HASH> 100644 --- a/ryu/lib/packet/bfd.py +++ b/ryu/lib/packet/bfd.py @@ -80,6 +80,7 @@ BFD Control packet format import binascii import hashlib import random +import six import struct from . import packet_base @@ -240,7 +241,7 @@ class bfd(packet_base.PacketBase): flags = flags & 0x3f if flags & BFD_FLAG_AUTH_PRESENT: - (auth_type,) = struct.unpack_from('!B', buf[cls._PACK_STR_LEN]) + auth_type = six.indexbytes(buf, cls._PACK_STR_LEN) auth_cls = cls._auth_parsers[auth_type].\ parser(buf[cls._PACK_STR_LEN:])[0] else: @@ -396,8 +397,7 @@ class SimplePassword(BFDAuth): (auth_type, auth_len) = cls.parser_hdr(buf) assert auth_type == cls.auth_type - (auth_key_id,) = struct.unpack_from(cls._PACK_STR, - buf[cls._PACK_HDR_STR_LEN]) + auth_key_id = six.indexbytes(buf, cls._PACK_HDR_STR_LEN) password = buf[cls._PACK_HDR_STR_LEN + cls._PACK_STR_LEN:auth_len]
python3: Use six.indexbytes for extracting a single byte of data
osrg_ryu
train
31e1893d39502fd28cd47215be8b03534897a2ad
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java index <HASH>..<HASH> 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java +++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java @@ -15,14 +15,6 @@ */ package com.orientechnologies.orient.server.network.protocol.binary; -import java.io.IOException; -import java.net.Socket; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.orient.core.command.OCommandRequestInternal; import com.orientechnologies.orient.core.command.OCommandRequestText; @@ -69,6 +61,14 @@ import com.orientechnologies.orient.server.handler.OServerHandler; import com.orientechnologies.orient.server.handler.OServerHandlerHelper; import com.orientechnologies.orient.server.tx.OTransactionOptimisticProxy; +import java.io.IOException; +import java.net.Socket; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + public class ONetworkProtocolBinary extends OBinaryNetworkProtocolAbstract { protected OClientConnection connection; protected OUser account; @@ -797,7 +797,11 @@ public class ONetworkProtocolBinary extends OBinaryNetworkProtocolAbstract { } } catch (OTransactionAbortedException e) { // TX ABORTED BY THE CLIENT - } + } catch (Exception e) { + //Error during TX initialization, possibly index constraints violation. + tx.close(); + sendError(clientTxId, e); + } } @SuppressWarnings("unchecked") diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java index <HASH>..<HASH> 100644 --- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java +++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java @@ -1191,8 +1191,8 @@ public class IndexTest { "select from index:TransactionUniqueIndexWithDotTest.label")); Assert.assertEquals(resultBeforeCommit.size(), 1); + long countClassBefore = db.countClass("TransactionUniqueIndexWithDotTest"); db.begin(); - try { ODocument docTwo = new ODocument("TransactionUniqueIndexWithDotTest"); docTwo.field("label", "A"); @@ -1203,6 +1203,9 @@ public class IndexTest { } catch (OIndexException oie) { } + Assert.assertEquals(((List<ODocument>) + db.command(new OCommandSQL("select from TransactionUniqueIndexWithDotTest")).execute()).size(), countClassBefore); + final List<ODocument> resultAfterCommit = db.query(new OSQLSynchQuery<ODocument>( "select from index:TransactionUniqueIndexWithDotTest.label")); Assert.assertEquals(resultAfterCommit.size(), 1);
Issue <I> was fixed.
orientechnologies_orientdb
train
31854b21a36111a26673f2e72604e802909319e2
diff --git a/source/org/jivesoftware/smack/util/StringUtils.java b/source/org/jivesoftware/smack/util/StringUtils.java index <HASH>..<HASH> 100644 --- a/source/org/jivesoftware/smack/util/StringUtils.java +++ b/source/org/jivesoftware/smack/util/StringUtils.java @@ -80,12 +80,11 @@ public class StringUtils { return null; } int atIndex = XMPPAddress.indexOf("@"); - if (atIndex < 0) { - return XMPPAddress.substring(0); + if (atIndex <= 0) { + return ""; } else { return XMPPAddress.substring(0, atIndex); - } } @@ -137,6 +136,30 @@ public class StringUtils { } /** + * Returns the XMPP address with any resource information removed. For example, + * for the address "[email protected]/Smack", "[email protected]" would + * be returned. + * + * @param XMPPAddress the XMPP address. + * @return the bare XMPP address without resource information. + */ + public static String parseBareAddress(String XMPPAddress) { + if (XMPPAddress == null) { + return null; + } + int slashIndex = XMPPAddress.indexOf("/"); + if (slashIndex < 0) { + return XMPPAddress; + } + else if (slashIndex == 0) { + return ""; + } + else { + return XMPPAddress.substring(0, slashIndex); + } + } + + /** * Escapes all necessary characters in the String so that it can be used * in an XML doc. *
Added method to parse the address without resource, fixed parsing of name (SMACK-<I>) git-svn-id: <URL>
igniterealtime_Smack
train
eda21a54330e919d200f554f2a04b8af7acc8182
diff --git a/src/Request/Factory.php b/src/Request/Factory.php index <HASH>..<HASH> 100644 --- a/src/Request/Factory.php +++ b/src/Request/Factory.php @@ -59,6 +59,14 @@ class Factory { $xRequest->addParameter(Jaxon::BOOL_VALUE, $xParam); } + else if(is_array($xArgument)) + { + $this->addParameter(Jaxon::JS_VALUE, json_encode($xParam, JSON_FORCE_OBJECT)); + } + else if(is_object($xArgument)) + { + $this->addParameter(Jaxon::JS_VALUE, json_encode($xParam)); + } } return $xRequest; }
A Jaxon call now accepts arrays and objects as parameters.
jaxon-php_jaxon-core
train
df59a1219fa6fc131be273a1a0bdd77b4b7ed299
diff --git a/event/src/main/java/com/bazaarvoice/emodb/event/core/DefaultEventStore.java b/event/src/main/java/com/bazaarvoice/emodb/event/core/DefaultEventStore.java index <HASH>..<HASH> 100644 --- a/event/src/main/java/com/bazaarvoice/emodb/event/core/DefaultEventStore.java +++ b/event/src/main/java/com/bazaarvoice/emodb/event/core/DefaultEventStore.java @@ -156,7 +156,7 @@ public class DefaultEventStore implements EventStore { checkLimit(limit, Limits.MAX_PEEK_LIMIT); DaoEventSink daoSink = new DaoEventSink(Integer.MAX_VALUE, sink); - _readerDao.readAll(channel, daoSink, null); + _readerDao.readAll(channel, daoSink, null, true); if (isDebugLoggingEnabled(channel)) { _log.debug("peek {} limit={} -> #={} more={}", channel, limit, daoSink.getCount(), daoSink.hasMore()); @@ -366,7 +366,7 @@ public class DefaultEventStore implements EventStore { return true; } }; - _readerDao.readAll(channel, eventSink, since); + _readerDao.readAll(channel, eventSink, since, false); if (!events.isEmpty()) { sink.accept(events); } @@ -430,7 +430,7 @@ public class DefaultEventStore implements EventStore { } return true; } - }, null); + }, null, false); if (!eventsToCopy.isEmpty()) { addAndDelete(toChannel, eventsToCopy, fromChannel, eventsToDelete); } diff --git a/event/src/main/java/com/bazaarvoice/emodb/event/db/EventReaderDAO.java b/event/src/main/java/com/bazaarvoice/emodb/event/db/EventReaderDAO.java index <HASH>..<HASH> 100644 --- a/event/src/main/java/com/bazaarvoice/emodb/event/db/EventReaderDAO.java +++ b/event/src/main/java/com/bazaarvoice/emodb/event/db/EventReaderDAO.java @@ -16,7 +16,7 @@ public interface EventReaderDAO { * If 'since' is non-null, then most of the events before 'since' time will be skipped. * Some prior to 'since' time (at most 999) may still get through, but all events on or after since are guaranteed. * */ - void readAll(String channel, EventSink sink, @Nullable Date since); + void readAll(String channel, EventSink sink, @Nullable Date since, boolean weak); /** * Read events for the channel and pass then to the sink until the sink returns false. If called repeatedly diff --git a/event/src/main/java/com/bazaarvoice/emodb/event/db/astyanax/AstyanaxEventReaderDAO.java b/event/src/main/java/com/bazaarvoice/emodb/event/db/astyanax/AstyanaxEventReaderDAO.java index <HASH>..<HASH> 100644 --- a/event/src/main/java/com/bazaarvoice/emodb/event/db/astyanax/AstyanaxEventReaderDAO.java +++ b/event/src/main/java/com/bazaarvoice/emodb/event/db/astyanax/AstyanaxEventReaderDAO.java @@ -238,8 +238,8 @@ public class AstyanaxEventReaderDAO implements EventReaderDAO { @ParameterizedTimed(type = "AstyanaxEventReaderDAO") @Override - public void readAll(String channel, EventSink sink, Date since) { - readAll(channel, since != null ? getSlabFilterSince(since, channel) : null, sink, true); + public void readAll(String channel, EventSink sink, Date since, boolean weak) { + readAll(channel, since != null ? getSlabFilterSince(since, channel) : null, sink, weak); } void readAll(String channel, SlabFilter filter, EventSink sink, boolean weak) { diff --git a/event/src/test/java/com/bazaarvoice/emodb/event/core/DedupQueueTest.java b/event/src/test/java/com/bazaarvoice/emodb/event/core/DedupQueueTest.java index <HASH>..<HASH> 100644 --- a/event/src/test/java/com/bazaarvoice/emodb/event/core/DedupQueueTest.java +++ b/event/src/test/java/com/bazaarvoice/emodb/event/core/DedupQueueTest.java @@ -64,7 +64,7 @@ public class DedupQueueTest { // The first peek checks the read channel, find it empty, checks the write channel. q.peek(new SimpleEventSink(10)); - verify(readerDao).readAll(eq("read"), Matchers.<EventSink>any(), (Date) Matchers.isNull()); + verify(readerDao).readAll(eq("read"), Matchers.<EventSink>any(), (Date) Matchers.isNull(), Matchers.eq(true)); verify(readerDao).readNewer(eq("write"), Matchers.<EventSink>any()); verifyNoMoreInteractions(readerDao); @@ -72,7 +72,7 @@ public class DedupQueueTest { // Subsequent peeks w/in a short window still peek the read channel, skip polling the write channel. q.peek(new SimpleEventSink(10)); - verify(readerDao).readAll(eq("read"), Matchers.<EventSink>any(), (Date) Matchers.isNull()); + verify(readerDao).readAll(eq("read"), Matchers.<EventSink>any(), (Date) Matchers.isNull(), Matchers.eq(true)); verifyNoMoreInteractions(readerDao); }
EMO-<I>: Ensure databus replays are performed at LOCAL_QUORUM (#<I>)
bazaarvoice_emodb
train
22ec48f2a1aabb8d66982c9f01c551b308be2e84
diff --git a/lib/myUtils.js b/lib/myUtils.js index <HASH>..<HASH> 100644 --- a/lib/myUtils.js +++ b/lib/myUtils.js @@ -267,3 +267,20 @@ exports.callJustOnce = function(errF, cb) { } }; }; + +/** +* Whether two objects are structurally similar. +* +* @param {Object} x An object to compare. +* @param {Object} y An object to compare. +* +* @return {boolean} True if 'x' and 'y' are structurally similar. +*/ +exports.deepEqual = function(x, y) { + try { + assert.deepEqual(x, y); + return true; + } catch(ex) { + return false; + } +}; diff --git a/lib/templateUtils.js b/lib/templateUtils.js index <HASH>..<HASH> 100644 --- a/lib/templateUtils.js +++ b/lib/templateUtils.js @@ -207,7 +207,7 @@ var patchEnv = function(desc, f) { * */ var patchOneEnv = function(prefix, f) { - return function(env) { + var retF = function(env) { Object.keys(env) .forEach(function(x) { var val = env[x]; @@ -216,9 +216,14 @@ var patchOneEnv = function(prefix, f) { var propName = val.substring(prefix.length, val.length); env[x] = f(propName); + } else if (Array.isArray(val)) { + retF(val); + } else if (val && (typeof val === 'object')) { + retF(val); } }); }; + return retF; }; /** diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "caf_components", "description": "Cloud Assistant Framework Components", - "version": "0.2.34", + "version": "0.2.36", "author": "Antonio Lain <[email protected]>", "dependencies": { "async" : ">=0.1.9" diff --git a/test/hello/hello4.json b/test/hello/hello4.json index <HASH>..<HASH> 100644 --- a/test/hello/hello4.json +++ b/test/hello/hello4.json @@ -17,6 +17,12 @@ "description" : "Child1", "env" : { "language" : "$._.env.language", + "someArray": [ + { + "language" :"$._.env.language" + }, + "$._.env.language" + ], "message" : "child1", "number" : 8, "flag" : true diff --git a/test/hello/helloImpl.js b/test/hello/helloImpl.js index <HASH>..<HASH> 100644 --- a/test/hello/helloImpl.js +++ b/test/hello/helloImpl.js @@ -28,6 +28,11 @@ exports.newInstance = function($, spec, cb) { that.getLanguage = function() { return spec.env.language; }; + + that.getSpecEnv = function() { + return spec.env; + }; + cb(null, that); } catch (err) { console.log('got err' + err); diff --git a/test/test-component.js b/test/test-component.js index <HASH>..<HASH> 100644 --- a/test/test-component.js +++ b/test/test-component.js @@ -140,7 +140,7 @@ exports.manyDirs = function(test) { }; exports.properties = function(test) { - test.expect(9); + test.expect(11); async.series([ function(cb) { hello.load(null, {name: 'newHello'}, 'hello4.json', @@ -166,6 +166,10 @@ exports.properties = function(test) { test.equal(typeof(h1), 'object', 'Cannot create h1'); test.equal(h1.getLanguage(), 'french'); + var env = h1.getSpecEnv(); + test.equal(env.someArray[0] + .language, 'french'); + test.equal(env.someArray[1], 'french'); cb(err, $); }); }
Add deeply nested template links in environment
cafjs_caf_components
train
082cb36ea988050b31e055bb8c63374169021d95
diff --git a/lib/render.js b/lib/render.js index <HASH>..<HASH> 100644 --- a/lib/render.js +++ b/lib/render.js @@ -206,7 +206,7 @@ function lane (desc, opt) { var tx = 4.5; if (opt.compact) { ty = (opt.lanes - opt.index - 1) * opt.vspace / 2 + opt.fontsize/2; - tx += 20 + tx += 20; } var lane = ['g', { transform: t(tx, ty), @@ -216,7 +216,7 @@ function lane (desc, opt) { 'font-weight': opt.fontweight || 'normal' }, cage(desc, opt), - labelArr(desc, opt), + labelArr(desc, opt) ]; if (opt.compact) { lane.push(['g', text(opt.index, -10, opt.vspace/2 + 4)]); @@ -266,7 +266,7 @@ function render (desc, opt) { res.push(lane(desc, opt)); } if (opt.compact) { - res.push(compactLabels(desc,opt)); + res.push(compactLabels(desc, opt)); } return res; }
fixed semi, ws, comma warnings
drom_bitfield
train
d3554f4c3375af68babaddfbc9cdfa7160ef9577
diff --git a/pydevd.py b/pydevd.py index <HASH>..<HASH> 100644 --- a/pydevd.py +++ b/pydevd.py @@ -1203,6 +1203,13 @@ class PyDB(object): module_name = None entry_point_fn = '' if is_module: + # When launching with `python -m <module>`, python automatically adds + # an empty path to the PYTHONPATH which resolves files in the current + # directory, so, depending how pydevd itself is launched, we may need + # to manually add such an entry to properly resolve modules in the + # current directory (see: https://github.com/Microsoft/ptvsd/issues/1010). + if '' not in sys.path: + sys.path.insert(0, '') file, _, entry_point_fn = file.partition(':') module_name = file filename = get_fullname(file) diff --git a/tests_python/test_run.py b/tests_python/test_run.py index <HASH>..<HASH> 100644 --- a/tests_python/test_run.py +++ b/tests_python/test_run.py @@ -2,10 +2,11 @@ pytest_plugins = [ str('_pytest.pytester'), ] -def _run_and_check(testdir, path): + +def _run_and_check(testdir, path, check_for='Worked'): result = testdir.runpython(path) result.stdout.fnmatch_lines([ - 'Worked' + check_for ]) def test_run(testdir): @@ -63,3 +64,41 @@ py_db = pydevd.PyDB() py_db.run(%(foo_module)r, is_module=True, set_trace=False) ''' % locals())) + +def test_run_on_local_module_without_adding_to_pythonpath(testdir): + import sys + import os + + pydevd_dir = os.path.dirname(os.path.dirname(__file__)) + assert os.path.exists(os.path.join(pydevd_dir, 'pydevd.py')) + + foo_module = 'local_foo' + with open(os.path.join(os.getcwd(), 'local_foo.py'), 'w') as stream: + stream.write('print("WorkedLocalFoo")') + + _run_and_check(testdir, testdir.makepyfile(''' +import sys +import os +sys.path.append(%(pydevd_dir)r) +sys.argv.append('--as-module') +cwd = os.path.abspath(os.getcwd()) +while cwd in sys.path: + sys.path.remove(cwd) +import pydevd +py_db = pydevd.PyDB() +py_db.ready_to_run = True +py_db.run(%(foo_module)r, is_module=True) +''' % locals()), check_for='WorkedLocalFoo') + + _run_and_check(testdir, testdir.makepyfile(''' +import sys +import os +sys.argv.append('--as-module') +sys.path.append(%(pydevd_dir)r) +cwd = os.path.abspath(os.getcwd()) +while cwd in sys.path: + sys.path.remove(cwd) +import pydevd +py_db = pydevd.PyDB() +py_db.run(%(foo_module)r, is_module=True, set_trace=False) +''' % locals()), check_for='WorkedLocalFoo')
Fix issue where a module launched from the current working dir is not found. #PTVSD-<I>
fabioz_PyDev.Debugger
train
389833a2fd375ee3fd32a7633c33acd665cfcb62
diff --git a/pymod2pkg/__init__.py b/pymod2pkg/__init__.py index <HASH>..<HASH> 100644 --- a/pymod2pkg/__init__.py +++ b/pymod2pkg/__init__.py @@ -264,15 +264,16 @@ SUSE_PKG_MAP = [ # OpenStack services MultiRule( # keep lists in alphabetic order - mods=['ceilometer', 'cinder', 'cyborg', 'designate', 'freezer', - 'freezer-api', 'freezer-dr', 'glance', 'heat', 'ironic', - 'ironic-python-agent', 'karbor', 'keystone', 'manila', - 'masakari', 'masakari-monitors', 'mistral', 'monasca-agent', - 'monasca-api', 'monasca-ceilometer', 'monasca-log-api', - 'monasca-notification', 'monasca-persister', - 'monasca-transform', 'neutron', 'nova', 'rally', - 'sahara', 'swift', 'Tempest', 'trove', 'tuskar', - 'watcher', 'zaqar', 'zun'], + mods=[ + 'aodh', 'barbican', 'ceilometer', 'cinder', 'cyborg', 'designate', + 'freezer', 'freezer-api', 'freezer-dr', 'glance', 'heat', + 'ironic', 'ironic-python-agent', 'karbor', 'keystone', 'manila', + 'masakari', 'masakari-monitors', 'mistral', 'monasca-agent', + 'monasca-api', 'monasca-ceilometer', 'monasca-log-api', + 'monasca-notification', 'monasca-persister', + 'monasca-transform', 'neutron', 'nova', 'rally', + 'sahara', 'swift', 'Tempest', 'trove', 'tuskar', + 'watcher', 'zaqar', 'zun'], pkgfun=openstack_prefix_tr), # OpenStack clients MultiRule( diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -44,6 +44,8 @@ class Pymod2PkgTests(unittest.TestCase): def test_translation_suse(self): self.assertEqual(pymod2pkg.module2package('nova', 'suse'), 'openstack-nova') + self.assertEqual(pymod2pkg.module2package('barbican', 'suse'), + 'openstack-barbican') self.assertEqual(pymod2pkg.module2package('aodhclient', 'suse'), 'python-aodhclient')
Add aodh and barbican rule for SUSE. aodh and barbican should translate to openstack-barbican on SUSE. Change-Id: I6ea<I>b<I>c8b2da<I>f<I>ebc9a<I>cc<I>f<I>
openstack_pymod2pkg
train
44b39fe1f809644b9a602e9717f9cdab720cf743
diff --git a/CONDUCT.md b/CONDUCT.md index <HASH>..<HASH> 100644 --- a/CONDUCT.md +++ b/CONDUCT.md @@ -10,4 +10,4 @@ Project maintainers have the right and responsibility to remove, edit, or reject Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. -This Code of Conduct is adapted from the [Contributor Covenant](http:contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/) \ No newline at end of file +This Code of Conduct is adapted from the [Contributor Covenant](http:contributor-covenant.org), version 1.0.0, available at [http://contributor-covenant.org/version/1/0/0/](http://contributor-covenant.org/version/1/0/0/) diff --git a/lib/celluloid/rspec.rb b/lib/celluloid/rspec.rb index <HASH>..<HASH> 100644 --- a/lib/celluloid/rspec.rb +++ b/lib/celluloid/rspec.rb @@ -1,6 +1,3 @@ -require "rubygems" -require "bundler/setup" - require "dotenv" require "nenv" diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,3 +1,6 @@ +require "rubygems" +require "bundler/setup" + require "celluloid/rspec" require "celluloid/essentials"
move bundler back into spec_helper
celluloid_celluloid
train
6fd5e480c1cd3e481e7097763ee281b65bc74306
diff --git a/src/effects.js b/src/effects.js index <HASH>..<HASH> 100644 --- a/src/effects.js +++ b/src/effects.js @@ -488,9 +488,7 @@ jQuery.fn.extend({ doAnimation = function() { // Operate on a copy of prop so per-property easing won't be lost var anim = Animation( this, jQuery.extend( {}, prop ), optall ); - doAnimation.finish = function() { - anim.stop( true ); - }; + // Empty animations, or finishing resolves immediately if ( empty || data_priv.get( this, "finish" ) ) { anim.stop( true ); @@ -570,8 +568,8 @@ jQuery.fn.extend({ // empty the queue first jQuery.queue( this, type, [] ); - if ( hooks && hooks.cur && hooks.cur.finish ) { - hooks.cur.finish.call( this ); + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); } // look for any active animations, and finish them diff --git a/src/queue.js b/src/queue.js index <HASH>..<HASH> 100644 --- a/src/queue.js +++ b/src/queue.js @@ -35,7 +35,6 @@ jQuery.extend({ startLength--; } - hooks.cur = fn; if ( fn ) { // Add a progress sentinel to prevent the fx queue from being diff --git a/test/unit/effects.js b/test/unit/effects.js index <HASH>..<HASH> 100644 --- a/test/unit/effects.js +++ b/test/unit/effects.js @@ -2096,21 +2096,47 @@ test( ".finish( \"custom\" ) - custom queue animations", function() { }); test( ".finish() calls finish of custom queue functions", function() { - function queueTester() { - + function queueTester( next, hooks ) { + hooks.stop = function( gotoEnd ) { + inside++; + equal( this, div[0] ); + ok( gotoEnd, "hooks.stop(true) called"); + }; } - var div = jQuery( "<div>" ); + var div = jQuery( "<div>" ), + inside = 0, + outside = 0; - expect( 3 ); + expect( 6 ); queueTester.finish = function() { + outside++; ok( true, "Finish called on custom queue function" ); }; div.queue( queueTester ).queue( queueTester ).queue( queueTester ).finish(); + equal( inside, 1, "1 stop(true) callback" ); + equal( outside, 2, "2 finish callbacks" ); + div.remove(); }); +asyncTest( ".finish() is applied correctly when multiple elements were animated (#13937)", function() { + expect( 3 ); + + var elems = jQuery("<a>0</a><a>1</a><a>2</a>"); + + elems.animate( { opacity: 0 }, 1500 ).animate( { opacity: 1 }, 1500 ); + setTimeout(function() { + elems.eq( 1 ).finish(); + ok( !elems.eq( 1 ).queue().length, "empty queue for .finish()ed element" ); + ok( elems.eq( 0 ).queue().length, "non-empty queue for preceding element" ); + ok( elems.eq( 2 ).queue().length, "non-empty queue for following element" ); + elems.stop( true ); + start(); + }, 100 ); +}); + asyncTest( "slideDown() after stop() (#13483)", 2, function() { var ul = jQuery( "<ul style='height: 100px;display: block'></ul>" ), origHeight = ul.height();
Fix #<I>: Correctly scope .finish() following multi-element .animate(). Thanks @gnarf<I>. Close gh-<I>. (cherry picked from commit ae9e<I>e9f3cb<I>b<I>acb<I>e<I>e)
jquery_jquery
train
8706050aa4a64b0a3903a7f1976aabc345b7bd5c
diff --git a/openquake/calculators/classical.py b/openquake/calculators/classical.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/classical.py +++ b/openquake/calculators/classical.py @@ -279,10 +279,10 @@ class ClassicalCalculator(base.HazardCalculator): if dic is None: raise MemoryError('You ran out of memory!') - ctimes = dic['source_data'] - self.source_data += ctimes + sdata = dic['source_data'] + self.source_data += sdata grp_id = dic.pop('grp_id') - self.rel_ruptures[grp_id] += sum(ctimes['nrups']) + self.rel_ruptures[grp_id] += sum(sdata['nrupts']) # store rup_data if there are few sites if self.few_sites and len(dic['rup_data']['src_id']):
Fixed wrong warning about discardable TRTs
gem_oq-engine
train
237d0ef10a5b726196ede1df38f2584aca0f40aa
diff --git a/lib/edoors/spin.rb b/lib/edoors/spin.rb index <HASH>..<HASH> 100644 --- a/lib/edoors/spin.rb +++ b/lib/edoors/spin.rb @@ -81,6 +81,7 @@ module Edoors # def clear! @iotas.clear + @world.clear @pool.clear @sys_fifo.clear @app_fifo.clear
Spin#clear! must call @world.clear
jeremyz_edoors-ruby
train
64e028a6cca451420d9f063c337d0d915299e16a
diff --git a/nupic/encoders/multi.py b/nupic/encoders/multi.py index <HASH>..<HASH> 100644 --- a/nupic/encoders/multi.py +++ b/nupic/encoders/multi.py @@ -19,6 +19,8 @@ # http://numenta.org/licenses/ # ---------------------------------------------------------------------- +import capnp + from nupic.encoders.base import Encoder from nupic.encoders.scalar import ScalarEncoder from nupic.encoders.adaptivescalar import AdaptiveScalarEncoder @@ -38,7 +40,6 @@ from nupic.encoders.random_distributed_scalar import RandomDistributedScalarEnco from nupic.encoders.base import Encoder from nupic.encoders.scalar_capnp import ScalarEncoderProto -# multiencoder must be imported last because it imports * from this module! diff --git a/nupic/encoders/random_distributed_scalar.py b/nupic/encoders/random_distributed_scalar.py index <HASH>..<HASH> 100644 --- a/nupic/encoders/random_distributed_scalar.py +++ b/nupic/encoders/random_distributed_scalar.py @@ -24,6 +24,7 @@ import numbers import pprint import sys +import capnp import numpy from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA diff --git a/tests/unit/nupic/algorithms/cla_classifier_test.py b/tests/unit/nupic/algorithms/cla_classifier_test.py index <HASH>..<HASH> 100755 --- a/tests/unit/nupic/algorithms/cla_classifier_test.py +++ b/tests/unit/nupic/algorithms/cla_classifier_test.py @@ -28,9 +28,9 @@ import cPickle as pickle import types import unittest2 as unittest +import capnp import numpy import tempfile -import capnp from nupic.algorithms.CLAClassifier import CLAClassifier from nupic.bindings.proto import ClaClassifier_capnp diff --git a/tests/unit/nupic/encoders/category_test.py b/tests/unit/nupic/encoders/category_test.py index <HASH>..<HASH> 100755 --- a/tests/unit/nupic/encoders/category_test.py +++ b/tests/unit/nupic/encoders/category_test.py @@ -23,12 +23,13 @@ """Unit tests for category encoder""" import tempfile +import unittest -from nupic.encoders.base import defaultDtype -from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA +import capnp import numpy -import unittest2 as unittest +from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA +from nupic.encoders.base import defaultDtype from nupic.encoders.category import CategoryEncoder, UNKNOWN from nupic.encoders.category_capnp import CategoryEncoderProto diff --git a/tests/unit/nupic/encoders/coordinate_test.py b/tests/unit/nupic/encoders/coordinate_test.py index <HASH>..<HASH> 100755 --- a/tests/unit/nupic/encoders/coordinate_test.py +++ b/tests/unit/nupic/encoders/coordinate_test.py @@ -20,6 +20,7 @@ # http://numenta.org/licenses/ # ---------------------------------------------------------------------- +import capnp # For import hook import numpy as np import tempfile import unittest
Clean up capnp imports.
numenta_nupic
train
3cf13c2e52b425e3c449bed32038dbcffeac907a
diff --git a/Query/Builder.php b/Query/Builder.php index <HASH>..<HASH> 100755 --- a/Query/Builder.php +++ b/Query/Builder.php @@ -1586,7 +1586,7 @@ class Builder { */ public function count($column = '*') { - return (int)$this->aggregate(__FUNCTION__, array($column)); + return (int) $this->aggregate(__FUNCTION__, array($column)); } /**
Add space between type-casting operator and return value.
illuminate_database
train
4a596ba9c77bedee193a8b789bd51429be38b8c8
diff --git a/docs/rules/no-static-typos.md b/docs/rules/no-static-typos.md index <HASH>..<HASH> 100644 --- a/docs/rules/no-static-typos.md +++ b/docs/rules/no-static-typos.md @@ -10,6 +10,7 @@ no casing typos: * propTypes * contextTypes +* childContextTypes * defaultProps The following patterns are considered warnings: @@ -32,6 +33,14 @@ class MyComponent extends React.Component { } class MyComponent extends React.Component { + static ChildContextTypes = {} +} + +class MyComponent extends React.Component { + static childcontexttypes = {} +} + +class MyComponent extends React.Component { static DefaultProps = {} } @@ -52,6 +61,10 @@ class MyComponent extends React.Component { } class MyComponent extends React.Component { + static childContextTypes = {} +} + +class MyComponent extends React.Component { static defaultProps = {} } ``` diff --git a/lib/rules/no-static-typos.js b/lib/rules/no-static-typos.js index <HASH>..<HASH> 100644 --- a/lib/rules/no-static-typos.js +++ b/lib/rules/no-static-typos.js @@ -9,7 +9,7 @@ var Components = require('../util/Components'); // Rule Definition // ------------------------------------------------------------------------------ -var STATIC_CLASS_PROPERTIES = ['propTypes', 'contextTypes', 'defaultProps']; +var STATIC_CLASS_PROPERTIES = ['propTypes', 'contextTypes', 'childContextTypes', 'defaultProps']; module.exports = { meta: { diff --git a/tests/lib/rules/no-static-typos.js b/tests/lib/rules/no-static-typos.js index <HASH>..<HASH> 100644 --- a/tests/lib/rules/no-static-typos.js +++ b/tests/lib/rules/no-static-typos.js @@ -30,6 +30,7 @@ ruleTester.run('no-static-typos', rule, { 'class First {', ' static PropTypes = {key: "myValue"};', ' static ContextTypes = {key: "myValue"};', + ' static ChildContextTypes = {key: "myValue"};', ' static DefaultProps = {key: "myValue"};', '}' ].join('\n'), @@ -40,6 +41,7 @@ ruleTester.run('no-static-typos', rule, { 'class First extends React.Component {', ' static propTypes = {key: "myValue"};', ' static contextTypes = {key: "myValue"};', + ' static childContextTypes = {key: "myValue"};', ' static defaultProps = {key: "myValue"};', '}' ].join('\n'), @@ -50,6 +52,7 @@ ruleTester.run('no-static-typos', rule, { 'class MyClass {', ' propTypes = {key: "myValue"};', ' contextTypes = {key: "myValue"};', + ' childContextTypes = {key: "myValue"};', ' defaultProps = {key: "myValue"};', '}' ].join('\n'), @@ -60,6 +63,7 @@ ruleTester.run('no-static-typos', rule, { 'class MyClass {', ' PropTypes = {key: "myValue"};', ' ContextTypes = {key: "myValue"};', + ' ChildContextTypes = {key: "myValue"};', ' DefaultProps = {key: "myValue"};', '}' ].join('\n'), @@ -70,6 +74,7 @@ ruleTester.run('no-static-typos', rule, { 'class MyClass {', ' proptypes = {key: "myValue"};', ' contexttypes = {key: "myValue"};', + ' childcontextypes = {key: "myValue"};', ' defaultprops = {key: "myValue"};', '}' ].join('\n'), @@ -80,6 +85,7 @@ ruleTester.run('no-static-typos', rule, { 'class MyClass {', ' static PropTypes() {};', ' static ContextTypes() {};', + ' static ChildContextTypes() {};', ' static DefaultProps() {};', '}' ].join('\n'), @@ -90,6 +96,7 @@ ruleTester.run('no-static-typos', rule, { 'class MyClass {', ' static proptypes() {};', ' static contexttypes() {};', + ' static childcontexttypes() {};', ' static defaultprops() {};', '}' ].join('\n'), @@ -136,6 +143,24 @@ ruleTester.run('no-static-typos', rule, { }, { code: [ 'class Component extends React.Component {', + ' static ChildContextTypes = {};', + '}' + ].join('\n'), + parser: 'babel-eslint', + parserOptions: parserOptions, + errors: [{message: ERROR_MESSAGE}] + }, { + code: [ + 'class Component extends React.Component {', + ' static childcontexttypes = {};', + '}' + ].join('\n'), + parser: 'babel-eslint', + parserOptions: parserOptions, + errors: [{message: ERROR_MESSAGE}] + }, { + code: [ + 'class Component extends React.Component {', ' static DefaultProps = {};', '}' ].join('\n'),
Add support for childContextTypes
ytanruengsri_eslint-plugin-react-ssr
train
fdbc0075d12924d665028d01a2b4033896593e14
diff --git a/src/Model/CurrencyPair.php b/src/Model/CurrencyPair.php index <HASH>..<HASH> 100644 --- a/src/Model/CurrencyPair.php +++ b/src/Model/CurrencyPair.php @@ -94,6 +94,16 @@ final class CurrencyPair } /** + * Returns the hashed representation of the pair. + * + * @return string + */ + public function toHash() + { + return md5($this->toString()); + } + + /** * Returns a string representation of the pair. * * @return string diff --git a/src/Swap.php b/src/Swap.php index <HASH>..<HASH> 100644 --- a/src/Swap.php +++ b/src/Swap.php @@ -51,7 +51,7 @@ class Swap implements SwapInterface } if (null !== $this->cacheItemPool) { - $item = $this->cacheItemPool->getItem($currencyPair->toString()); + $item = $this->cacheItemPool->getItem($currencyPair->toHash()); if ($item->isHit()) { return $item->get(); diff --git a/tests/Tests/SwapTest.php b/tests/Tests/SwapTest.php index <HASH>..<HASH> 100644 --- a/tests/Tests/SwapTest.php +++ b/tests/Tests/SwapTest.php @@ -91,7 +91,7 @@ class SwapTest extends \PHPUnit_Framework_TestCase $pool ->expects($this->once()) ->method('getItem') - ->with($pair->toString()) + ->with($pair->toHash()) ->will($this->returnValue($item)); $swap = new Swap($provider, $pool); @@ -125,7 +125,7 @@ class SwapTest extends \PHPUnit_Framework_TestCase $pool ->expects($this->once()) ->method('getItem') - ->with($pair->toString()) + ->with($pair->toHash()) ->will($this->returnValue($item)); $swap = new Swap($provider, $pool); @@ -170,7 +170,7 @@ class SwapTest extends \PHPUnit_Framework_TestCase $pool ->expects($this->once()) ->method('getItem') - ->with($pair->toString()) + ->with($pair->toHash()) ->will($this->returnValue($item)); $pool
Fixed unsupported / in cache keys
florianv_exchanger
train
bcd156b4632503aa7f7c9ad6a69aef4a2f5c0c53
diff --git a/rapidoid-u/src/main/java/org/rapidoid/util/U.java b/rapidoid-u/src/main/java/org/rapidoid/util/U.java index <HASH>..<HASH> 100644 --- a/rapidoid-u/src/main/java/org/rapidoid/util/U.java +++ b/rapidoid-u/src/main/java/org/rapidoid/util/U.java @@ -819,6 +819,37 @@ public class U { }); } + public static <K1, K2, V> Map<K1, Map<K2, V>> mapOfMaps() { + return autoExpandingMap(new Mapper<K1, Map<K2, V>>() { + + @Override + public Map<K2, V> map(K1 src) throws Exception { + return synchronizedMap(); + } + + }); + } + + public static <K, V> Map<K, List<V>> mapOfLists() { + return autoExpandingMap(new Mapper<K, List<V>>() { + + @Override + public List<V> map(K src) throws Exception { + return Collections.synchronizedList(U.<V> list()); + } + + }); + } + + public static <K, V> Map<K, Set<V>> mapOfSets() { + return autoExpandingMap(new Mapper<K, Set<V>>() { + + @Override + public Set<V> map(K src) throws Exception { + return Collections.synchronizedSet(U.<V> set()); + } + + }); } }
Added more auto-expanding map factory utils.
rapidoid_rapidoid
train
af7aa4f1160d27055bb56d5b7778c2f2eeb054c9
diff --git a/packages/ringcentral-integration/modules/Analytics/index.js b/packages/ringcentral-integration/modules/Analytics/index.js index <HASH>..<HASH> 100644 --- a/packages/ringcentral-integration/modules/Analytics/index.js +++ b/packages/ringcentral-integration/modules/Analytics/index.js @@ -8,6 +8,40 @@ import getAnalyticsReducer from './getAnalyticsReducer'; import { Segment } from '../../lib/Analytics'; import callingModes from '../CallingSettings/callingModes'; +const INIT_TRACK_LIST = [ + '_authentication', + '_logout', + '_callAttempt', + '_callConnected', + '_webRTCRegistration', + '_smsAttempt', + '_smsSent', + '_logCall', + '_logSMS', + '_clickToDial', + '_clickToSMS', + '_viewEntity', + '_createEntity', + '_editCallLog', + '_editSMSLog', + '_navigate', + '_inboundCall', + '_coldTransfer', + '_textClickToDial', + '_voicemailClickToDial', + '_voicemailClickToSMS', + '_voicemailDelete', + '_voicemailFlag', + '_contactDetailClickToDial', + '_contactDetailClickToSMS', + '_callHistoryClickToDial', + '_callHistoryClickToSMS', + '_conferenceInviteWithText', + '_conferenceAddDialInNumber', + '_conferenceJoinAsHost', + '_showWhatsNew', +]; + /** * @class * @description Analytics module. @@ -76,6 +110,7 @@ export default class Analytics extends RcModule { // init this._reducer = getAnalyticsReducer(this.actionTypes); this._segment = Segment(); + this._trackList = INIT_TRACK_LIST; } initialize() { @@ -123,39 +158,7 @@ export default class Analytics extends RcModule { if (this.lastActions.length) { await sleep(300); this.lastActions.forEach((action) => { - [ - '_authentication', - '_logout', - '_callAttempt', - '_callConnected', - '_webRTCRegistration', - '_smsAttempt', - '_smsSent', - '_logCall', - '_logSMS', - '_clickToDial', - '_clickToSMS', - '_viewEntity', - '_createEntity', - '_editCallLog', - '_editSMSLog', - '_navigate', - '_inboundCall', - '_coldTransfer', - '_textClickToDial', - '_voicemailClickToDial', - '_voicemailClickToSMS', - '_voicemailDelete', - '_voicemailFlag', - '_contactDetailClickToDial', - '_contactDetailClickToSMS', - '_callHistoryClickToDial', - '_callHistoryClickToSMS', - '_conferenceInviteWithText', - '_conferenceAddDialInNumber', - '_conferenceJoinAsHost', - '_showWhatsNew', - ].forEach((key) => { + this._trackList.forEach((key) => { this[key](action); }); }); @@ -166,6 +169,15 @@ export default class Analytics extends RcModule { } } + /** + * Append more action to track + * First, Inherit this class and declare channel specific method on it + * Then append more method name to track using this method + * @param {string[]} methodNames + */ + appendTrackList(methodNames) { + this._trackList.push(...methodNames); + } _authentication(action) { if (this._auth && this._auth.actionTypes.loginSuccess === action.type) { @@ -415,6 +427,9 @@ export default class Analytics extends RcModule { eventPostfix: 'Call History', router: '/history', }, { + eventPostfix: 'Call List', + router: '/calls', + }, { eventPostfix: 'Settings', router: '/settings', }, {
Make the tracking list appendable with more methods from a child class (#<I>) 1. Make the tracking list appendable with more methods from a child class. 2. Add routes for tracking - /calls.
ringcentral_ringcentral-js-widgets
train
adeee781a3c4e5e2c465acbed69ab648a801003c
diff --git a/runtime/core/src/main/java/io/datawire/quark/runtime/BufferImpl.java b/runtime/core/src/main/java/io/datawire/quark/runtime/BufferImpl.java index <HASH>..<HASH> 100644 --- a/runtime/core/src/main/java/io/datawire/quark/runtime/BufferImpl.java +++ b/runtime/core/src/main/java/io/datawire/quark/runtime/BufferImpl.java @@ -17,6 +17,7 @@ public class BufferImpl implements Buffer { public BufferImpl(ByteBuf b) { this.b = b; + b.writerIndex(b.capacity()); b.retain(); }
Quark Buffer has readableSize===capacity, enforce it
datawire_quark
train
92a074bc18a838aeb0ada8555fbf749f7b150c82
diff --git a/lib/compat/multi_json.rb b/lib/compat/multi_json.rb index <HASH>..<HASH> 100644 --- a/lib/compat/multi_json.rb +++ b/lib/compat/multi_json.rb @@ -1,7 +1,7 @@ gem 'multi_json', '>= 1.0.0' require 'multi_json' -if !MultiJson.respond_to?(:load) || MultiJson.method(:load).owner == Kernel +if !MultiJson.respond_to?(:load) || [Kernel, ActiveSupport::Dependencies::Loadable].include?(MultiJson.method(:load).owner) module MultiJson class <<self alias :load :decode
Make multi_json >= <I> compatible with Rails <I>
googleapis_google-api-ruby-client
train
7454333bf1b6e588185cefce069ccc4b051a4f0f
diff --git a/upup/pkg/fi/cloudup/containerd.go b/upup/pkg/fi/cloudup/containerd.go index <HASH>..<HASH> 100644 --- a/upup/pkg/fi/cloudup/containerd.go +++ b/upup/pkg/fi/cloudup/containerd.go @@ -228,6 +228,7 @@ func findAllContainerdDockerMappings() map[string]string { "1.4.6": "20.10.7", "1.4.9": "20.10.8", "1.4.11": "20.10.9", + "1.4.12": "20.10.11", } return versions diff --git a/upup/pkg/fi/cloudup/docker.go b/upup/pkg/fi/cloudup/docker.go index <HASH>..<HASH> 100644 --- a/upup/pkg/fi/cloudup/docker.go +++ b/upup/pkg/fi/cloudup/docker.go @@ -202,6 +202,7 @@ func findAllDockerHashesAmd64() map[string]string { "20.10.8": "7ea11ecb100fdc085dbfd9ab1ff380e7f99733c890ed815510a5952e5d6dd7e0", "20.10.9": "caf74e54b58c0b38bb4d96c8f87665f29b684371c9a325562a3904b8c389995e", "20.10.10": "1719446f99cd56e87d0c67019996af4ea859f11891bfd89de2252d6c916ccaaa", + "20.10.11": "dd6ff72df1edfd61ae55feaa4aadb88634161f0aa06dbaaf291d1be594099ff3", } return hashes @@ -256,6 +257,7 @@ func findAllDockerHashesArm64() map[string]string { "20.10.8": "4eb9d5e2adf718cd7ee59f6951715f3113c9c4ee49c75c9efb9747f2c3457b2b", "20.10.9": "0259f8b6572f02cf0dafd7388ca0e4adfdbbfaba81cfb1b7443e89fccbed22c7", "20.10.10": "8db47cdcd7ac6e082c9ce83347d8fb99eaa01e04b0c8d94851e8d58f350a3633", + "20.10.11": "87a4219c54552797ffd38790b72832372a90eceb7c8e451c36a682093d57dae6", } return hashes
Add hashes for Docker <I>
kubernetes_kops
train
2c4eb0064ba9b550a39fd89072343232dce08e62
diff --git a/lib/dml/oci8po_adodb_moodle_database.php b/lib/dml/oci8po_adodb_moodle_database.php index <HASH>..<HASH> 100644 --- a/lib/dml/oci8po_adodb_moodle_database.php +++ b/lib/dml/oci8po_adodb_moodle_database.php @@ -170,7 +170,7 @@ class oci8po_adodb_moodle_database extends adodb_moodle_database { } public function sql_bitxor($int1, $int2) { - return '((' . $int1 . ') # (' . $int2 . '))'; + return '(' . $this->sql_bitor($int1, $int2) . ' - ' . $this->sql_bitand($int1, $int2) . ')'; } /** diff --git a/lib/dml/oci_native_moodle_database.php b/lib/dml/oci_native_moodle_database.php index <HASH>..<HASH> 100644 --- a/lib/dml/oci_native_moodle_database.php +++ b/lib/dml/oci_native_moodle_database.php @@ -1118,13 +1118,11 @@ class oci_native_moodle_database extends moodle_database { } public function sql_bitor($int1, $int2) { - // TODO: this can not work because the number of params does not match return '((' . $int1 . ') + (' . $int2 . ') - ' . $this->sql_bitand($int1, $int2) . ')'; } public function sql_bitxor($int1, $int2) { - // TODO: this can not work because the number of params does not match - return '(' . sql_bitor($int1, $int2) . ' - ' . $this->sql_bitand($int1, $int2) . ')'; + return '(' . $this->sql_bitor($int1, $int2) . ' - ' . $this->sql_bitand($int1, $int2) . ')'; } /**
MDL-<I> oracle dml: towards full support
moodle_moodle
train
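The Oracle workaround in the Moodle commit above relies on the arithmetic identity a XOR b = (a OR b) − (a AND b): OR sets every bit present in either operand, and AND marks exactly the bits present in both, which XOR must drop. A quick Python check of the identity (illustrative only):

```python
# Verify: a ^ b == (a | b) - (a & b) for non-negative integers.
# AND marks the bits set in both operands; subtracting it from OR
# removes the doubly-present bits exactly once, which is XOR.
for a in range(256):
    for b in range(256):
        assert a ^ b == (a | b) - (a & b)
print("identity holds for all 8-bit pairs")
```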
495c3afe7b326edcd088d96a6bd41c5f93d5e37d
diff --git a/lib/slingshot.rb b/lib/slingshot.rb index <HASH>..<HASH> 100644 --- a/lib/slingshot.rb +++ b/lib/slingshot.rb @@ -1,6 +1,8 @@ require 'rest_client' require 'yajl/json_gem' +require 'slingshot/rubyext/hash' + module Slingshot autoload :Configuration, File.dirname(__FILE__) + '/slingshot/configuration' diff --git a/lib/slingshot/index.rb b/lib/slingshot/index.rb index <HASH>..<HASH> 100644 --- a/lib/slingshot/index.rb +++ b/lib/slingshot/index.rb @@ -19,6 +19,21 @@ module Slingshot false end + def store(*args) + if args.size > 1 + (type, document = args) + else + (document = args.pop; type = :document) + end + document = case true + when document.is_a?(String) then document + when document.respond_to?(:to_indexed_json) then document.to_indexed_json + else raise ArgumentError, "Please pass a JSON string or object with a 'to_indexed_json' method" + end + result = Configuration.client.post "#{Configuration.url}/#{@name}/#{type}/", document + JSON.parse(result) + end + def refresh Configuration.client.post "#{Configuration.url}/#{@name}/_refresh", '' end diff --git a/test/unit/index_test.rb b/test/unit/index_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/index_test.rb +++ b/test/unit/index_test.rb @@ -37,6 +37,33 @@ module Slingshot assert_nothing_raised { assert @index.refresh } end + context "when storing" do + + should "properly set type from args" do + Configuration.client.expects(:post).with("#{Configuration.url}/dummy/article/", '{"title":"Test"}').returns('{"ok":true,"_id":"test"}').twice + @index.store 'article', :title => 'Test' + @index.store :article, :title => 'Test' + end + + should "set default type" do + Configuration.client.expects(:post).with("#{Configuration.url}/dummy/document/", '{"title":"Test"}').returns('{"ok":true,"_id":"test"}') + @index.store :title => 'Test' + end + + should "call #to_indexed_json on non-String documents" do + document = { :title => 'Test' } + Configuration.client.expects(:post).returns('{"ok":true,"_id":"test"}') + document.expects(:to_indexed_json) + @index.store document + end + + should "raise error when storing neither String nor object with #to_indexed_json method" do + class MyDocument;end; document = MyDocument.new + assert_raise(ArgumentError) { @index.store document } + end + + end + end end
Added Index#store to save documents in index
karmi_retire
train
4438e704e520d1baff4e8ed78f8faf0f35fe7d10
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py index <HASH>..<HASH> 100644 --- a/salt/returners/mysql.py +++ b/salt/returners/mysql.py @@ -191,10 +191,16 @@ def returner(ret): (`fun`, `jid`, `return`, `id`, `success`, `full_ret` ) VALUES (%s, %s, %s, %s, %s, %s)''' - cur.execute(sql, (ret['fun'], ret['jid'], + success = 'None' + if 'success' in ret: + success = ret['success'] + fun = 'None' + if 'fun' in ret: + fun = ret['fun'] + cur.execute(sql, (fun, ret['jid'], json.dumps(ret['return']), ret['id'], - ret['success'], + success, json.dumps(ret))) except salt.exceptions.SaltMasterError: log.critical('Could not store return with MySQL returner. MySQL server unavailable.') @@ -222,11 +228,11 @@ def save_load(jid, load): ''' with _get_serv(commit=True) as cur: - sql = '''INSERT INTO `jids` - (`jid`, `load`) - VALUES (%s, %s)''' + sql = '''INSERT INTO `jids` (`jid`, `load`) + SELECT %s, %s FROM DUAL + WHERE NOT EXISTS (SELECT `jid` FROM `jids` where `jid`=%s)''' - cur.execute(sql, (jid, json.dumps(load))) + cur.execute(sql, (jid, json.dumps(load), jid)) def get_load(jid):
fix mysql returner for syndic master job cache - Change 'jids' table insert so that syndic returns will not fail (the first insert wins) - Set default values for 'success' and 'fun' properties as they are not present on syndic returns
saltstack_salt
train
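The "first insert wins" trick in the Salt commit above — `INSERT ... SELECT ... WHERE NOT EXISTS` — makes the jid insert idempotent, so a syndic master replaying the same jid does not fail on a duplicate key. A hedged sketch of the same pattern using Python's sqlite3 (table layout simplified for illustration; the real returner targets MySQL, where the inner SELECT needs `FROM DUAL`):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE jids (jid TEXT PRIMARY KEY, load TEXT)")

def save_load(jid, load):
    # First insert wins: when the jid already exists, the SELECT produces
    # no row (and thus no insert) instead of raising a duplicate-key error.
    conn.execute(
        "INSERT INTO jids (jid, load) "
        "SELECT ?, ? WHERE NOT EXISTS (SELECT jid FROM jids WHERE jid = ?)",
        (jid, load, jid),
    )

save_load("20240101", '{"fun": "test.ping"}')
save_load("20240101", '{"fun": "other"}')  # silently ignored, no error
print(conn.execute("SELECT load FROM jids").fetchall())  # the first load only
```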
8a0f69b9553a511878aaa6134738bc606b9b94d9
diff --git a/core/src/main/java/org/springframework/security/access/method/AbstractMethodSecurityMetadataSource.java b/core/src/main/java/org/springframework/security/access/method/AbstractMethodSecurityMetadataSource.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/springframework/security/access/method/AbstractMethodSecurityMetadataSource.java +++ b/core/src/main/java/org/springframework/security/access/method/AbstractMethodSecurityMetadataSource.java @@ -55,7 +55,14 @@ public abstract class AbstractMethodSecurityMetadataSource implements MethodSecu if (object instanceof JoinPoint) { JoinPoint jp = (JoinPoint) object; - Class<?> targetClass = jp.getTarget().getClass(); + Class<?> targetClass; + + if (jp.getTarget() != null) { + targetClass = jp.getTarget().getClass(); + } else { + // SEC-1295: target may be null if an ITD is in use + targetClass = jp.getSignature().getDeclaringType(); + } String targetMethodName = jp.getStaticPart().getSignature().getName(); Class<?>[] types = ((CodeSignature) jp.getStaticPart().getSignature()).getParameterTypes(); Class<?> declaringType = ((CodeSignature) jp.getStaticPart().getSignature()).getDeclaringType();
SEC-<I>: Placing Security on a Roo-aspected method fails. Added suggested fix - check for null target and use Signature.getDeclaringType() instead.
spring-projects_spring-security
train
1d5c88fc1d70ec1c92c7a249ee5ccf7884e3dcc6
diff --git a/cms_lab_publications/models.py b/cms_lab_publications/models.py index <HASH>..<HASH> 100644 --- a/cms_lab_publications/models.py +++ b/cms_lab_publications/models.py @@ -195,7 +195,10 @@ class PublicationSet(models.Model): 'To add files and tags to publication records, create publications ' \ 'individually via the Publication Admin (or below).', ) - publications = models.ManyToManyField(Publication) + publications = models.ManyToManyField(Publication, + blank=True, + null=True, + ) searchable = models.BooleanField('searchable?', default=True,
Allow a Publication Set to be created without publications
mfcovington_djangocms-lab-publications
train
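Worth noting for the Django change above: on a `ManyToManyField`, `null=True` has no effect (the relation lives in a join table, so "no publications" is simply an empty set); `blank=True` alone is what lets forms validate without a selection. A hypothetical minimal model sketch — it will not run outside a configured Django app:

```python
from django.db import models

class PublicationSet(models.Model):
    # null=True is a no-op on ManyToManyField (recent Django versions even
    # warn about it); blank=True allows an empty selection in admin forms.
    publications = models.ManyToManyField('Publication', blank=True)
```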
3cec95581f8f1c00dd577dd9a751c50c277bd833
diff --git a/src/Orm/EntityRepository.php b/src/Orm/EntityRepository.php index <HASH>..<HASH> 100644 --- a/src/Orm/EntityRepository.php +++ b/src/Orm/EntityRepository.php @@ -198,7 +198,7 @@ final class EntityRepository implements EntityRepositoryInterface } $filterDataDto = FilterDataDto::new($i, $filter, current($queryBuilder->getRootAliases()), $submittedData); - $filter->apply($queryBuilder, $filterDataDto, $fields->get($propertyName), $entityDto); + $filter->apply($queryBuilder, $filterDataDto, $fields->getByProperty($propertyName), $entityDto); ++$i; }
Fix getting the field DTO in the filter clause; the fieldDto was always null
EasyCorp_EasyAdminBundle
train
ba9c30568b95c82b9199b290c921ff0504217420
diff --git a/packages/neos-ui-editors/src/Image/index.js b/packages/neos-ui-editors/src/Image/index.js index <HASH>..<HASH> 100644 --- a/packages/neos-ui-editors/src/Image/index.js +++ b/packages/neos-ui-editors/src/Image/index.js @@ -25,6 +25,9 @@ export default class ImageEditor extends Component { }), PropTypes.string ]), + // "hooks" are the hooks specified by commit() + hooks: PropTypes.object, + commit: PropTypes.func.isRequired, renderSecondaryInspector: PropTypes.func.isRequired, @@ -138,8 +141,6 @@ export default class ImageEditor extends Component { }; const nextimage = $set(CROP_IMAGE_ADJUSTMENT, cropAdjustments, image); - this.setState({image: nextimage}); - commit(value, { 'Neos.UI:Hook.BeforeSave.CreateImageVariant': nextimage }); @@ -151,8 +152,6 @@ export default class ImageEditor extends Component { const nextimage = resizeAdjustment ? $set(RESIZE_IMAGE_ADJUSTMENT, resizeAdjustment, image) : $drop(RESIZE_IMAGE_ADJUSTMENT, image); - this.setState({image: nextimage}); - commit(value, { 'Neos.UI:Hook.BeforeSave.CreateImageVariant': nextimage }); @@ -226,14 +225,17 @@ export default class ImageEditor extends Component { const { renderSecondaryInspector, - options + options, + hooks } = this.props; + const usedImage = (this.props.hooks ? this.props.hooks['Neos.UI:Hook.BeforeSave.CreateImageVariant'] : this.state.image); + return ( <div className={style.imageEditor}> <PreviewScreen ref={this.setPreviewScreenRef} - image={image} + image={usedImage} isLoading={isAssetLoading} onDrop={this.handleFilesDrop} onClick={this.handleThumbnailClicked} @@ -247,7 +249,7 @@ export default class ImageEditor extends Component { onChooseFromLocalFileSystem={this.handleChooseFile} onRemove={this.handleRemoveFile} onCrop={this.isFeatureEnabled('crop') && (() => renderSecondaryInspector('IMAGE_CROP', () => <Secondary.ImageCropper - sourceImage={Image.fromImageData(image)} + sourceImage={Image.fromImageData(usedImage)} options={options} onComplete={this.handleMediaCrop} />))} diff --git a/packages/neos-ui/src/Containers/RightSideBar/Inspector/EditorEnvelope/index.js b/packages/neos-ui/src/Containers/RightSideBar/Inspector/EditorEnvelope/index.js index <HASH>..<HASH> 100644 --- a/packages/neos-ui/src/Containers/RightSideBar/Inspector/EditorEnvelope/index.js +++ b/packages/neos-ui/src/Containers/RightSideBar/Inspector/EditorEnvelope/index.js @@ -63,6 +63,7 @@ export default class EditorEnvelope extends Component { label, node, value: transientValue ? transientValue.value : sourceValue, + hooks: transientValue ? transientValue.hooks : null, propertyName: id, options };
BUGFIX: properly reset Image Editor on Inspector Cancel Fixes: #<I>
neos_neos-ui
train
f15b8e43e637b8f16cac0ea7f4658ba533afded2
diff --git a/python/src/nnabla/utils/converter/nnabla/exporter.py b/python/src/nnabla/utils/converter/nnabla/exporter.py index <HASH>..<HASH> 100644 --- a/python/src/nnabla/utils/converter/nnabla/exporter.py +++ b/python/src/nnabla/utils/converter/nnabla/exporter.py @@ -18,6 +18,7 @@ import os import shutil import tempfile import zipfile +import numpy as np # TODO temporary work around to suppress FutureWarning message. import warnings @@ -45,8 +46,10 @@ class NnpExporter: import h5py with h5py.File(filename, 'w') as hd: for i, param in enumerate(nnp.parameter): + data = np.reshape(list(param.data), + tuple(list(param.shape.dim))) dset = hd.create_dataset( - param.variable_name, dtype='f4', data=list(param.data)) + param.variable_name, dtype='f4', data=data) dset.attrs['need_grad'] = param.need_grad dset.attrs['index'] = i
Fix nnp-to-nnp conversion problem with h5: reshape parameter data to its declared shape
sony_nnabla
train
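The nnabla fix above reshapes the flat parameter buffer to its declared shape before writing it to HDF5; otherwise every dataset comes out one-dimensional. A minimal standalone illustration with numpy and h5py:

```python
import numpy as np
import h5py

flat = list(range(12))            # parameters arrive as a flat list
shape = (3, 4)                    # declared shape (param.shape.dim in the nnp)

data = np.reshape(flat, shape)    # restore the intended dimensionality

with h5py.File("params.h5", "w") as hd:
    dset = hd.create_dataset("conv1/W", dtype="f4", data=data)
    dset.attrs["need_grad"] = True
    dset.attrs["index"] = 0

with h5py.File("params.h5", "r") as hd:
    print(hd["conv1/W"].shape)    # (3, 4), not (12,)
```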
0d0b24013730773351a09344350f6294bb5cbd36
diff --git a/Model/Module.php b/Model/Module.php index <HASH>..<HASH> 100644 --- a/Model/Module.php +++ b/Model/Module.php @@ -25,7 +25,7 @@ abstract class Module implements ModuleInterface /** * @var string */ - protected $type; + protected $modularType; /** * Get id @@ -40,9 +40,17 @@ abstract class Module implements ModuleInterface /** * {@inheritdoc} */ - public function setType($type) + public function getModularSegment() { - $this->type = $type; + return $this->getId(); + } + + /** + * {@inheritdoc} + */ + public function setModularType($type) + { + $this->modularType = $type; return $this; } @@ -50,8 +58,8 @@ abstract class Module implements ModuleInterface /** * {@inheritdoc} */ - public function getType() + public function getModularType() { - return $this->type; + return $this->modularType; } } diff --git a/Model/ModuleInterface.php b/Model/ModuleInterface.php index <HASH>..<HASH> 100644 --- a/Model/ModuleInterface.php +++ b/Model/ModuleInterface.php @@ -18,16 +18,23 @@ namespace Harmony\Component\ModularRouting\Model; interface ModuleInterface { /** - * Returns the type of related metadata + * Returns the segment to identify the module * * @return string */ - public function getType(); + public function getModularSegment(); /** - * Set the type of related metadata + * Sets the type of the module * * @param string $type */ - public function setType($type); + public function setModularType($type); + + /** + * Returns the type of the module + * + * @return string + */ + public function getModularType(); }
Improved the Module interface with some refactoring and the inclusion of getModularSegment
harmony-project_modular-routing
train
d9f64b18849821789b24e54a52a5b9a42525572d
diff --git a/app/preparation/mzidtsv/proteingrouping.py b/app/preparation/mzidtsv/proteingrouping.py index <HASH>..<HASH> 100644 --- a/app/preparation/mzidtsv/proteingrouping.py +++ b/app/preparation/mzidtsv/proteingrouping.py @@ -116,14 +116,6 @@ def get_masters(ppgraph): return masters -def count_protein_group_hits(proteins, pgcontents): - proteins = set(proteins) - hit_counts = [] - for pgroup in pgcontents: - hit_counts.append(str(len(proteins.intersection(pgroup)))) - return ';'.join(hit_counts) - - def group_proteins(proteins, pgdb): """Generates protein groups per PSM. First calls get_slave_proteins to determine which of a graph is the master protein. Then calls a sorting @@ -146,17 +138,17 @@ def group_proteins(proteins, pgdb): return protein_groups -def sort_proteingroup(sortfunctions, sortfunc_index, pgroup, ppgraph): +def sort_protein_group(pgroup, sortfunctions, sortfunc_index): """Recursive function that sorts protein group by a number of sorting functions.""" pgroup_out = [] - subgroups = sortfunctions[sortfunc_index](pgroup, ppgraph) + subgroups = sortfunctions[sortfunc_index](pgroup) sortfunc_index += 1 for subgroup in subgroups: if len(subgroup) > 1 and sortfunc_index < len(sortfunctions): - pgroup_out.extend(sort_proteingroup(sortfunctions, - sortfunc_index, - subgroup, ppgraph)) + pgroup_out.extend(sort_protein_group(subgroup, + sortfunctions, + sortfunc_index)) else: pgroup_out.extend(subgroup) return pgroup_out @@ -166,63 +158,33 @@ def get_all_proteins_from_unrolled_psm(psm, pgdb): return pgdb.get_proteins_for_peptide([psm_id]) -def sort_pgroup_peptides(proteins, ppgraph): - return sort_evaluator(sort_amounts(proteins, ppgraph)) +def sort_pgroup_peptides(proteins): + return sort_amounts(proteins, 3) -def sort_pgroup_psms(proteins, ppgraph): - return sort_evaluator(sort_amounts(proteins, ppgraph, innerlookup=True)) +def sort_pgroup_psms(proteins): + return sort_amounts(proteins, 4) -def sort_amounts(proteins, ppgraph, innerlookup=False): +def sort_amounts(proteins, sort_index): """Generic function for sorting peptides and psms""" amounts = {} for protein in proteins: - if not innerlookup: - amount_x_for_protein = len(ppgraph[protein]) - else: - amount_x_for_protein = len([x for y in ppgraph[protein].values() - for x in y]) + amount_x_for_protein = protein[sort_index] try: amounts[amount_x_for_protein].append(protein) except KeyError: amounts[amount_x_for_protein] = [protein] - return amounts + return [v for k, v in sorted(amounts.items(), reverse=True)] -def sort_pgroup_score(proteins, ppgraph): - scores = {} - for protein in proteins: - protein_score = sum([int(x[1]) for y in ppgraph[protein].values() - for x in y]) - try: - scores[protein_score].append(protein) - except KeyError: - scores[protein_score] = [protein] - return sort_evaluator(scores) +def sort_pgroup_score(proteins): + return sort_amounts(proteins, 5) -def sort_evaluator(sort_dict): - return [v for k, v in sorted(sort_dict.items(), reverse=True)] - - -def sort_pgroup_coverage(proteins, ppgraph): +def sort_pgroup_coverage(proteins): return [proteins] -def sort_alphabet(proteins, ppgraph): - return [sorted(proteins)] -# FIXME sequence coverage, we need database for that - - -def get_slave_proteins(protein, graph): - # FIXME ties? other problems? 
- slave_proteins = [] - for subprotein, peps in graph.items(): - if subprotein == protein: - continue - elif set(graph[protein]).issubset(peps): - return False - elif set(peps).issubset(graph[protein]): - slave_proteins.append(subprotein) - return slave_proteins +def sort_alphabet(proteins): + return [sorted(proteins, key=lambda x: x[2])]
Changed sort functions to sort info from db records by key
glormph_msstitch
train
83ed900fce18a01f8eb24c45dd69b21b50085407
diff --git a/Factory/MediaFactory.php b/Factory/MediaFactory.php index <HASH>..<HASH> 100755 --- a/Factory/MediaFactory.php +++ b/Factory/MediaFactory.php @@ -12,7 +12,7 @@ namespace WellCommerce\Bundle\MediaBundle\Factory; -use WellCommerce\Bundle\CoreBundle\Factory\AbstractFactory; +use WellCommerce\Bundle\DoctrineBundle\Factory\AbstractEntityFactory; use WellCommerce\Bundle\MediaBundle\Entity\MediaInterface; /** @@ -20,7 +20,7 @@ use WellCommerce\Bundle\MediaBundle\Entity\MediaInterface; * * @author Adam Piotrowski <[email protected]> */ -class MediaFactory extends AbstractFactory +class MediaFactory extends AbstractEntityFactory { /** * @var string
Moved factories to doctrine bundle >> EntityFactoryInterface
WellCommerce_CouponBundle
train
eabe1140b2e59947d143757234db8be6495cb476
diff --git a/strategyFunctions/swarmServices.js b/strategyFunctions/swarmServices.js index <HASH>..<HASH> 100644 --- a/strategyFunctions/swarmServices.js +++ b/strategyFunctions/swarmServices.js @@ -721,9 +721,12 @@ var engine = { createSecret (options, cb) { lib.getDeployer(options, (error, deployer) => { utils.checkError(error, 540, cb, () => { + + let data = Buffer.from(options.params.data).toString('base64'); + let secret = { Name: options.params.name, - Data: options.params.data + Data: data }; deployer.createSecret(secret, (error, response) => {
Data in docker createSecret is now converted to base<I> before creating the secret
soajs_soajs.core.drivers
train
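Docker's secret-creation API expects the Data field base64-encoded, which is what the fix above adds before posting the payload. The same preparation step in Python — only the payload construction is shown; the HTTP call to the engine is omitted:

```python
import base64
import json

def make_secret_payload(name, data):
    # The Docker Engine API requires the Data field to be base64-encoded.
    if isinstance(data, str):
        data = data.encode("utf-8")
    return json.dumps({
        "Name": name,
        "Data": base64.b64encode(data).decode("ascii"),
    })

print(make_secret_payload("db-password", "s3cret"))
```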
1fd8d16ef25bf8b725415eca7b39a24c876d4539
diff --git a/Tests/TestBundle/Checker/CustomChecker.php b/Tests/TestBundle/Checker/CustomChecker.php index <HASH>..<HASH> 100644 --- a/Tests/TestBundle/Checker/CustomChecker.php +++ b/Tests/TestBundle/Checker/CustomChecker.php @@ -18,7 +18,7 @@ use Jose\Component\Checker\HeaderChecker; class CustomChecker implements ClaimChecker, HeaderChecker { - public function checkClaim($value) + public function checkClaim($value): void { if (true === $value) { throw new \InvalidArgumentException('Custom checker!'); @@ -30,7 +30,7 @@ class CustomChecker implements ClaimChecker, HeaderChecker return 'custom'; } - public function checkHeader($value) + public function checkHeader($value): void { if (true === $value) { throw new \InvalidArgumentException('Custom checker!');
Checkers (#<I>) Checkers reviewed
web-token_jwt-bundle
train
0cfd5e2dd77f27a0715b07cde4649989a6c4b769
diff --git a/marathon_acme/tests/test_service.py b/marathon_acme/tests/test_service.py index <HASH>..<HASH> 100644 --- a/marathon_acme/tests/test_service.py +++ b/marathon_acme/tests/test_service.py @@ -320,8 +320,7 @@ class TestMarathonAcme(object): 'portDefinitions': [ {'port': 9000, 'protocol': 'tcp', 'labels': {}}, {'port': 9001, 'protocol': 'tcp', 'labels': {}} - ], - 'ports': [10007] + ] }) d = self.marathon_acme.sync() @@ -364,6 +363,40 @@ class TestMarathonAcme(object): assert_that(self.fake_marathon_lb.check_signalled_usr1(), Equals(True)) + def test_sync_app_port_definitions_preferred(self): + """ + When a sync is run and this Marathon returns both the + 'portDefinitions' field and 'ports' field with the app definitions, + the 'portsDefinition' field should be used and 'ports' ignored. + """ + # Store an app in Marathon with a marathon-acme domain + self.fake_marathon.add_app({ + 'id': '/my-app_1', + 'labels': { + 'HAPROXY_GROUP': 'external', + 'MARATHON_ACME_0_DOMAIN': 'example.com' + }, + 'portDefinitions': [ + {'port': 9000, 'protocol': 'tcp', 'labels': {}}, + {'port': 9001, 'protocol': 'tcp', 'labels': {}} + ], + # There should never be a different number of elements in + # 'portDefinitions' and 'ports' but 'ports' is deprecated and we + # want to make sure it is ignored if 'portDefinitions' is present + 'ports': [] + }) + + d = self.marathon_acme.sync() + assert_that(d, succeeded(MatchesListwise([ # Per domain + is_marathon_lb_sigusr_response + ]))) + + assert_that(self.cert_store.as_dict(), succeeded(MatchesDict({ + 'example.com': Not(Is(None)) + }))) + + assert_that(self.fake_marathon_lb.check_signalled_usr1(), Equals(True)) + def test_sync_multiple_apps(self): """ When a sync is run and there are multiple apps, certificates are
Add explicit test for app definitions with both portDefinitions and ports
praekeltfoundation_marathon-acme
train
fa23309bb02473491c7be5db9e8220b786df614a
diff --git a/lxd/patches.go b/lxd/patches.go index <HASH>..<HASH> 100644 --- a/lxd/patches.go +++ b/lxd/patches.go @@ -40,6 +40,7 @@ var patches = []patch{ {name: "storage_api_keys", run: patchStorageApiKeys}, {name: "storage_api_update_storage_configs", run: patchStorageApiUpdateStorageConfigs}, {name: "storage_api_lxd_on_btrfs", run: patchStorageApiLxdOnBtrfs}, + {name: "storage_api_lvm_detect_lv_size", run: patchStorageApiDetectLVSize}, } type patch struct { @@ -2130,3 +2131,94 @@ func patchStorageApiLxdOnBtrfs(name string, d *Daemon) error { return nil } + +func patchStorageApiDetectLVSize(name string, d *Daemon) error { + pools, err := dbStoragePools(d.db) + if err != nil { + if err == NoSuchObjectError { + return nil + } + shared.LogErrorf("Failed to query database: %s", err) + return err + } + + for _, poolName := range pools { + poolID, pool, err := dbStoragePoolGet(d.db, poolName) + if err != nil { + shared.LogErrorf("Failed to query database: %s", err) + return err + } + + // Make sure that config is not empty. + if pool.Config == nil { + pool.Config = map[string]string{} + + // Insert default values. + err = storagePoolFillDefault(poolName, pool.Driver, pool.Config) + if err != nil { + return err + } + } + + // We're only interested in LVM pools. + if pool.Driver != "lvm" { + continue + } + + // Get all storage volumes on the storage pool. + volumes, err := dbStoragePoolVolumesGet(d.db, poolID, supportedVolumeTypes) + if err != nil { + if err == NoSuchObjectError { + continue + } + return err + } + + poolName := pool.Config["lvm.vg_name"] + if poolName == "" { + shared.LogErrorf("The \"lvm.vg_name\" key should not be empty.") + return fmt.Errorf("The \"lvm.vg_name\" key should not be empty.") + } + + for _, volume := range volumes { + // Make sure that config is not empty. + if volume.Config == nil { + volume.Config = map[string]string{} + + // Insert default values. + err := storageVolumeFillDefault(volume.Name, volume.Config, pool) + if err != nil { + return err + } + } + + // It shouldn't be possible that false volume types + // exist in the db, so it's safe to ignore the error. + volumeTypeApiEndpoint, _ := storagePoolVolumeTypeNameToApiEndpoint(volume.Type) + lvmName := containerNameToLVName(volume.Name) + lvmLvDevPath := getLvmDevPath(poolName, volumeTypeApiEndpoint, lvmName) + size, err := lvmGetLVSize(lvmLvDevPath) + if err != nil { + shared.LogErrorf("Failed to detect size of logical volume: %s.", err) + return err + } + + if volume.Config["size"] == size { + continue + } + + volume.Config["size"] = size + + // It shouldn't be possible that false volume types + // exist in the db, so it's safe to ignore the error. + volumeType, _ := storagePoolVolumeTypeNameToType(volume.Type) + // Update the volume config. 
+ err = dbStoragePoolVolumeUpdate(d.db, volume.Name, volumeType, poolID, volume.Config) + if err != nil { + return err + } + } + } + + return nil +} diff --git a/lxd/storage_lvm.go b/lxd/storage_lvm.go index <HASH>..<HASH> 100644 --- a/lxd/storage_lvm.go +++ b/lxd/storage_lvm.go @@ -88,6 +88,24 @@ func storageLVExists(lvName string) (bool, error) { return true, nil } +func lvmGetLVSize(lvPath string) (string, error) { + msg, err := shared.TryRunCommand("lvs", "--noheadings", "-o", "size", "--nosuffix", "--units", "b", lvPath) + if err != nil { + return "", fmt.Errorf("Failed to retrieve size of logical volume: %s: %s.", string(msg), err) + } + + sizeString := string(msg) + sizeString = strings.TrimSpace(sizeString) + size, err := strconv.ParseInt(sizeString, 10, 64) + if err != nil { + return "", err + } + + detectedSize := shared.GetByteSizeString(size, 0) + + return detectedSize, nil +} + func storageLVMThinpoolExists(vgName string, poolName string) (bool, error) { output, err := exec.Command("vgs", "--noheadings", "-o", "lv_attr", fmt.Sprintf("%s/%s", vgName, poolName)).Output() if err != nil {
patches: detect logical volume size Addresses #<I>.
lxc_lxd
train
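In the LXD patch above, lvmGetLVSize shells out to `lvs` with `--noheadings`, `--nosuffix`, and `--units b`, then trims and parses the output as an integer number of bytes. A hedged Python sketch of the same probing step — it requires the lvm2 tools and an existing logical volume, so treat it as illustrative:

```python
import subprocess

def lvm_get_lv_size(lv_path):
    # Ask lvs for the size in plain bytes: no header row, no unit suffix.
    out = subprocess.check_output(
        ["lvs", "--noheadings", "-o", "size", "--nosuffix",
         "--units", "b", lv_path],
        text=True,
    )
    return int(out.strip())  # e.g. 10737418240 for a 10GiB volume
```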
d5975d53cba537a26d6c6e7082f325f707d56e56
diff --git a/Slim/Http/Response.php b/Slim/Http/Response.php index <HASH>..<HASH> 100644 --- a/Slim/Http/Response.php +++ b/Slim/Http/Response.php @@ -306,16 +306,16 @@ class Response extends Message implements ResponseInterface */ public function withJson($data, $status = null, $encodingOptions = 0) { - $body = $this->getBody(); - $body->rewind(); - $body->write($json = json_encode($data, $encodingOptions)); + $clone = clone $this; + $clone->body = new Body(fopen('php://temp', 'w+')); + $clone->body->write($json = json_encode($data, $encodingOptions)); // Ensure that the json encoding passed successfully if ($json === false) { throw new \RuntimeException(json_last_error_msg(), json_last_error()); } - $responseWithJson = $this->withHeader('Content-Type', 'application/json;charset=utf-8'); + $responseWithJson = $clone->withHeader('Content-Type', 'application/json;charset=utf-8'); if (isset($status)) { return $responseWithJson->withStatus($status); } diff --git a/tests/Http/ResponseTest.php b/tests/Http/ResponseTest.php index <HASH>..<HASH> 100755 --- a/tests/Http/ResponseTest.php +++ b/tests/Http/ResponseTest.php @@ -287,9 +287,10 @@ class ResponseTest extends \PHPUnit_Framework_TestCase { $data = ['foo' => 'bar1&bar2']; - $response = new Response(); - $response = $response->withJson($data, 201); + $originalResponse = new Response(); + $response = $originalResponse->withJson($data, 201); + $this->assertNotEquals($response->getStatusCode(), $originalResponse->getStatusCode()); $this->assertEquals(201, $response->getStatusCode()); $this->assertEquals('application/json;charset=utf-8', $response->getHeaderLine('Content-Type')); @@ -297,6 +298,13 @@ class ResponseTest extends \PHPUnit_Framework_TestCase $body->rewind(); $dataJson = $body->getContents(); //json_decode($body->getContents(), true); + $originalBody = $originalResponse->getBody(); + $originalBody->rewind(); + $originalContents = $originalBody->getContents(); + + // test the original body hasn't be replaced + $this->assertNotEquals($dataJson, $originalContents); + $this->assertEquals('{"foo":"bar1&bar2"}', $dataJson); $this->assertEquals($data['foo'], json_decode($dataJson, true)['foo']); @@ -321,7 +329,7 @@ class ResponseTest extends \PHPUnit_Framework_TestCase { $data = ['foo' => 'bar'.chr(233)]; $this->assertEquals('bar'.chr(233), $data['foo']); - + $response = new Response(); $response->withJson($data, 200);
Ensure withJson returns a new Response Fixes issue <I>. The withJson method now clones the Response and updates it with a new header and a new Body.
slimphp_Slim
train
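The Slim fix above follows the PSR-7 immutability convention: `with*` methods must return a modified clone and leave the receiver untouched, instead of writing into the shared body. The same discipline transplanted to Python, as a hedged sketch:

```python
import copy
import json

class Response:
    def __init__(self, status=200, headers=None, body=""):
        self.status, self.headers, self.body = status, dict(headers or {}), body

    def with_json(self, data, status=None):
        # Return a new response; never mutate self (PSR-7 style immutability).
        clone = copy.copy(self)
        clone.headers = dict(self.headers)   # detach from the original's headers
        clone.body = json.dumps(data)
        clone.headers["Content-Type"] = "application/json;charset=utf-8"
        if status is not None:
            clone.status = status
        return clone

original = Response()
updated = original.with_json({"foo": "bar"}, 201)
assert original.body == "" and updated.status == 201  # original untouched
```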
a05ce20bac30150941a883304a54736a186eba5c
diff --git a/lib/python/vdm/server/HTTPListener.py b/lib/python/vdm/server/HTTPListener.py index <HASH>..<HASH> 100644 --- a/lib/python/vdm/server/HTTPListener.py +++ b/lib/python/vdm/server/HTTPListener.py @@ -1969,26 +1969,6 @@ class StopDatabaseAPI(MethodView): return server.kill_database(database_id) - # @staticmethod - # def put(database_id): - # - # """ - # Stops the specified VoltDB - # Args: - # database_id (int): The id of the database that should be stopped - # Returns: - # Status string indicating if the stop request was sent successfully - # """ - # # try: - # # test = Get_Voltdb_Process().processId - # # # os.kill(Get_Voltdb_Process().processId, signal.SIGTERM) - # # return make_response(jsonify({'statusstring': 'success'}), 200) - # # except Exception, err: - # # return make_response(jsonify({'statusstring': str(err)}), 500) - # server = voltdbserver.VoltDatabase(database_id) - # return server.kill_database(database_id) - - class StartServerAPI(MethodView): """Class to handle request to start a server for this database.""" @@ -2340,7 +2320,6 @@ def main(runner, amodule, config_dir, server): global __IP__ global __PORT__ - # config_path = config_dir + '/' + 'vdm.xml' config_path = os.path.join(config_dir, 'vdm.xml') arrServer = {}
VDM-<I> Removed the commented-out code.
VoltDB_voltdb
train
7ee49c32b70b0bd3e9709865c0b9c43d16c8f18c
diff --git a/peer.go b/peer.go index <HASH>..<HASH> 100644 --- a/peer.go +++ b/peer.go @@ -121,7 +121,8 @@ type Peer struct { versionKnown bool // Last received pong message - lastPong int64 + lastPong int64 + lastBlockReceived time.Time host []byte port uint16 @@ -408,10 +409,7 @@ func (p *Peer) HandleInbound() { for i := msg.Data.Len() - 1; i >= 0; i-- { block = ethchain.NewBlockFromRlpValue(msg.Data.Get(i)) - //p.ethereum.StateManager().PrepareDefault(block) - //state := p.ethereum.StateManager().CurrentState() err = p.ethereum.StateManager().Process(block, false) - if err != nil { if ethutil.Config.Debug { peerlogger.Infof("Block %x failed\n", block.Hash()) @@ -422,6 +420,8 @@ func (p *Peer) HandleInbound() { } else { lastBlock = block } + + p.lastBlockReceived = time.Now() } if msg.Data.Len() <= 1 { @@ -561,6 +561,25 @@ func (p *Peer) HandleInbound() { p.Stop() } +// General update method +func (self *Peer) update() { + serviceTimer := time.NewTicker(5 * time.Second) + +out: + for { + select { + case <-serviceTimer.C: + if time.Since(self.lastBlockReceived) > 10*time.Second { + self.catchingUp = false + } + case <-self.quit: + break out + } + } + + serviceTimer.Stop() +} + func (p *Peer) Start() { peerHost, peerPort, _ := net.SplitHostPort(p.conn.LocalAddr().String()) servHost, servPort, _ := net.SplitHostPort(p.conn.RemoteAddr().String()) @@ -583,6 +602,8 @@ func (p *Peer) Start() { go p.HandleOutbound() // Run the inbound handler in a new goroutine go p.HandleInbound() + // Run the general update handler + go p.update() // Wait a few seconds for startup and then ask for an initial ping time.Sleep(2 * time.Second)
Added update method and general service timer * disable catching up if no block has been received for longer than <I> seconds
ethereum_go-ethereum
train
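The new update() goroutine in the go-ethereum commit above is essentially an inactivity watchdog: a periodic tick compares "now" against the last block timestamp and clears the catching-up flag after a quiet period. A rough Python equivalent of the pattern, with timings shortened for the demo:

```python
import threading
import time

class Peer:
    def __init__(self):
        self.catching_up = True
        self.last_block_received = time.monotonic()
        self._quit = threading.Event()

    def _update(self, tick=0.1, timeout=0.3):
        # Periodically check for inactivity; stop "catching up" once no
        # block has arrived within `timeout` seconds.
        while not self._quit.wait(tick):
            if time.monotonic() - self.last_block_received > timeout:
                self.catching_up = False

    def start(self):
        threading.Thread(target=self._update, daemon=True).start()

peer = Peer()
peer.start()
time.sleep(0.5)           # no blocks arrive during this window
print(peer.catching_up)   # False -- the watchdog cleared the flag
```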
280157d508ddf52c503be05c5b81cec36cb47d21
diff --git a/src/core/TSDB.java b/src/core/TSDB.java index <HASH>..<HASH> 100644 --- a/src/core/TSDB.java +++ b/src/core/TSDB.java @@ -965,6 +965,9 @@ public final class TSDB { if (search != null) { search.indexAnnotation(note).addErrback(new PluginError()); } + if( rt_publisher != null ) { + rt_publisher.publishAnnotation(note); + } } /** diff --git a/src/tsd/RTPublisher.java b/src/tsd/RTPublisher.java index <HASH>..<HASH> 100644 --- a/src/tsd/RTPublisher.java +++ b/src/tsd/RTPublisher.java @@ -14,13 +14,14 @@ package net.opentsdb.tsd; import java.util.Map; +import net.opentsdb.core.TSDB; +import net.opentsdb.meta.Annotation; +import net.opentsdb.stats.StatsCollector; + import org.hbase.async.Bytes; import com.stumbleupon.async.Deferred; -import net.opentsdb.core.TSDB; -import net.opentsdb.stats.StatsCollector; - /** * Real Time publisher plugin interface that is used to emit data from a TSD * as data comes in. Initially it supports publishing data points immediately @@ -137,4 +138,13 @@ public abstract class RTPublisher { public abstract Deferred<Object> publishDataPoint(final String metric, final long timestamp, final double value, final Map<String, String> tags, final byte[] tsuid); + + /** + * Called any time a new annotation is published + * @param annotation The published annotation + * @return A deferred without special meaning to wait on if necessary. The + * value may be null but a Deferred must be returned. + */ + public abstract Deferred<Object> publishAnnotation(Annotation annotation); + } diff --git a/test/tsd/DummyRTPublisher.java b/test/tsd/DummyRTPublisher.java index <HASH>..<HASH> 100644 --- a/test/tsd/DummyRTPublisher.java +++ b/test/tsd/DummyRTPublisher.java @@ -15,6 +15,7 @@ package net.opentsdb.tsd; import java.util.Map; import net.opentsdb.core.TSDB; +import net.opentsdb.meta.Annotation; import net.opentsdb.stats.StatsCollector; import com.stumbleupon.async.Deferred; @@ -65,4 +66,9 @@ public final class DummyRTPublisher extends RTPublisher { return Deferred.fromResult(new Object()); } + @Override + public Deferred<Object> publishAnnotation(Annotation annotation) { + return Deferred.fromResult(new Object()); + } + } diff --git a/test/tsd/TestRTPublisher.java b/test/tsd/TestRTPublisher.java index <HASH>..<HASH> 100644 --- a/test/tsd/TestRTPublisher.java +++ b/test/tsd/TestRTPublisher.java @@ -16,7 +16,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.when; import static org.powermock.api.mockito.PowerMockito.mock; + +import java.util.Collections; +import java.util.HashMap; + import net.opentsdb.core.TSDB; +import net.opentsdb.meta.Annotation; import net.opentsdb.utils.Config; import net.opentsdb.utils.PluginLoader; @@ -99,4 +104,17 @@ public final class TestRTPublisher { System.currentTimeMillis(), new byte[] { 0, 0, 0, 0, 0, 0, 0, 1 }, null, null, (short)0x7)); } + + @Test + public void publishAnnotation() throws Exception { + Annotation ann = new Annotation(); + HashMap<String, String> customMap = new HashMap<String, String>(1); + customMap.put("test-custom-key", "test-custom-value"); + ann.setCustom(customMap); + ann.setDescription("A test annotation"); + ann.setNotes("Test annotation notes"); + ann.setStartTime(System.currentTimeMillis()); + assertNotNull(rt_publisher.publishAnnotation(ann)); + } + }
#<I> RTPublisher should publish submitted Annotations
OpenTSDB_opentsdb
train
a89a1acb177731902d7d8b82be1ae931b91fe189
diff --git a/python/upload.py b/python/upload.py index <HASH>..<HASH> 100644 --- a/python/upload.py +++ b/python/upload.py @@ -77,6 +77,39 @@ def encode_multipart(fields, files, boundary=None): return (body, headers) +def upload_file(filepath, move_files=True): + ''' + Upload file at filepath. + + Move to subfolders 'success'/'failed' on completion if move_files is True. + ''' + filename = os.path.basename(filepath) + print("Uploading: {0}".format(filename)) + + parameters = {"key": filename, "AWSAccessKeyId": "AKIAI2X3BJAT2W75HILA", "acl": "private", + "policy": PERMISSION_HASH, "signature": SIGNATURE_HASH, "Content-Type":"image/jpeg" } + + with open(filepath, "rb") as f: + encoded_string = f.read() + + data, headers = encode_multipart(parameters, {'file': {'filename': filename, 'content': encoded_string}}) + request = urllib2.Request(MAPILLARY_UPLOAD_URL, data=data, headers=headers) + response = urllib2.urlopen(request) + + if response.getcode()==204: + os.rename(filepath, "success/"+filename) + print("Success: {0}".format(filename)) + else: + os.rename(filepath, "failed/"+filename) + print("Failed: {0}".format(filename)) + + +def create_dirs(): + if not os.path.exists("success"): + os.mkdir("success") + if not os.path.exists("failed"): + os.mkdir("failed") + if __name__ == '__main__': ''' @@ -89,6 +122,8 @@ if __name__ == '__main__': path = sys.argv[1] + create_dirs() + if path.endswith(".jpg"): # single file file_list = [path] @@ -99,22 +134,6 @@ if __name__ == '__main__': file_list += [os.path.join(root, filename) for filename in files if filename.endswith(".jpg")] for filepath in file_list: - filename = os.path.basename(filepath) - print("Uploading: {0}".format(filename)) - - parameters = {"key": filename, "AWSAccessKeyId": "AKIAI2X3BJAT2W75HILA", "acl": "private", - "policy": PERMISSION_HASH, "signature": SIGNATURE_HASH, "Content-Type":"image/jpeg" } - - with open(filepath, "rb") as f: - encoded_string = f.read() - - data, headers = encode_multipart(parameters, {'file': {'filename': filename, 'content': encoded_string}}) - request = urllib2.Request(MAPILLARY_UPLOAD_URL, data=data, headers=headers) - response = urllib2.urlopen(request) - - if response.getcode()==204: - print("Success: {0}".format(filename)) - else: - print("Failed: {0}".format(filename)) + upload_file(filepath) print("Done uploading.")
Move photos on success/fail
mapillary_mapillary_tools
train
2a4ea60d33cc0d5c42cce03800a5d376e68f0e53
diff --git a/test/foreign_function.js b/test/foreign_function.js index <HASH>..<HASH> 100644 --- a/test/foreign_function.js +++ b/test/foreign_function.js @@ -1,6 +1,5 @@ -var expect = require('expect.js') - , assert = require('assert') +var assert = require('assert') , ref = require('ref') , Array = require('ref-array') , Struct = require('ref-struct') @@ -25,15 +24,15 @@ describe('ForeignFunction', function () { it('should call the static "abs" bindings', function () { var _abs = bindings.abs var abs = ffi.ForeignFunction(_abs, 'int', [ 'int' ]) - expect(abs).to.be.a('function') - expect(abs(-1234)).to.equal(1234) + assert.equal('function', typeof abs) + assert.equal(1234, abs(-1234)) }) it('should call the static "atoi" bindings', function () { var _atoi = bindings.atoi var atoi = ffi.ForeignFunction(_atoi, 'int', [ 'string' ]) - expect(atoi).to.be.a('function') - expect(atoi('1234')).to.equal(1234) + assert.equal('function', typeof atoi) + assert.equal(1234, atoi('1234')) }) it('should call the static "double_box" bindings', function () { @@ -154,12 +153,12 @@ describe('ForeignFunction', function () { it('should call the static "abs" bindings asynchronously', function (done) { var _abs = bindings.abs var abs = ffi.ForeignFunction(_abs, 'int', [ 'int' ]) - expect(abs).to.be.a('function') + assert.equal('function', typeof abs.async) // invoke asynchronously abs.async(-1234, function (err, res) { - expect(err).to.equal(null) - expect(res).to.equal(1234) + assert.equal(null, err) + assert.equal(1234, res) done() }) })
test: use "assert" in the ForeignFunction tests
node-ffi-napi_node-ffi-napi
train
bdef9ff1efc115de35dfba2548dd1f6227432ff4
diff --git a/lib/irc.js b/lib/irc.js index <HASH>..<HASH> 100644 --- a/lib/irc.js +++ b/lib/irc.js @@ -91,8 +91,6 @@ function _version() { * @param {String} nick: optional nickname, defaults to 'nestor' */ function start(host, channel, nick) { - const PENDING = 1000; - var commands = { build: _build, stop: _stop, @@ -105,7 +103,7 @@ function start(host, channel, nick) { }; bot = new Bot(commands); - bot.connect(host, channel, { nick: nick, pending: PENDING }); + bot.connect(host, channel, { nick: nick }); } exports.start = start; \ No newline at end of file diff --git a/lib/jenkins.js b/lib/jenkins.js index <HASH>..<HASH> 100644 --- a/lib/jenkins.js +++ b/lib/jenkins.js @@ -123,7 +123,7 @@ Jenkins.prototype.console = function (jobName, opts, cb) { } setTimeout(function () { cb(); - }, opts.interval || INTERVAL); + }, (opts && opts.interval) ? opts.interval : INTERVAL); } }); },
Default IRC pending should be 1 sec, not no pending. Change Jenkins console interval handling to allow undefined opts.
cliffano_nestor
train
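The Jenkins part of the nestor fix guards against `opts` being undefined before reading `opts.interval`. The equivalent defensive default in Python is a one-liner:

```python
INTERVAL = 1000  # default polling interval in ms

def next_poll_delay(opts=None):
    # Works whether opts is None, {}, or {"interval": 250}.
    return (opts or {}).get("interval", INTERVAL)

print(next_poll_delay(), next_poll_delay({"interval": 250}))  # 1000 250
```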
a5c014f5b227d7a65783823f89d075596186b643
diff --git a/tests/cases/test/DispatcherTest.php b/tests/cases/test/DispatcherTest.php index <HASH>..<HASH> 100644 --- a/tests/cases/test/DispatcherTest.php +++ b/tests/cases/test/DispatcherTest.php @@ -27,7 +27,11 @@ class DispatcherTest extends \lithium\test\Unit { } public function testRunWithReporter() { - $report = Dispatcher::run(null, array('reporter' => 'html')); + $report = Dispatcher::run(null, array( + 'reporter' => function($info) { + return $info; + } + )); $this->assertTrue($report instanceof Report); $result = $report->group; diff --git a/tests/cases/test/ReportTest.php b/tests/cases/test/ReportTest.php index <HASH>..<HASH> 100644 --- a/tests/cases/test/ReportTest.php +++ b/tests/cases/test/ReportTest.php @@ -40,8 +40,7 @@ class ReportTest extends \lithium\test\Unit { array('data' => array('lithium\tests\mocks\test\MockFilterClassTest')) ), 'filters' => array("Complexity" => ""), - 'format' => 'html', - 'reporter' => 'html' + 'format' => 'html' )); $expected = array('lithium\test\filter\Complexity' => array( @@ -67,10 +66,9 @@ class ReportTest extends \lithium\test\Unit { public function testRender() { $report = new Report(array( - 'title' => 'lithium\tests\mocks\test\MockUnitTest', - 'group' => new Group(array('data' => array('lithium\tests\mocks\test\MockUnitTest'))), - 'format' => 'html', - 'reporter' => 'html' + 'title' => '\lithium\tests\mocks\test\MockUnitTest', + 'group' => new Group(array('data' => array('\lithium\tests\mocks\test\MockUnitTest'))), + 'format' => 'html' )); $report->run();
Updating report unit tests for new reporter.
UnionOfRAD_lithium
train
876f9e4c8e9e2eeb6812ed76c91d0e77a479dff0
diff --git a/bin/medpy_extract_sub_volume_by_example.py b/bin/medpy_extract_sub_volume_by_example.py index <HASH>..<HASH> 100755 --- a/bin/medpy_extract_sub_volume_by_example.py +++ b/bin/medpy_extract_sub_volume_by_example.py @@ -82,16 +82,10 @@ def main(): # extract the position of the foreground object in the mask image logger.info('Extract the position of the foreground object...') - mask = mask_image.nonzero() - position = ((max(0, mask[0].min() - args.offset), mask[0].max() + 1 + args.offset), # crop negative values - (max(0, mask[1].min() - args.offset), mask[1].max() + 1 + args.offset), - (max(0, mask[2].min() - args.offset), mask[2].max() + 1 + args.offset)) # minx, maxx / miny, maxy / minz, maxz - - logger.debug('Extracted position is {}.'.format(position)) - - # unload mask and mask image - del mask - del mask_image + positions = mask_image.nonzero() + positions = [(max(0, positions[i].min() - args.offset), positions[i].max() + 1 + args.offset) + for i in range(len(positions))] # crop negative values + logger.debug('Extracted position is {}.'.format(positions)) # load image logger.info('Loading image {}...'.format(args.image)) @@ -103,7 +97,7 @@ def main(): # execute extraction of the sub-area logger.info('Extracting sub-volume...') - index = [slice(x[0], x[1]) for x in position] + index = tuple([slice(x[0], x[1]) for x in positions]) volume = image_data[index] # check if the output image contains data @@ -117,7 +111,7 @@ def main(): origin_base = numpy.array([0] * image_data.ndim) # for backwards compatibility # modify the volume offset to imitate numpy behavior (e.g. wrap negative values) - offset = numpy.array([x[0] for x in position]) + offset = numpy.array([x[0] for x in positions]) for i in range(0, len(offset)): if None == offset[i]: offset[i] = 0 offset[offset<0] += numpy.array(image_data.shape)[offset<0] # wrap around
medpy_extract_sub_volume_by_example.py is now dimensionality-free.
loli_medpy
train
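The medpy rewrite above computes one (min, max) pair per axis from mask.nonzero(), so the script no longer hard-codes exactly three dimensions. The core idea as standalone numpy:

```python
import numpy as np

def bounding_slices(mask, offset=0):
    # One slice per axis, whatever the dimensionality of the mask.
    positions = mask.nonzero()
    return tuple(
        slice(max(0, axis.min() - offset), axis.max() + 1 + offset)
        for axis in positions
    )

mask = np.zeros((5, 6, 7, 8), dtype=bool)  # works for 4-D just as well
mask[1:3, 2:4, 3:5, 4:6] = True
print(bounding_slices(mask))
# (slice(1, 3), slice(2, 4), slice(3, 5), slice(4, 6))
```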
e55074986db493e074486d50922705ec4a9ab4b7
diff --git a/core/src/main/java/com/graphhopper/routing/ch/PrepareContractionHierarchies.java b/core/src/main/java/com/graphhopper/routing/ch/PrepareContractionHierarchies.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/graphhopper/routing/ch/PrepareContractionHierarchies.java +++ b/core/src/main/java/com/graphhopper/routing/ch/PrepareContractionHierarchies.java @@ -134,6 +134,10 @@ public class PrepareContractionHierarchies { logFinalGraphStats(); } + public boolean isPrepared() { + return prepared; + } + private void logFinalGraphStats() { logger.info("shortcuts that exceed maximum weight: {}", chStore.getNumShortcutsExceedingWeight()); logger.info("took: {}s, graph now - num edges: {}, num nodes: {}, num shortcuts: {}", diff --git a/core/src/main/java/com/graphhopper/routing/lm/PrepareLandmarks.java b/core/src/main/java/com/graphhopper/routing/lm/PrepareLandmarks.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/graphhopper/routing/lm/PrepareLandmarks.java +++ b/core/src/main/java/com/graphhopper/routing/lm/PrepareLandmarks.java @@ -128,6 +128,10 @@ public class PrepareLandmarks { totalPrepareTime = sw.getMillis(); } + public boolean isPrepared() { + return prepared; + } + public long getTotalPrepareTime() { return totalPrepareTime; }
Add missing isPrepared method to LM/CH preparations
graphhopper_graphhopper
train
13272ff1568b90d541df6db6058e1329d243acf4
diff --git a/netmaster/objApi/apiController.go b/netmaster/objApi/apiController.go index <HASH>..<HASH> 100755 --- a/netmaster/objApi/apiController.go +++ b/netmaster/objApi/apiController.go @@ -366,107 +366,6 @@ func endpointGroupCleanup(endpointGroup *contivModel.EndpointGroup) { // FIXME: hack to allocate unique endpoint group ids var globalEpgID = 1 -/* -// Some utility functions to work with the external contracts -func extContractsGrpDeregister(epg *contivModel.EndpointGroup, contractsGrp *contivModel.ExtContractsGroup, contractType string) error { - if contractsGrp == nil { - errStr := fmt.Sprintf("%s External contracts group not found", contractType) - log.Errorf(errStr) - return core.Errorf(errStr) - } - - modeldb.RemoveLinkSet(&contractsGrp.LinkSets.EndpointGroups, epg) - if contractType == "provided" { - modeldb.RemoveLinkSet(&epg.LinkSets.ProvExtContractsGrps, contractsGrp) - } else if contractType == "consumed" { - modeldb.RemoveLinkSet(&epg.LinkSets.ConsExtContractsGrps, contractsGrp) - } - // Links broken, update the contracts group object. - err := contractsGrp.Write() - if err != nil { - return err - } - - return nil -} - -func extContractsGrpValidateAndRegister(epg *contivModel.EndpointGroup, contractsGrp *contivModel.ExtContractsGroup, contractType string) error { - if contractsGrp == nil { - errStr := fmt.Sprintf("%s External contracts group not found", contractType) - log.Errorf(errStr) - return core.Errorf(errStr) - } - - if strings.ToLower(contractsGrp.ContractsType) != contractType { - errStr := fmt.Sprintf("Incorrect type for contract group: %v", contractsGrp) - log.Errorf(errStr) - return core.Errorf(errStr) - } - - // Establish the necessary links. - // TBD: Error handling. In the loop, we are bailing out - // on failure. How do we take care of already established - // links? - modeldb.AddLinkSet(&contractsGrp.LinkSets.EndpointGroups, epg) - if contractType == "provided" { - modeldb.AddLinkSet(&epg.LinkSets.ProvExtContractsGrps, contractsGrp) - } else if contractType == "consumed" { - modeldb.AddLinkSet(&epg.LinkSets.ConsExtContractsGrps, contractsGrp) - } - - // Links made, write the policy set object. 
- err := contractsGrp.Write() - if err != nil { - return err - } - - return nil -} - -func cleanupExternalContracts(endpointGroup *contivModel.EndpointGroup) error { - // Cleanup consumed external contracts - for _, consExtContractsGrp := range endpointGroup.ConsExtContractsGrps { - contractsGrp := contivModel.FindExtContractsGroup(consExtContractsGrp) - err = extContractsGrpDeregister(endpointGroup, contractsGrp, "consumed") - if err != nil { - return err - } - } - - // Cleanup provided external contracts - for _, provExtContractsGrp := range endpointGroup.ProvExtContractsGrps { - contractsGrp := contivModel.FindExtContractsGroup(provExtContractsGrp) - err = extContractsGrpDeregister(endpointGroup, contractsGrp, "provided") - if err != nil { - return err - } - } - - return nil -} - -func setupExternalContracts(endpointGroup *contivModel.EndpointGroup, consContractsGrps, provContractsGrps string[]) error { - // Validate presence and register consumed external contracts - for _, consExtContractsGrp := range consContractsGrps { - contractsGrp := contivModel.FindExtContractsGroup(consExtContractsGrp) - err = extContractsGrpValidateAndRegister(endpointGroup, contractsGrp, "consumed") - if err != nil { - return err - } - } - - // Validate presence and register provided external contracts - for _, provExtContractsGrp := range provContractsGrps { - contractsGrp := contivModel.FindExtContractsGroup(provExtContractsGrp) - err = extContractsGrpValidateAndRegister(endpointGroup, contractsGrp, "provided") - if err != nil { - return err - } - } - - return nil -} -*/ // EndpointGroupCreate creates end point group func (ac *APIController) EndpointGroupCreate(endpointGroup *contivModel.EndpointGroup) error { log.Infof("Received EndpointGroupCreate: %+v", endpointGroup)
Add external contracts support (import and export) for ACI
contiv_netplugin
train
e43388157d7e5c90a728e8784a1cc5db7d48a2d4
diff --git a/lib/jquery.raty.js b/lib/jquery.raty.js index <HASH>..<HASH> 100644 --- a/lib/jquery.raty.js +++ b/lib/jquery.raty.js @@ -41,7 +41,7 @@ this.opt.path = this.opt.path || ''; - if (this.opt.path && this.opt.path.slice(this.opt.path.length - 1, this.opt.path.length) !== '/') { + if (this.opt.path && this.opt.path.charAt( this.opt.path.length - 1 ) !== '/') { this.opt.path += '/'; }
Testing the path using charAt rather than slice
wbotelhos_raty
train
036c8a0b26ff5afea8103dfd3e986b080c8eed0f
diff --git a/crc16.go b/crc16.go index <HASH>..<HASH> 100644 --- a/crc16.go +++ b/crc16.go @@ -25,6 +25,7 @@ const ( type Table struct { entries [256]uint16 reversed bool + noXOR bool } // IBMTable is the table for the IBM polynomial. @@ -47,11 +48,19 @@ func MakeTable(poly uint16) *Table { return makeTable(poly) } -// MakeTable returns the Table constructed from the specified polynomial. +// MakeBitsReversedTable returns the Table constructed from the specified polynomial. func MakeBitsReversedTable(poly uint16) *Table { return makeBitsReversedTable(poly) } +// MakeTableNoXOR returns the Table constructed from the specified polynomial. +// Updates happen without XOR in and XOR out. +func MakeTableNoXOR(poly uint16) *Table { + tab := makeTable(poly) + tab.noXOR = true + return tab +} + // makeTable returns the Table constructed from the specified polynomial. func makeBitsReversedTable(poly uint16) *Table { t := &Table{ @@ -115,15 +124,12 @@ func updateNoXOR(crc uint16, tab *Table, p []byte) uint16 { return crc } -// UpdateNoXOR is the same Update, but without XOR in and XOR out -func UpdateNoXOR(crc uint16, tab *Table, p []byte) uint16 { - return updateNoXOR(crc, tab, p) -} - // Update returns the result of adding the bytes in p to the crc. func Update(crc uint16, tab *Table, p []byte) uint16 { if tab.reversed { return updateBitsReversed(crc, tab, p) + } else if tab.noXOR { + return updateNoXOR(crc, tab, p) } else { return update(crc, tab, p) } @@ -133,9 +139,6 @@ func Update(crc uint16, tab *Table, p []byte) uint16 { // using the polynomial represented by the Table. func Checksum(data []byte, tab *Table) uint16 { return Update(0, tab, data) } -// ChecksumNoXOR is the same as Checksum, but with no XOR in and no XOR out -func ChecksumNoXOR(data []byte, tab *Table) uint16 { return UpdateNoXOR(0, tab, data) } - // ChecksumIBM returns the CRC-16 checksum of data // using the IBM polynomial. func ChecksumIBM(data []byte) uint16 { return Update(0, IBMTable, data) }
Add feature for no XOR in and no XOR out
howeyc_crc16
train
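For context on what "XOR in / XOR out" means in the crc16 change above: many CRC-16 variants initialize the register to a nonzero value ("XOR in") and invert the final result ("XOR out"); the new table flag skips both steps. A table-free Python sketch of a reflected CRC-16 with configurable init/xorout — the parameters shown are the common ARC/MAXIM ones, used purely for illustration:

```python
def crc16(data, poly=0xA001, init=0x0000, xorout=0x0000):
    # Reflected (LSB-first) CRC-16; 0xA001 is the reflected IBM polynomial.
    crc = init                      # nonzero init == "XOR in"
    for byte in data:
        crc ^= byte
        for _ in range(8):
            crc = (crc >> 1) ^ poly if crc & 1 else crc >> 1
    return crc ^ xorout             # final inversion == "XOR out"

# CRC-16/ARC has neither XOR in nor XOR out:
print(hex(crc16(b"123456789")))                 # 0xbb3d
# CRC-16/MAXIM uses the same polynomial but XORs the output:
print(hex(crc16(b"123456789", xorout=0xFFFF)))  # 0x44c2
```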
64b7e11e68ae2e674eb8b0cce56c0956329c26a2
diff --git a/src/basis.js b/src/basis.js index <HASH>..<HASH> 100644 --- a/src/basis.js +++ b/src/basis.js @@ -2769,6 +2769,7 @@ // misc. sortAsObject: function(){ + // deprecated in basis.js 1.3.0 /** @cut */ consoleMethods.warn('basis.array.sortAsObject is deprecated, use basis.array.sort instead'); return arrayFunctions.sort.apply(this, arguments); }, diff --git a/src/basis/data/dataset.js b/src/basis/data/dataset.js index <HASH>..<HASH> 100644 --- a/src/basis/data/dataset.js +++ b/src/basis/data/dataset.js @@ -2454,7 +2454,7 @@ Split: Split, Extract: Extract, - // deprecated name of Filter + // deprecated in 1.3.0 name of Filter Subset: Filter.subclass({ className: namespace + '.Subset', init: function(){ diff --git a/src/basis/entity.js b/src/basis/entity.js index <HASH>..<HASH> 100644 --- a/src/basis/entity.js +++ b/src/basis/entity.js @@ -469,7 +469,7 @@ return result; }, - // deprecated + // deprecated in 1.3.0 entitySetType: entitySetType, extend: function(){ /** @cut */ basis.dev.warn('basis.entity: EntitySetType.extend() is deprecated, use EntitySetType.extendClass() instead.'); @@ -477,6 +477,7 @@ } }); + // deprecated in 1.3.0 /** @cut */ if (Object.defineProperty) /** @cut */ Object.defineProperty(result, 'entitySetType', { /** @cut */ get: function(){ @@ -627,7 +628,7 @@ return result; }, - // deprecated + // deprecated in 1.3.0 entityType: entityType, extend: function(){ /** @cut */ basis.dev.warn('basis.entity: EntityType.extend() is deprecated, use EntityType.extendClass() instead.'); @@ -635,6 +636,7 @@ } }); + // deprecated in 1.3.0 /** @cut */ if (Object.defineProperty) /** @cut */ Object.defineProperty(result, 'entityType', { /** @cut */ get: function(){ diff --git a/src/basis/net/ajax.js b/src/basis/net/ajax.js index <HASH>..<HASH> 100644 --- a/src/basis/net/ajax.js +++ b/src/basis/net/ajax.js @@ -184,7 +184,7 @@ newStateData = this.getResponseError(); // NOTE: for backward capability of deprecated behaviour - // should be removed in future + // should be removed in future (deprecated in 1.2.0) if (!newStateData && this.data.error) { /** @cut */ basis.dev.warn('Request#getResponseError should not update request data, but returns error data. Please, fix your method implementation, as data updating is deprecated behaviour.'); @@ -256,6 +256,7 @@ }, /** + * deprecated in 1.2.0 * @deprecated */ processErrorResponse: function(){ diff --git a/src/basis/ui/calendar.js b/src/basis/ui/calendar.js index <HASH>..<HASH> 100644 --- a/src/basis/ui/calendar.js +++ b/src/basis/ui/calendar.js @@ -700,6 +700,7 @@ if (this.sections) { + // deprecated in 1.3.0 /** @cut */ basis.dev.warn('basis.ui.calendar.Calendar#sections is deprecated, use childNodes instead'); this.setChildNodes(this.sections); }
add the version to comments when something is deprecated
basisjs_basisjs
train
c5ed7b1fbd77ace635ef27e42e3cedf9724e2a07
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -234,7 +234,7 @@ def do_setup(): 'flask-caching>=1.3.3, <1.4.0', 'flask-login==0.2.11', 'flask-swagger==0.2.13', - 'flask-wtf>=0.14, <0.15', + 'flask-wtf>=0.14.2, <0.15', 'funcsigs==1.0.0', 'future>=0.16.0, <0.17', 'gitpython>=2.0.2',
[AIRFLOW-<I>] Bump Flask-WTF. Flask-appbuilder needs at least <I>. Closes #<I> from Fokko/AIRFLOW-<I>
apache_airflow
train
ef6db0cd15edcfd01443da1a8b6230923986b680
diff --git a/ChangeLog.md b/ChangeLog.md index <HASH>..<HASH> 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -1,5 +1,9 @@ # Changelog +## v1.9.3 + +- Corrected folder path for SF > 3 (08/10/2021) + ## v1.9.2 - Corrected `ServiceUser` to check wether token is NOT null (20/09/2021) diff --git a/Service/ServiceImage.php b/Service/ServiceImage.php index <HASH>..<HASH> 100644 --- a/Service/ServiceImage.php +++ b/Service/ServiceImage.php @@ -53,7 +53,7 @@ class ServiceImage implements ServiceImageInterface $folder .= '/'; } - return '3' === substr(Kernel::VERSION, 0, 1) ? $rootDir . '/../web/images/' . $folder : $rootDir . '/../public/images/' . $folder; + return '3' === substr(Kernel::VERSION, 0, 1) ? $rootDir . '/../web/images/' . $folder : $rootDir . '/public/images/' . $folder; } /** diff --git a/Twig/TemplateExists.php b/Twig/TemplateExists.php index <HASH>..<HASH> 100644 --- a/Twig/TemplateExists.php +++ b/Twig/TemplateExists.php @@ -35,8 +35,8 @@ class TemplateExists extends AbstractExtension */ public function templateExists($template) { - $templatesFolder = '3' === substr(Kernel::VERSION, 0, 1) ? '/app/Resources/views/' : '/../templates/'; - $templatesFolder = $this->configService->getContainerParameter('kernel.project_dir') . $templatesFolder; + $root = $this->configService->getContainerParameter('kernel.project_dir'); + $templatesFolder = '3' === substr(Kernel::VERSION, 0, 1) ? $root . '/app/Resources/views/' : $root . '/templates/'; return is_file($templatesFolder . $template); }
- Corrected folder path for SF > 3 (<I>/<I>/<I>)
975L_ServicesBundle
train
58fe2eb66ba825c8c27124aec9e54c9fc6d8477d
diff --git a/tasks/ghost.js b/tasks/ghost.js index <HASH>..<HASH> 100644 --- a/tasks/ghost.js +++ b/tasks/ghost.js @@ -18,7 +18,7 @@ module.exports = function (grunt) { // Get spawn function for the CasperJS process creation spawn = require('child_process').spawn, // Create array that will contain all the parameters for CasperJS - command = ['test'], + command = [], len; // Get CasperJS test options and add them to command array
Removed unnecessary string. At least I think it's unnecessary?
colinwren_grunt-ghost
train
f5032ae2e8d7679cb2428a691d3263a161bda16a
diff --git a/awslimitchecker/tests/test_runner.py b/awslimitchecker/tests/test_runner.py index <HASH>..<HASH> 100644 --- a/awslimitchecker/tests/test_runner.py +++ b/awslimitchecker/tests/test_runner.py @@ -291,7 +291,11 @@ class TestAwsLimitCheckerRunner(object): with pytest.raises(ValueError) as excinfo: runner.set_limit_overrides(mock_checker, overrides) assert mock_checker.mock_calls == [] - assert excinfo.value.message == "Limit names must be in " \ + if sys.version_info[0] > 2: + msg = excinfo.value.args[0] + else: + msg = excinfo.value.message + assert msg == "Limit names must be in " \ "'service/limit' format; EC2 is invalid." def test_entry_show_usage(self): diff --git a/awslimitchecker/tests/test_utils.py b/awslimitchecker/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/awslimitchecker/tests/test_utils.py +++ b/awslimitchecker/tests/test_utils.py @@ -39,6 +39,7 @@ Jason Antman <[email protected]> <http://www.jasonantman.com> import argparse import pytest +import sys from awslimitchecker.utils import StoreKeyValuePair @@ -80,4 +81,11 @@ class TestStoreKeyValuePair(object): parser.add_argument('-o', '--one', action=StoreKeyValuePair) with pytest.raises(SystemExit) as excinfo: parser.parse_args(['-o', 'foobar']) - assert excinfo.value.message == 2 + if sys.version_info[0] > 2: + msg = excinfo.value.args[0] + else: + if sys.version_info[1] < 7: + msg = excinfo.value + else: + msg = excinfo.value.message + assert msg == 2
fix issues with py<I> and py3 tests
jantman_awslimitchecker
train
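The version check the commit above repeats at each assertion can be factored into one helper; a hedged sketch (the helper name is mine, not part of awslimitchecker):

import sys

def exc_message(exc):
    # Python 3 dropped BaseException.message; args[0] works everywhere,
    # while old Python 2 exception types only carry the message in args
    if sys.version_info[0] > 2 or not hasattr(exc, 'message'):
        return exc.args[0] if exc.args else None
    return exc.message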
b91c84c8643e010867bf4b6f03dd2c0aeda9d3e4
diff --git a/xgbutil.go b/xgbutil.go index <HASH>..<HASH> 100644 --- a/xgbutil.go +++ b/xgbutil.go @@ -17,6 +17,7 @@ import ( // X connection, the root window, event callbacks, key/mouse bindings, etc. type XUtil struct { conn *xgb.Conn + screen *xgb.ScreenInfo root xgb.Id atoms map[string]xgb.Id atomNames map[xgb.Id]string @@ -30,6 +31,8 @@ type XUtil struct { mousebinds map[MouseBindKey][]MouseBindCallback //mousebind key -> callbacks mousegrabs map[MouseBindKey]int // mouse bind key -> # of grabs + + gc xgb.Id // a general purpose graphics context; used to paint images } // Callback is an interface that should be implemented by event callback @@ -109,6 +112,7 @@ func Dial(display string) (*XUtil, error) { // Initialize our central struct that stores everything. xu := &XUtil{ conn: c, + screen: c.DefaultScreen(), root: c.DefaultScreen().Root, atoms: make(map[string]xgb.Id, 50), // start with a nice size atomNames: make(map[xgb.Id]string, 50), @@ -121,6 +125,11 @@ func Dial(display string) (*XUtil, error) { mousegrabs: make(map[MouseBindKey]int, 10), } + // Create a general purpose graphics context + xu.gc = xu.conn.NewId() + xu.conn.CreateGC(xu.gc, xu.root, xgb.GCForeground, + []uint32{xu.screen.WhitePixel}) + // Register the Xinerama extension... because it doesn't cost much. err = xu.conn.RegisterExtension("XINERAMA") @@ -140,6 +149,11 @@ func (xu *XUtil) Conn() (*xgb.Conn) { return xu.conn } +// Screen returns the default screen +func (xu *XUtil) Screen() *xgb.ScreenInfo { + return xu.screen +} + // RootWin returns the current root window. func (xu *XUtil) RootWin() (xgb.Id) { return xu.root @@ -153,8 +167,10 @@ func (xu *XUtil) SetRootWin(root xgb.Id) { xu.root = root } -// RootWidth gets the width of the root window -func (xu *XUtil) RootWidth() { +// GC gets a general purpose graphics context that is typically used to simply +// paint images. +func (xu *XUtil) GC() xgb.Id { + return xu.gc } // AttachCallback associates a (event, window) tuple with an event.
add a general purpose graphics context and a shortcut to the default screen
BurntSushi_xgbutil
train
ef49e6f2eb44c492bba550ae90f9f291312647d2
diff --git a/lib/faraday_middleware/response_middleware.rb b/lib/faraday_middleware/response_middleware.rb index <HASH>..<HASH> 100644 --- a/lib/faraday_middleware/response_middleware.rb +++ b/lib/faraday_middleware/response_middleware.rb @@ -37,6 +37,8 @@ module FaradayMiddleware def process_response(env) env[:raw_body] = env[:body] if preserve_raw?(env) env[:body] = parse(env[:body]) + rescue Faraday::Error::ParsingError => err + raise Faraday::Error::ParsingError.new(err, env[:response]) end # Parse the response body. diff --git a/spec/unit/parse_json_spec.rb b/spec/unit/parse_json_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/parse_json_spec.rb +++ b/spec/unit/parse_json_spec.rb @@ -66,6 +66,15 @@ describe FaradayMiddleware::ParseJson, :type => :response do end end + it "includes the response on the ParsingError instance" do + begin + process('{') { |env| env[:response] = Faraday::Response.new } + fail 'Parsing should have failed.' + rescue Faraday::Error::ParsingError => err + expect(err.response).to be_a(Faraday::Response) + end + end + context "with mime type fix" do let(:middleware) { app = described_class::MimeTypeFix.new(lambda {|env|
Include env.response when rescuing ParsingErrors (#<I>). If a `ParsingError` is raised during processing, then attach the `env.response` to the error.
lostisland_faraday_middleware
train
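The same error-enrichment idea in Python terms, as an illustrative sketch rather than a port: catch the low-level parse failure and re-raise an exception type that carries the response object, so callers can inspect what failed to parse.

import json

class ParsingError(Exception):
    def __init__(self, cause, response):
        super().__init__(str(cause))
        self.response = response         # what the middleware was parsing

def process_response(raw_body, response):
    try:
        return json.loads(raw_body)
    except ValueError as err:            # json.JSONDecodeError is a ValueError
        raise ParsingError(err, response) from err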
8e7871ce6d258ddf46e182c4eafa1ca5967b3c35
diff --git a/pysd/py_backend/external.py b/pysd/py_backend/external.py index <HASH>..<HASH> 100644 --- a/pysd/py_backend/external.py +++ b/pysd/py_backend/external.py @@ -620,8 +620,6 @@ class ExtData(External): self.roots = [root] self.coordss = [coords] self.dims = dims - - # This value should be unique self.interp = interp def add(self, file_name, tab, time_row_or_col, cell, @@ -635,6 +633,14 @@ class ExtData(External): self.cells.append(cell) self.roots.append(root) self.coordss.append(coords) + + try: + assert interp == self.interp + except AssertionError: + raise ValueError(self.py_name + "\n" + "Error matching interpolation method with " + + "previously defined one") + try: assert dims == self.dims except AssertionError:
Check interpolation method in ExtData.add. Ensure the interpolation method stays consistent with the previously defined one.
JamesPHoughton_pysd
train
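The commit above funnels the check through assert and converts AssertionError into ValueError; an explicit comparison gives the same error without relying on assert (which `python -O` strips). A condensed sketch of the pattern, with pysd's internals stubbed out:

class ExtData:
    def __init__(self, interp):
        self.interp = interp             # fixed by the first table definition

    def add(self, interp):
        # every later table must use the same interpolation method
        if interp != self.interp:
            raise ValueError(
                "Error matching interpolation method with "
                "previously defined one")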
2ab33ad144bc4c67ae082ce30a1669428e4b2ff4
diff --git a/js/bitmart.js b/js/bitmart.js index <HASH>..<HASH> 100644 --- a/js/bitmart.js +++ b/js/bitmart.js @@ -534,6 +534,7 @@ module.exports = class bitmart extends Exchange { 'Content-Type': 'application/x-www-form-urlencoded', }; } else { + const nonce = this.nonce (); this.checkRequiredCredentials (); const token = this.safeString (this.options, 'accessToken'); if (token === undefined) { @@ -541,8 +542,8 @@ module.exports = class bitmart extends Exchange { } const expires = this.safeInteger (this.options, 'expires'); if (expires !== undefined) { - if (this.nonce () >= expires) { - throw new AuthenticationError (this.id + ' accessToken expired, supply a new accessToken or call to signIn() method'); + if (nonce >= expires) { + throw new AuthenticationError (this.id + ' accessToken expired, supply a new accessToken or call the signIn() method'); } } if (Object.keys (query).length) { @@ -550,7 +551,7 @@ module.exports = class bitmart extends Exchange { } headers = { 'Content-Type': 'application/json', - 'X-BM-TIMESTAMP': this.nonce (), + 'X-BM-TIMESTAMP': nonce.toString (), 'X-BM-AUTHORIZATION': 'Bearer ' + token, }; if (method !== 'GET') {
bitmart minor edits in sign()
ccxt_ccxt
train
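The point of the change above is to sample the clock once, so the expiry check and the X-BM-TIMESTAMP header cannot disagree. Roughly, in Python (header names mirror the JS; everything else is an assumption for illustration):

import time

def sign(token, expires):
    nonce = int(time.time() * 1000)      # sampled exactly once
    if expires is not None and nonce >= expires:
        raise RuntimeError(
            'accessToken expired, supply a new accessToken '
            'or call the signIn() method')
    return {
        'Content-Type': 'application/json',
        'X-BM-TIMESTAMP': str(nonce),    # same value the expiry check saw
        'X-BM-AUTHORIZATION': 'Bearer ' + token,
    }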
7b7507834ff62374e856699c1be2e1067c6f9451
diff --git a/lib/rack/app/payload/parser/builder.rb b/lib/rack/app/payload/parser/builder.rb index <HASH>..<HASH> 100644 --- a/lib/rack/app/payload/parser/builder.rb +++ b/lib/rack/app/payload/parser/builder.rb @@ -15,12 +15,17 @@ class Rack::App::Payload::Parser::Builder self end - def accept_json - Rack::App::Payload::Parser::Builder::Defaults.json(self) - end - - def accept_www_form_urlencoded - Rack::App::Payload::Parser::Builder::Defaults.form(self) + FORMS = { + :json => Rack::App::Payload::Parser::Builder::Defaults.method(:json), + :form => Rack::App::Payload::Parser::Builder::Defaults.method(:form), + :urlencoded => Rack::App::Payload::Parser::Builder::Defaults.method(:form), + :www_form_urlencoded => Rack::App::Payload::Parser::Builder::Defaults.method(:form) + } + + def accept(*form_names) + form_names.map(&:to_sym).each do |form_name| + (FORMS[form_name] || raise(NotImplementedError)).call(self) + end end def merge!(parser_builder) diff --git a/spec/rack/app/payload_spec/parser_spec.rb b/spec/rack/app/payload_spec/parser_spec.rb index <HASH>..<HASH> 100644 --- a/spec/rack/app/payload_spec/parser_spec.rb +++ b/spec/rack/app/payload_spec/parser_spec.rb @@ -7,6 +7,16 @@ describe "Rack::App#payload" do rack_app do + payload do + parser do + accept :json, :www_form_urlencoded + + on "custom/x-yaml" do |io| + YAML.load(io.read) + end + end + end + get "/" do Marshal.dump(payload) #> [{"Foo" => "bar"}] end @@ -14,23 +24,6 @@ describe "Rack::App#payload" do end context 'when custom parser defined' do - rack_app do - - payload do - - parser do - on "custom/x-yaml" do |io| - YAML.load(io.read) - end - end - - end - - get "/" do - Marshal.dump(payload) #> [{"Foo" => "bar"}] - end - - end let(:request_options) do { @@ -45,21 +38,6 @@ describe "Rack::App#payload" do unless IS_OLD_RUBY context 'when payload is a json' do - rack_app do - - payload do - parser do - accept_json - end - end - - get "/" do - Marshal.dump(payload) #> [{"Foo" => "bar"}] - end - - end - - [ "application/json", "application/x-javascript", @@ -113,17 +91,6 @@ describe "Rack::App#payload" do end context 'when content type is form-urlencoded' do - rack_app do - payload do - parser do - accept_www_form_urlencoded - end - end - - get "/" do - Marshal.dump(payload) #> [{"Foo" => "bar"}] - end - end let(:payload_struct){{'hello' => 'world'}} [
feat: simplify payload parser accept interface
rack-app_rack-app
train
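The refactor above replaces per-format accept_* methods with one lookup table keyed by format name. The same dispatch-table shape in Python, purely illustrative (the registrar functions are placeholders):

def register_json(builder):
    pass    # would wire up the JSON parser

def register_form(builder):
    pass    # would wire up the form/urlencoded parser

FORMS = {
    'json': register_json,
    'form': register_form,
    'urlencoded': register_form,
    'www_form_urlencoded': register_form,   # aliases share one registrar
}

def accept(builder, *form_names):
    for name in form_names:
        registrar = FORMS.get(name)
        if registrar is None:
            raise NotImplementedError(name)
        registrar(builder)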
941a75e9e66fed1c930c4120a3460a83eaf7ba2f
diff --git a/go/service/main.go b/go/service/main.go index <HASH>..<HASH> 100644 --- a/go/service/main.go +++ b/go/service/main.go @@ -2,8 +2,10 @@ package service import ( "fmt" + "io/ioutil" "net" "os" + "path" "github.com/codegangsta/cli" "github.com/keybase/client/go/libcmdline" @@ -56,6 +58,11 @@ func (d *Service) RunClient() (err error) { func (d *Service) Run() (err error) { G.Service = true + err = d.writeVersionFile() + if err != nil { + return + } + if len(d.chdirTo) != 0 { e_tmp := os.Chdir(d.chdirTo) if e_tmp != nil { @@ -81,6 +88,13 @@ func (d *Service) Run() (err error) { return } +// If the daemon is already running, we need to be able to check what version +// it is, in case the client has been updated. +func (d *Service) writeVersionFile() error { + versionFilePath := path.Join(G.Env.GetCacheDir(), "service.version") + return ioutil.WriteFile(versionFilePath, []byte(libkb.CLIENT_VERSION), 0644) +} + func (d *Service) ReleaseLock() error { return d.lockPid.Close() }
write .cache/keybase/service.version on service startup. This provides an easy way for updated clients to find out what version of the service is currently running, so that they can bounce it if need be. Closes <URL>
keybase_client
train
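A small Python rendition of the version-file idea from the commit above: the running service advertises its version in a well-known cache path so a newer client can decide whether to restart it. The path and version constant are placeholders, not keybase values.

import os

CLIENT_VERSION = '1.0.0'                 # stand-in for the build's version

def write_version_file(cache_dir):
    path = os.path.join(cache_dir, 'service.version')
    with open(path, 'w') as f:
        f.write(CLIENT_VERSION)          # readable by the client that checks it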
6529dc5f2dc876fa888f43a27a0cab2f2fed78a0
diff --git a/lib/jsdom/level2/html.js b/lib/jsdom/level2/html.js index <HASH>..<HASH> 100644 --- a/lib/jsdom/level2/html.js +++ b/lib/jsdom/level2/html.js @@ -60,11 +60,11 @@ core.resourceLoader = { resolve: function(document, path) { var baseElements = document.getElementsByTagName('base'), baseUrl = document.URL; - + if (baseElements.length > 0) { baseUrl = baseElements.item(0).href; } - + return URL.resolve(baseUrl, path).replace(/^file:\/\//, ''); }, download: function(url, callback) { @@ -101,8 +101,17 @@ core.CharacterData.prototype.__defineSetter__("_nodeValue", function(value) { this._text = value; if (this.ownerDocument && this.parentNode) { var ev = this.ownerDocument.createEvent("MutationEvents") - ev.initMutationEvent("DOMCharacterDataModified", true, false, this, oldValue, newValue, null, null) - this.dispatchEvent(ev) + + ev.initMutationEvent("DOMCharacterDataModified", + true, + false, + this, + oldValue, + value, + null, + null); + + this.dispatchEvent(ev); } }); @@ -391,7 +400,7 @@ core.HTMLDocument.prototype = { get head() { return firstChild(this.documentElement, 'HEAD'); }, - + set head() { /* noop */ }, get body() {
renamed newValue to value (fixing the event that is emitted) and restructured the event init to wrap before <I> cols
jsdom_jsdom
train
40c56a3cdd612a08cd5d05337e8c26151ad4473f
diff --git a/dropwizard-logging/src/main/java/io/dropwizard/logging/FileAppenderFactory.java b/dropwizard-logging/src/main/java/io/dropwizard/logging/FileAppenderFactory.java index <HASH>..<HASH> 100644 --- a/dropwizard-logging/src/main/java/io/dropwizard/logging/FileAppenderFactory.java +++ b/dropwizard-logging/src/main/java/io/dropwizard/logging/FileAppenderFactory.java @@ -13,7 +13,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import io.dropwizard.validation.ValidationMethod; -import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import java.util.TimeZone; @@ -62,7 +61,7 @@ import java.util.TimeZone; * <td>{@code archivedFileCount}</td> * <td>{@code 5}</td> * <td> - * The number of archived files to keep. Must be between {@code 1} and {@code 50}. + * The number of archived files to keep. Must be greater than {@code 0}. * </td> * </tr> * <tr> @@ -93,7 +92,6 @@ public class FileAppenderFactory extends AbstractAppenderFactory { private String archivedLogFilenamePattern; @Min(1) - @Max(50) private int archivedFileCount = 5; @NotNull
Removing maximum limit on archivedFileCount for file logging.
dropwizard_dropwizard
train
9606b6a27f88249ad525caac92b8417ebc8f9051
diff --git a/spec/functional/resource/user/useradd_spec.rb b/spec/functional/resource/user/useradd_spec.rb index <HASH>..<HASH> 100644 --- a/spec/functional/resource/user/useradd_spec.rb +++ b/spec/functional/resource/user/useradd_spec.rb @@ -25,10 +25,22 @@ def resource_for_platform(username, run_context) Chef::Resource.resource_for_node(:user, node).new(username, run_context) end +# ideally we could somehow pass an array of [ ...::Aix, ...::Linux ] to the +# filter, but we have to pick the right one for the O/S. +def user_provider_filter + case ohai[:os] + when "aix" + Chef::Provider::User::Aix + when "linux" + Chef::Provider::User::Linux + end +end + metadata = { :unix_only => true, :requires_root => true, :not_supported_on_mac_osx => true, + :provider => { :user => user_provider_filter }, } describe Chef::Provider::User::Useradd, metadata do
fix the useradd test filters; try to document it a bit better so I don't accidentally delete it again.
chef_chef
train
457c98f4c2489fec139df59dcef58e068b104b01
diff --git a/internal/snmp/translate.go b/internal/snmp/translate.go index <HASH>..<HASH> 100644 --- a/internal/snmp/translate.go +++ b/internal/snmp/translate.go @@ -54,11 +54,17 @@ func LoadMibsFromPath(paths []string, log telegraf.Logger) error { } appendPath(mibPath) - folders = append(folders, mibPath) err := filepath.Walk(mibPath, func(path string, info os.FileInfo, err error) error { if info == nil { - return fmt.Errorf("no mibs found") + log.Warnf("No mibs found") + if os.IsNotExist(err) { + log.Warnf("MIB path doesn't exist: %q", mibPath) + } else if err != nil { + return err + } + return nil } + folders = append(folders, mibPath) // symlinks are files so we need to double check if any of them are folders // Will check file vs directory later on if info.Mode()&os.ModeSymlink != 0 { @@ -71,7 +77,7 @@ func LoadMibsFromPath(paths []string, log telegraf.Logger) error { return nil }) if err != nil { - return fmt.Errorf("Filepath could not be walked: %v", err) + return fmt.Errorf("Filepath %q could not be walked: %v", mibPath, err) } for _, folder := range folders { diff --git a/plugins/inputs/snmp/snmp_test.go b/plugins/inputs/snmp/snmp_test.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/snmp/snmp_test.go +++ b/plugins/inputs/snmp/snmp_test.go @@ -1328,3 +1328,10 @@ func TestCanNotParse(t *testing.T) { err := s.Init() require.Error(t, err) } + +func TestMissingMibPath(t *testing.T) { + log := testutil.Logger{} + path := []string{"non-existing-directory"} + err := snmp.LoadMibsFromPath(path, log) + require.NoError(t, err) +}
fix: snmp input plugin errors if mibs folder doesn't exist (#<I>) (#<I>)
influxdata_telegraf
train
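The behavioural change above — a missing MIB directory becomes a warning instead of a fatal walk error — translates to something like this in Python (a sketch under that assumption, not telegraf code):

import logging
import os

def collect_mib_folders(root):
    if not os.path.isdir(root):
        logging.warning("MIB path doesn't exist: %r", root)
        return []                        # skip it instead of failing the load
    folders = [root]
    for dirpath, dirnames, _files in os.walk(root, followlinks=True):
        folders.extend(os.path.join(dirpath, d) for d in dirnames)
    return folders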
be1c0614068db3ebe561b7384a21d5d5107a0905
diff --git a/treeherder/log_parser/tasks.py b/treeherder/log_parser/tasks.py index <HASH>..<HASH> 100644 --- a/treeherder/log_parser/tasks.py +++ b/treeherder/log_parser/tasks.py @@ -74,7 +74,7 @@ def parse_log(project, job_log_url, job_guid, check_errors=False): clean_line = get_mozharness_substring(err['line']) # get a meaningful search term out of the error line search_term = get_error_search_term(clean_line) - bugs = None + bugs = dict(open_recent=[], all_others=[]) # collect open recent and all other bugs suggestions if search_term:
Bug <I> - Return a dict, not None, if no valid search terms found; r=mdoglio. The UI expects bugs to be a dict rather than None. This change also makes the return value consistent with the case where a search term is identified but get_bugs_for_search_term() finds zero bugs.
mozilla_treeherder
train
0475b23a37d74a32edc8b24a48f8ae08bb80d8ff
diff --git a/tests/test_utils.py b/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -80,6 +80,15 @@ def test_lru_cache_unlimited_explicit(): assert len(cache.lru) == 100 +def test_lru_cache_iteration_works(): + cache = LRUCache() + count = 0 + for _ in cache: + assert False, 'there should be no elements in the cache' + + assert count == 0 + + def test_catch_warning(): class MyWarning(Warning): pass diff --git a/tinydb/utils.py b/tinydb/utils.py index <HASH>..<HASH> 100644 --- a/tinydb/utils.py +++ b/tinydb/utils.py @@ -43,6 +43,9 @@ class LRUCache: def __getitem__(self, key): return self.get(key) + def __iter__(self): + return iter(self.__cache) + def get(self, key, default=None): value = self.__cache.get(key) if value:
Internal: support iteration over LRUCache correctly. Right now LRUCache is iterable due to its __getitem__ method, which Python uses with numerical indexes. But due to the current `get()` default value of `None`, it will iterate forever, always returning `None`. For now this commit fixes this by making iteration over an LRUCache iterate the underlying dict instead.
msiemens_tinydb
train
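Why the bug happens: when a class defines __getitem__ but no __iter__, iter() falls back to the legacy sequence protocol, calling __getitem__(0), __getitem__(1), ... until an IndexError is raised — and a get() that defaults to None never raises one, so the loop never ends. A stripped-down sketch of the fix, not the tinydb code itself:

class LRUCache:
    def __init__(self):
        self._cache = {}

    def get(self, key, default=None):
        return self._cache.get(key, default)

    def __getitem__(self, key):
        return self.get(key)             # never raises, so the fallback loops

    def __iter__(self):
        return iter(self._cache)         # finite iteration over the keys

Defining __iter__ both ends the infinite loop and makes the iteration order and contents explicit (keys of the underlying dict).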
32eb941bdf5f81a7935116d6b2f0ac37eedd9baf
diff --git a/src/main/java/org/gitlab/api/models/GitlabMergeRequest.java b/src/main/java/org/gitlab/api/models/GitlabMergeRequest.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/gitlab/api/models/GitlabMergeRequest.java +++ b/src/main/java/org/gitlab/api/models/GitlabMergeRequest.java @@ -49,13 +49,16 @@ public class GitlabMergeRequest { @JsonProperty("created_at") private Date createdAt; - + @JsonProperty("merge_commit_sha") + private String mergeCommitSHA; + @JsonProperty("merge_status") private String mergeStatus; @JsonProperty("web_url") private String webUrl; + @JsonProperty("sha") private String sha; public Integer getId() { @@ -244,9 +247,21 @@ public class GitlabMergeRequest { this.changes = changes; } - public String getMergeStatus() { return mergeStatus; } + public String getMergeCommitSHA() { + return mergeCommitSHA; + } + + public void setMergeCommitSHA(String mergeCommitSHA) { + this.mergeCommitSHA = mergeCommitSHA; + } - public void setMergeStatus(String mergeStatus) { this.mergeStatus = mergeStatus; } + public String getMergeStatus() { + return mergeStatus; + } + + public void setMergeStatus(String mergeStatus) { + this.mergeStatus = mergeStatus; + } public String getWebUrl() { return webUrl;
add missing field mergeCommitSHA and json property SHA (#<I>)
timols_java-gitlab-api
train
232dcf16ede5d8ea29ef6d1cb567b9347af91867
diff --git a/annotypes/_array.py b/annotypes/_array.py index <HASH>..<HASH> 100644 --- a/annotypes/_array.py +++ b/annotypes/_array.py @@ -8,9 +8,6 @@ from ._typing import TYPE_CHECKING, overload, Sequence, TypeVar, Generic, \ NEW_TYPING from ._stackinfo import find_caller_class - - - if TYPE_CHECKING: # pragma: no cover from typing import Union, Type
Added handling for Jython Arrays
dls-controls_annotypes
train
90642a4a815a69a96a028875aebbc03f080ce103
diff --git a/src/main/resources/META-INF/resources/primefaces/tree/tree.js b/src/main/resources/META-INF/resources/primefaces/tree/tree.js index <HASH>..<HASH> 100644 --- a/src/main/resources/META-INF/resources/primefaces/tree/tree.js +++ b/src/main/resources/META-INF/resources/primefaces/tree/tree.js @@ -875,18 +875,22 @@ PrimeFaces.widget.VerticalTree = PrimeFaces.widget.BaseTree.extend({ dragSource.selections = []; dragSource.writeSelections(); - selectedNodes = dropSource.findNodes(dropSource.selections); + if(dropSource.cfg.draggable) + selectedNodes = dropSource.findNodes(dropSource.selections); } else { selectedNodes = event.dragNodes; } dropSource.updateRowKeys(); - dropSource.selections = []; - $.each(selectedNodes, function(i, item) { - dropSource.selections.push(item.attr('data-rowkey')); - }); - dropSource.writeSelections(); + + if(dropSource.cfg.draggable) { + dropSource.selections = []; + $.each(selectedNodes, function(i, item) { + dropSource.selections.push(item.attr('data-rowkey')); + }); + dropSource.writeSelections(); + } }, fireDragDropEvent: function(event) {
Do not update selections if drop target is not draggable
primefaces_primefaces
train
6f71054b10da34d62d96ee4c1dc5d51e25d1796d
diff --git a/jpyutil.py b/jpyutil.py index <HASH>..<HASH> 100644 --- a/jpyutil.py +++ b/jpyutil.py @@ -531,7 +531,9 @@ def write_config_files(out_dir='.', jvm_dll_file=None, install_dir=None, req_java_api_conf=True, - req_py_api_conf=True): + req_py_api_conf=True, + jvm_properties=None, + jvm_options=None): """ Writes the jpy configuration files for Java and/or Python. @@ -566,8 +568,14 @@ def write_config_files(out_dir='.', f.write('jvm_dll = %s\n' % repr(jvm_dll_file)) f.write('jvm_maxmem = None\n') f.write('jvm_classpath = []\n') - f.write('jvm_properties = {}\n') - f.write('jvm_options = []\n') + if jvm_properties is None: + f.write('jvm_properties = {}\n') + else: + f.write('jvm_properties = ' + str(jvm_properties) + '\n') + if jvm_options is None: + f.write('jvm_options = []\n') + else: + f.write('jvm_options = [''' + jvm_options + ''']\n''') logger.info("jpy Python API configuration written to '%s'" % py_api_config_file) except Exception: logger.exception("Error while writing Python API configuration")
Update jpyutil.py. Allow jvm_properties and jvm_options to be passed to the method write_config_files.
bcdev_jpy
train
fc0ee3f081822e8461629470590bbe62847b8bff
diff --git a/events.go b/events.go index <HASH>..<HASH> 100644 --- a/events.go +++ b/events.go @@ -37,6 +37,20 @@ func (e Event) ParseMessageId() (string, error) { return headers["message-id"].(string), nil } +func (e Event) ParseEventType() (string, error) { + obj, ok := e["event"] + if !ok { + return "", errors.New("'event' field not found in event") + } + + eventType, ok := obj.(string) + if !ok { + return "", errors.New("'event' field a string") + } + + return eventType, nil +} + // noTime always equals an uninitialized Time structure. // It's used to detect when a time parameter is provided. var noTime time.Time diff --git a/events_test.go b/events_test.go index <HASH>..<HASH> 100644 --- a/events_test.go +++ b/events_test.go @@ -179,6 +179,46 @@ var _ = Describe("Event{}", func() { }) }) }) + + Describe("ParseEventType()", func() { + Context("When 'event' exists and is valid", func() { + It("Should parse the event into string", func() { + event := Event{ + "event": "delivered", + } + eventType, err := event.ParseEventType() + ensure.Nil(t, err) + ensure.Equal(t, eventType, "delivered") + + event = Event{ + "event": "rejected", + } + eventType, err = event.ParseEventType() + ensure.Nil(t, err) + ensure.Equal(t, eventType, "rejected") + }) + }) + Context("When 'event' is missing", func() { + It("Should return error", func() { + event := Event{ + "blah": "", + } + _, err := event.ParseEventType() + ensure.NotNil(t, err) + ensure.DeepEqual(t, err.Error(), "'event' field not found in event") + }) + }) + Context("When 'event' is not a string", func() { + It("Should return error", func() { + event := Event{ + "event": 10, + } + _, err := event.ParseEvent() + ensure.NotNil(t, err) + ensure.DeepEqual(t, err.Error(), "'event' field not a string") + }) + }) + }) }) var _ = Describe("PollEvents()", func() {
Adds ParseEventType to Event
mailgun_mailgun-go
train
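The presence-then-type check from ParseEventType above, sketched in Python for illustration (error strings paraphrased):

def parse_event_type(event):
    if 'event' not in event:
        raise KeyError("'event' field not found in event")
    value = event['event']
    if not isinstance(value, str):
        raise TypeError("'event' field is not a string")
    return value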
1372df2cab4e0341430a8201c78fbb0e247c0c50
diff --git a/agent.go b/agent.go index <HASH>..<HASH> 100644 --- a/agent.go +++ b/agent.go @@ -521,6 +521,23 @@ func getAnnounceFields() (logrus.Fields, error) { }, nil } +// formatFields converts logrus Fields (containing arbitrary types) into a string slice. +func formatFields(fields logrus.Fields) []string { + var results []string + + for k, v := range fields { + value, ok := v.(string) + if !ok { + // convert non-string value into a string + value = fmt.Sprint(v) + } + + results = append(results, fmt.Sprintf("%s=%q", k, value)) + } + + return results +} + // announce logs details of the agents version and capabilities. func announce() error { announceFields, err := getAnnounceFields() @@ -529,17 +546,12 @@ func announce() error { } if os.Getpid() == 1 { - var values []string + fields := formatFields(agentFields) + extraFields := formatFields(announceFields) - for k, v := range agentFields { - values = append(values, fmt.Sprintf("%s=%q", k, v)) - } - - for k, v := range announceFields { - values = append(values, fmt.Sprintf("%s=%q", k, v)) - } + fields = append(fields, extraFields...) - fmt.Printf("announce: %s\n", strings.Join(values, ",")) + fmt.Printf("announce: %s\n", strings.Join(fields, ",")) } else { agentLog.WithFields(announceFields).Info("announce") }
logging: Fix handling of non-string fields when PID 1. Convert all non-string types into strings prior to logging, to avoid attempting to display non-string types with the `%q` format verb. Fixes #<I>.
kata-containers_agent
train
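The defensive conversion in formatFields — stringify anything that is not already a string before quoting — looks like this in Python (a loose analogue, not a port; %r plays the role of Go's %q):

def format_fields(fields):
    parts = []
    for key, value in fields.items():
        if not isinstance(value, str):
            value = str(value)           # e.g. ints or bools in log fields
        parts.append('%s=%r' % (key, value))
    return parts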
1253454167fde9a555f9563a825846f12a44e994
diff --git a/aegea/deploy.py b/aegea/deploy.py index <HASH>..<HASH> 100644 --- a/aegea/deploy.py +++ b/aegea/deploy.py @@ -140,6 +140,7 @@ def grant(args): GitHub repo URL, create and record a deployment key accessible to the IAM role. """ + repo = get_repo(args.repo) try: role = resources.iam.Role(args.iam_role_or_instance) role.load() @@ -153,7 +154,6 @@ def grant(args): iam_group=None, iam_user=None, generate_ssh_key=True)) - repo = get_repo(args.repo) repo.create_key(role.name, secret["ssh_public_key"]) parser = register_parser(grant, parent=deploy_parser)
Error out early on GitHub auth failure
kislyuk_aegea
train
1281274677274fe1d709aded64ffddddd8b3f8e3
diff --git a/pecan/tests/test_util.py b/pecan/tests/test_util.py index <HASH>..<HASH> 100644 --- a/pecan/tests/test_util.py +++ b/pecan/tests/test_util.py @@ -93,3 +93,23 @@ class TestArgSpec(unittest.TestCase): actual = util.getargspec(dec(True)( self.controller.static_index)) assert expected == actual + + def test_nested_cells(self): + + def before(handler): + def deco(f): + def wrapped(*args, **kwargs): + if callable(handler): + handler() + return f(*args, **kwargs) + return wrapped + return deco + + class RootController(object): + @expose() + @before(lambda: True) + def index(self, a, b, c): + return 'Hello, World!' + + argspec = util._cfg(RootController.index)['argspec'] + assert argspec.args == ['self', 'a', 'b', 'c'] diff --git a/pecan/util.py b/pecan/util.py index <HASH>..<HASH> 100644 --- a/pecan/util.py +++ b/pecan/util.py @@ -26,15 +26,26 @@ def getargspec(method): # NOTE(sileht): if the closure is None we cannot look deeper, # so return actual argspec, this occurs when the method # is static for example. - if func_closure is None: + if not func_closure: return argspec - closure = next( - ( - c for c in func_closure if six.callable(c.cell_contents) - ), - None + closure = None + # In the case of deeply nested decorators (with arguments), it's possible + # that there are several callables in scope; Take a best guess and go + # with the one that looks most like a pecan controller function + # ('self' is the first argument) + func_closure = filter( + lambda c: six.callable(c.cell_contents), + func_closure ) + func_closure = sorted( + func_closure, + key=lambda c: 'self' in c.cell_contents.__code__.co_varnames, + reverse=True + ) + + closure = func_closure[0] + method = closure.cell_contents return getargspec(method)
Fix an argspec detection edge case in `pecan.util.getargspec`. In lieu of decorators that responsibly duplicate the argspec of underlying functions, pecan must make a best guess. When multiple nested decorators are discovered, the __closure__ of the wrapped function is searched for callables (one of which likely represents the underlying wrapped function - the controller method itself). This change tweaks the algorithm a bit further to place higher match precedence on in-scope functions that have `self` as a first argument.
pecan_pecan
train
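The heuristic above, reduced to its core: among the callables captured in a wrapper's closure, prefer the one whose code object lists self first, and recurse until no closure is left. A hedged sketch — in practice, decorators that apply functools.wraps make this guessing unnecessary:

import inspect

def unwrap_closure(method):
    closure = getattr(method, '__closure__', None)
    if not closure:
        return method
    candidates = [
        c.cell_contents for c in closure
        if callable(c.cell_contents) and hasattr(c.cell_contents, '__code__')
    ]
    if not candidates:
        return method
    # best guess: controller methods take 'self' as their first argument
    candidates.sort(
        key=lambda f: 'self' in f.__code__.co_varnames, reverse=True)
    return unwrap_closure(candidates[0])

def getargspec(method):
    return inspect.getfullargspec(unwrap_closure(method))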
d2621d6c2fab07d8d4af9ad0bbd11494aa2cf95f
diff --git a/src/Application.php b/src/Application.php index <HASH>..<HASH> 100644 --- a/src/Application.php +++ b/src/Application.php @@ -121,6 +121,11 @@ class Application } } + public function __isset($name) + { + return property_exists($this, $name) || isset($this->components[$name]); + } + public function run() { $this->router->handleRequest($this->request); diff --git a/src/Base/Component.php b/src/Base/Component.php index <HASH>..<HASH> 100644 --- a/src/Base/Component.php +++ b/src/Base/Component.php @@ -13,4 +13,9 @@ abstract class Component implements Buildable { return new static($config); } + + public function __isset($name) + { + return property_exists($this, $name); + } }
add method __isset to accommodate twig access property
kenphp_ken
train
f14d80ec0bdb2d339472c1e019ebe9a8613f4741
diff --git a/config/web.php b/config/web.php index <HASH>..<HASH> 100644 --- a/config/web.php +++ b/config/web.php @@ -12,6 +12,7 @@ $config = [ ], 'user' => [ 'identityClass' => 'app\models\User', + 'enableAutoLogin' => true, ], 'errorHandler' => [ 'errorAction' => 'site/error',
Fixes #<I>: Added `enableAutoLogin` to basic and advanced application templates so "remember me" now works properly
yiisoft_yii2-app-basic
train
384e180fd6247aa66bc4aca891614c2fb08013a0
diff --git a/lib/git/diff/parser/patches.rb b/lib/git/diff/parser/patches.rb index <HASH>..<HASH> 100644 --- a/lib/git/diff/parser/patches.rb +++ b/lib/git/diff/parser/patches.rb @@ -2,13 +2,13 @@ require 'delegate' module Git module Diff module Parser - class Patches < DelegateClass(Set) + class Patches < DelegateClass(Array) def self.[](*ary) new(ary) end def initialize(*args) - super Set.new(*args) + super Array.new(*args) end end end
feat(patches): use array instead of set
packsaddle_ruby-git_diff_parser
train
1dda4ce3a1ac05a64180076256545bacf1f074bb
diff --git a/salesforce/backend/base.py b/salesforce/backend/base.py
index <HASH>..<HASH> 100644
--- a/salesforce/backend/base.py
+++ b/salesforce/backend/base.py
@@ -25,7 +25,7 @@ from salesforce.backend.validation import DatabaseValidation
 from salesforce.backend.operations import DatabaseOperations
 from salesforce.backend.introspection import DatabaseIntrospection
 from salesforce.backend.schema import DatabaseSchemaEditor
-from salesforce.backend.driver import IntegrityError, DatabaseError, SalesforceError  # NOQA - TODO
+from salesforce.backend.driver import IntegrityError, DatabaseError, SalesforceError, beatbox  # NOQA - TODO
 from salesforce.backend import driver as Database, get_max_retries
 # from django.db.backends.signals import connection_created
 from salesforce import DJANGO_111_PLUS
@@ -49,7 +49,8 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     """
     allows_group_by_pk = True
     supports_unspecified_pk = False
-    can_return_id_from_insert = False
+    can_return_id_from_insert = True
+    can_return_ids_from_bulk_insert = beatbox is not None
     has_bulk_insert = True
     # TODO If the following would be True, it requires a good relation name resolution
     supports_select_related = False
diff --git a/salesforce/backend/compiler.py b/salesforce/backend/compiler.py
index <HASH>..<HASH> 100644
--- a/salesforce/backend/compiler.py
+++ b/salesforce/backend/compiler.py
@@ -410,15 +410,28 @@ class SalesforceWhereNode(where.WhereNode):

 class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
     def execute_sql(self, return_id=False):
-        assert not (return_id and len(self.query.objs) != 1)
+        # copied from Django 1.11, except one line patch
+        assert not (
+            return_id and len(self.query.objs) != 1 and
+            not self.connection.features.can_return_ids_from_bulk_insert
+        )
         self.return_id = return_id
-        cursor = self.connection.cursor(query=self.query)
-        for sql, params in self.as_sql():
-            cursor.execute(sql, params)
-        if not return_id:
-            return
-        return self.connection.ops.last_insert_id(cursor,
-                self.query.model._meta.db_table, self.query.model._meta.pk.column)
+        with self.connection.cursor() as cursor:
+            # this line is the adde patch:
+            cursor.query = self.query
+            for sql, params in self.as_sql():
+                cursor.execute(sql, params)
+            if not (return_id and cursor):
+                return
+            if self.connection.features.can_return_ids_from_bulk_insert and len(self.query.objs) > 1:
+                return self.connection.ops.fetch_returned_insert_ids(cursor)
+            if self.connection.features.can_return_id_from_insert:
+                assert len(self.query.objs) == 1
+                return self.connection.ops.fetch_returned_insert_id(cursor)
+            return self.connection.ops.last_insert_id(
+                cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column
+            )
+        return super(SQLInsertCompiler, self).execute_sql(return_id)


 class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
diff --git a/salesforce/backend/operations.py b/salesforce/backend/operations.py
index <HASH>..<HASH> 100644
--- a/salesforce/backend/operations.py
+++ b/salesforce/backend/operations.py
@@ -53,6 +53,12 @@ class DatabaseOperations(BaseDatabaseOperations):
     def last_insert_id(self, cursor, db_table, db_column):
         return cursor.lastrowid

+    def fetch_returned_insert_id(self, cursor):
+        return cursor.lastrowid
+
+    def fetch_returned_insert_ids(self, cursor):
+        return cursor.lastrowid
+
     def adapt_datefield_value(self, value):
         return value
@@ -80,6 +86,10 @@ class DatabaseOperations(BaseDatabaseOperations):
         items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
         return "VALUES " + ", ".join([items_sql] * num_values)

+    def return_insert_id(self):
+        return "", ()
+
+
 @deconstructible
 class DefaultedOnCreate(object):
     """
diff --git a/salesforce/tests/test_integration.py b/salesforce/tests/test_integration.py
index <HASH>..<HASH> 100644
--- a/salesforce/tests/test_integration.py
+++ b/salesforce/tests/test_integration.py
@@ -438,6 +438,9 @@ class BasicSOQLRoTest(TestCase):
         request_count_0 = salesforce.backend.driver.request_count

         ret = Contact.objects.bulk_create(objects)
+        if DJANGO_110_PLUS:
+            # test that can return ids from bulk_create
+            self.assertTrue(ret and all(x.id for x in ret))
         request_count_1 = salesforce.backend.driver.request_count
         self.assertEqual(request_count_1, request_count_0 + 1)
Support for can_return_ids_from_bulk_insert with Django <I>+
django-salesforce_django-salesforce
train
85412e9e14ccc65bcefec04bd791b27640744897
diff --git a/tests/WebDriverTest.php b/tests/WebDriverTest.php index <HASH>..<HASH> 100644 --- a/tests/WebDriverTest.php +++ b/tests/WebDriverTest.php @@ -2,11 +2,11 @@ namespace Tests; -use BotMan\BotMan\Messages\Attachments\Image; use Mockery as m; use BotMan\BotMan\Http\Curl; use PHPUnit_Framework_TestCase; use BotMan\Drivers\Web\WebDriver; +use BotMan\BotMan\Messages\Attachments\Image; use BotMan\BotMan\Messages\Outgoing\Question; use Symfony\Component\HttpFoundation\Request; use BotMan\Drivers\Facebook\Extensions\Element;
Apply fixes from StyleCI (#2)
botman_driver-web
train
ad96b0e4e75820966ecb5e2fc51ecbacf0fb9f0a
diff --git a/addon/lib/tween.js b/addon/lib/tween.js index <HASH>..<HASH> 100644 --- a/addon/lib/tween.js +++ b/addon/lib/tween.js @@ -52,7 +52,7 @@ export default function tween($element, properties = {}, options = {}, label = ' } function normalizeOptions(properties, options) { - const opts = Object.assign({}, TWEEN_DEFAULTS, options); + const opts = Ember.assign({}, TWEEN_DEFAULTS, options); normalizeCSS(opts, properties); opts.duration = opts.duration / 1000;
Object.assign is not a function. Support for `Object.assign` is still too limited.
runspired_liquid-fire-tweenlite
train
f6892ae82074d0cc42a04f443f206b887656ba81
diff --git a/cherrypy/test/helper.py b/cherrypy/test/helper.py index <HASH>..<HASH> 100644 --- a/cherrypy/test/helper.py +++ b/cherrypy/test/helper.py @@ -124,7 +124,10 @@ def testmain(conf=None, *args, **kwargs): if conf is None: conf = {} setConfig(conf) - cherrypy.server.start_with_callback(_test_main_thread, *args, **kwargs) + try: + cherrypy.server.start_with_callback(_test_main_thread, *args, **kwargs) + except KeyboardInterrupt: + cherrypy.server.stop() def _test_main_thread(): try:
When individual tests were run with --help or -h as command-line arguments, a KeyboardInterrupt exception was raised and the test would hang until another KeyboardInterrupt was raised (e.g., by hitting Ctrl-C).
cherrypy_cheroot
train
ac0150e07833809cbb5a14bb81ac82f75417d802
diff --git a/edisgo/data/import_data.py b/edisgo/data/import_data.py index <HASH>..<HASH> 100644 --- a/edisgo/data/import_data.py +++ b/edisgo/data/import_data.py @@ -563,6 +563,7 @@ def _attach_aggregated(network, grid, aggregated, ding0_grid): v_level=v_level, subtype=subtype), type=aggr_line_type, + kind='cable', length=.5, grid=grid) }
add line kind of agg. gen during import
openego_eDisGo
train
b9df2313a092a58cec74db2f5e61a9e7957abaa1
diff --git a/pynYNAB/schema/Client.py b/pynYNAB/schema/Client.py index <HASH>..<HASH> 100644 --- a/pynYNAB/schema/Client.py +++ b/pynYNAB/schema/Client.py @@ -121,7 +121,6 @@ class nYnabClient_(Base): self.budget.be_accounts.append(account) self.budget.be_payees.append(payee) self.budget.be_transactions.append(transaction) - pass @operation(1) def delete_account(self, account): diff --git a/pynYNAB/schema/Entity.py b/pynYNAB/schema/Entity.py index <HASH>..<HASH> 100644 --- a/pynYNAB/schema/Entity.py +++ b/pynYNAB/schema/Entity.py @@ -78,6 +78,17 @@ class UnknowEntityFieldValueError(Exception): pass +def listfields(instance): + relations = inspect(instance.__class__).relationships + return {k: relations[k].mapper.class_ for k in relations.keys() if + relations[k].direction == ONETOMANY or relations[k].direction == MANYTOMANY} + + +def scalarfields(instance): + scalarcolumns = inspect(instance.__class__).columns + return {k: scalarcolumns[k].type.__class__ for k in scalarcolumns.keys() if k != 'parent_id' and k != 'knowledge_id'} + + class BaseModel(object): id = Column(String, primary_key=True, default=KeyGenerator.generateuuid) is_tombstone = Column(Boolean, default=False) @@ -86,17 +97,11 @@ class BaseModel(object): def __tablename__(cls): return cls.__name__.lower() - @property - def listfields(self): - relations = inspect(self.__class__).relationships - return {k: relations[k].mapper.class_ for k in relations.keys() if - relations[k].direction == ONETOMANY or relations[k].direction == MANYTOMANY} - - @property - def scalarfields(self): - scalarcolumns = self.__table__.columns - return {k: scalarcolumns[k].type.__class__ for k in scalarcolumns.keys() if k != 'parent_id' and k != 'knowledge_id'} - + def __new__(cls, *args, **kwargs): + new_obj = object.__new__(cls, *args, **kwargs) + setattr(new_obj, 'listfields', listfields(new_obj)) + setattr(new_obj, 'scalarfields', scalarfields(new_obj)) + return new_obj def configure_listener(mapper, class_): @@ -162,7 +167,6 @@ re_uuid = re.compile('[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{1 re_date = re.compile(r'\d{4}[\/ .-]\d{2}[\/.-]\d{2}') - def date_from_api(columntype, string): result = re_date.search(string) if result is not None: diff --git a/pynYNAB/utils.py b/pynYNAB/utils.py index <HASH>..<HASH> 100644 --- a/pynYNAB/utils.py +++ b/pynYNAB/utils.py @@ -21,6 +21,7 @@ def rate_limited(maxpersecond): elapsed = time.clock() - lastTimeCalled[0] leftToWait = minInterval - elapsed if leftToWait > 0: + print('rate limiting, waiting %g...' % leftToWait) time.sleep(leftToWait) ret = func(*args, **kargs) lastTimeCalled[0] = time.clock()
listfields and scalarfields added to instance once in __new__
rienafairefr_pynYNAB
train
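The shape of the change above: introspection that used to run on every property access now runs once, in __new__, and is stored on the instance. With the SQLAlchemy inspection stubbed out, the pattern is roughly:

class BaseModel(object):
    def __new__(cls, *args, **kwargs):
        obj = super(BaseModel, cls).__new__(cls)
        # pay the introspection cost once per instance,
        # not on every attribute access
        obj.listfields = cls._list_fields()
        obj.scalarfields = cls._scalar_fields()
        return obj

    @classmethod
    def _list_fields(cls):
        return {}    # stand-in for relationship inspection

    @classmethod
    def _scalar_fields(cls):
        return {}    # stand-in for column inspection

Since the result depends only on the class, caching it per class rather than per instance would avoid even this recomputation.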
e2aa84763760517f45f33aafceb85a2b0036c963
diff --git a/test_ulid2.py b/test_ulid2.py index <HASH>..<HASH> 100644 --- a/test_ulid2.py +++ b/test_ulid2.py @@ -20,6 +20,9 @@ def test_ulid_time_monotonic(generator): '2013-12-01 10:10:10', '2016-07-07 14:12:10', '2016-07-07 14:13:10', + '2016-07-07 14:13:10', + '2016-07-07 14:13:10', + '2016-07-07 14:13:10', ]: dt = datetime.datetime.strptime(time, '%Y-%m-%d %H:%M:%S') ulid = generator(dt) diff --git a/ulid2.py b/ulid2.py index <HASH>..<HASH> 100644 --- a/ulid2.py +++ b/ulid2.py @@ -170,6 +170,8 @@ def get_ulid_time(ulid): return datetime.datetime.utcfromtimestamp(timestamp) +_last_entropy = None +_last_timestamp = None def generate_binary_ulid(timestamp=None): """ Generate the bytes for an ULID. @@ -180,6 +182,7 @@ def generate_binary_ulid(timestamp=None): :return: Bytestring of length 16. :rtype: bytes """ + global _last_entropy, _last_timestamp if timestamp is None: timestamp = time.time() elif isinstance(timestamp, datetime.datetime): @@ -189,7 +192,13 @@ def generate_binary_ulid(timestamp=None): ts_bytes = _to_binary( (ts >> shift) & 0xFF for shift in (40, 32, 24, 16, 8, 0) ) - return ts_bytes + os.urandom(10) + entropy = os.urandom(10) + if _last_timestamp == ts and _last_entropy is not None: + while entropy < _last_entropy: + entropy = os.urandom(10) + _last_entropy = entropy + _last_timestamp = ts + return ts_bytes + entropy def generate_ulid_as_uuid(timestamp=None):
first pass at monotonic randomness for ULIDs in the same millisecond
valohai_ulid2
train
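The monotonicity trick above in isolation: remember the last (timestamp, entropy) pair and, within the same millisecond, redraw the random part until it compares at or above the previous draw. Bytes compare lexicographically in Python, which matches ULID sort order. A sketch mirroring the commit, including its non-strict `<` comparison:

import os

_last_entropy = None
_last_timestamp = None

def generate_entropy(ts):
    global _last_entropy, _last_timestamp
    entropy = os.urandom(10)
    if _last_timestamp == ts and _last_entropy is not None:
        while entropy < _last_entropy:   # redraw until we sort >= the last one
            entropy = os.urandom(10)
    _last_entropy = entropy
    _last_timestamp = ts
    return entropy

Note that the expected number of redraws grows as the previous entropy gets large; the ULID spec's approach of incrementing the previous random component by one avoids that loop entirely.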