Dataset columns:

  hash     string, length 40 to 40
  diff     string, length 131 to 114k
  message  string, length 7 to 980
  project  string, length 5 to 67
  split    string, 1 class ("train")

Each record that follows gives these five field values in order (hash, diff, message, project, split).
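As a reading aid before the records themselves, here is a small sketch of what one record looks like when handled in code. The JSON round-trip and the shortened diff value are assumptions (the dump does not show its storage format); the field names come from the schema above and the sample values from the first record below:

import json

# Illustrative only: build one record with the field names from the schema
# above, round-trip it through JSON, and read two fields back out.
# The "diff" value is truncated here; real diff fields run up to ~114k chars.
record_json = json.dumps({
    "hash": "3cc205bc1a198c6789401609fc51ba5b166ed89b",
    "diff": "diff --git a/kernel.py b/kernel.py ...",  # full diff omitted
    "message": "fixed path in Kernel.assemble",
    "project": "RRZE-HPC_kerncraft",
    "split": "train",
})

record = json.loads(record_json)
print(record["project"], "-", record["message"])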
3cc205bc1a198c6789401609fc51ba5b166ed89b
diff --git a/kernel.py b/kernel.py index <HASH>..<HASH> 100755 --- a/kernel.py +++ b/kernel.py @@ -464,7 +464,7 @@ class Kernel: ''' if not out_filename: if self._filename: - out_filename = os.path.splitext(self._filename)[0] + out_filename = os.path.abspath(os.path.splitext(self._filename)[0]) else: out_filename = tempfile.mkstemp()
fixed path in Kernel.assemble
RRZE-HPC_kerncraft
train
9d6418aa4ba49c531dfd0b8940eba32c605c67f0
diff --git a/grails-datastore-gorm-hibernate5/src/main/groovy/org/grails/orm/hibernate/cfg/GrailsDomainBinder.java b/grails-datastore-gorm-hibernate5/src/main/groovy/org/grails/orm/hibernate/cfg/GrailsDomainBinder.java index <HASH>..<HASH> 100644 --- a/grails-datastore-gorm-hibernate5/src/main/groovy/org/grails/orm/hibernate/cfg/GrailsDomainBinder.java +++ b/grails-datastore-gorm-hibernate5/src/main/groovy/org/grails/orm/hibernate/cfg/GrailsDomainBinder.java @@ -3460,13 +3460,13 @@ public class GrailsDomainBinder implements MetadataContributor { protected final GrailsDomainBinder binder; protected final MetadataBuildingContext buildingContext; - protected static CollectionType SET; - protected static CollectionType LIST; - protected static CollectionType BAG; - protected static CollectionType MAP; - protected static boolean initialized; + protected CollectionType SET; + protected CollectionType LIST; + protected CollectionType BAG; + protected CollectionType MAP; + protected boolean initialized; - protected static final Map<Class<?>, CollectionType> INSTANCES = new HashMap<>(); + protected final Map<Class<?>, CollectionType> INSTANCES = new HashMap<>(); public abstract Collection create(ToMany property, PersistentClass owner, String path, InFlightMetadataCollector mappings, String sessionFactoryBeanName) throws MappingException;
fix for #<I> (#<I>)
grails_gorm-hibernate5
train
b30ee65e77ed70337f2f792af82656b966d07b89
diff --git a/lib/rtlize/rtlizer.rb b/lib/rtlize/rtlizer.rb index <HASH>..<HASH> 100644 --- a/lib/rtlize/rtlizer.rb +++ b/lib/rtlize/rtlizer.rb @@ -72,14 +72,14 @@ module Rtlize # selector.gsub!(/\/\*!= end\(no-rtl\) \*\//, '') end - selector + '{' + self.transform_declarations(declarations) + '}' + selector + '{' + self.transform_declarations(declarations, no_invert) + '}' else rule end end end - def transform_declarations(declarations) + def transform_declarations(declarations, no_invert = false) declarations.split(/;(?!base64)/).map do |decl| m = decl.match(/([^:]+):(.+)$/)
fix: no_invert was not working properly.
maljub01_RTLize
train
fe2cc2f8954cd460ae6ac1ed4296384e80d7b873
diff --git a/test/DebuggerAgent.js b/test/DebuggerAgent.js index <HASH>..<HASH> 100644 --- a/test/DebuggerAgent.js +++ b/test/DebuggerAgent.js @@ -189,10 +189,10 @@ describe('DebuggerAgent', function() { }); }); - describe('canGetStringValuesLargerThan80Chars', function() { + describe('evaluateOnCallFrame', function() { before(setupDebugScenario); - it('returns large String values of 10000', function(done) { + it('truncates String values at 10,000 characters', function(done) { var testExpression = "Array(10000).join('a');"; var expectedValue = Array(10000).join('a'); @@ -223,10 +223,10 @@ describe('DebuggerAgent', function() { } }); - describe('canResumeScriptExecutionWithoutError', function() { + describe('resume()', function() { before(setupDebugScenario); - it('resumes without error', function(done) { + it('does not throw an error', function(done) { expect(function () { agent.resume(); }) .to.not.throw();
test: improved describe/it names Fix few test names to improve the output of `mocha -R spec`.
node-inspector_node-inspector
train
d80a0693902d3832bbf36a46c91c77b373584026
diff --git a/lib/DnsError.js b/lib/DnsError.js index <HASH>..<HASH> 100644 --- a/lib/DnsError.js +++ b/lib/DnsError.js @@ -7,17 +7,10 @@ var DnsError = module.exports = createError({ name: 'DnsError', preprocess: function (err) { if (!(err instanceof DnsError)) { - if (err && err.code) { - var errorName; - if (DnsError.hasOwnProperty(err.code)) { - errorName = err.code; - } else { - errorName = 'NotDnsError'; - } - return new DnsError[errorName](err); - } else { - return new DnsError.NotDnsError(err); + if (err && err.code && DnsError.hasOwnProperty(err.code)) { + return new DnsError[err.code](err); } + // else return generic DnsError } } }); @@ -32,7 +25,8 @@ DnsError.supports = function (errorOrErrorCode) { } }; -function createDnsError(errorCode) { +// create a new DNS error for each error code +Object.keys(dnsErrorCodesMap).forEach(function (errorCode) { var statusCode = dnsErrorCodesMap[errorCode] || 'Unknown'; var options = _.defaults({ name: errorCode, @@ -40,16 +34,9 @@ function createDnsError(errorCode) { statusCode: statusCode, status: statusCode }, _.omit(httpErrors(statusCode), 'message')); - DnsError[errorCode] = createError(options, DnsError); - return DnsError[errorCode]; -} - -// create an Unknown error sentinel -DnsError.NotDnsError = createDnsError('NotDnsError'); + DnsError[errorCode] = createError(options, DnsError); +}); // For backwards compatibility: DnsError.DnsError = DnsError; - -// create a new DNS error for each error code -Object.keys(dnsErrorCodesMap).forEach(createDnsError); diff --git a/test/DnsError.js b/test/DnsError.js index <HASH>..<HASH> 100644 --- a/test/DnsError.js +++ b/test/DnsError.js @@ -40,14 +40,11 @@ describe('DnsError', function () { }); }); - it('will return unknown error if it was not mapped', function () { + it('will return a generic DnsError if it was not mapped', function () { var err = new Error(); var dnsError = new DnsError(err); - expect(dnsError, 'to equal', new DnsError.NotDnsError()); - - // has named errorCode property - expect(dnsError.NotDnsError, 'to be true'); + expect(dnsError, 'to equal', new DnsError()); }); it('will not alter the original error', function () {
Drop the NotDnsError class, use DnsError.supports to check before instantiating instead.
One-com_node-dnserrors
train
5e028c2b5196589ae7e19bd5f956173729358841
diff --git a/org.openbel.framework.common/src/main/java/org/openbel/framework/common/record/StringColumn.java b/org.openbel.framework.common/src/main/java/org/openbel/framework/common/record/StringColumn.java index <HASH>..<HASH> 100644 --- a/org.openbel.framework.common/src/main/java/org/openbel/framework/common/record/StringColumn.java +++ b/org.openbel.framework.common/src/main/java/org/openbel/framework/common/record/StringColumn.java @@ -37,11 +37,9 @@ package org.openbel.framework.common.record; import static java.lang.System.arraycopy; import static java.util.Arrays.fill; -import static java.util.regex.Pattern.compile; import static org.openbel.framework.common.Strings.UTF_8; import java.io.UnsupportedEncodingException; -import java.util.regex.Pattern; /** * Represents a {@link Column} of type {@link String} where the length of the @@ -57,10 +55,8 @@ import java.util.regex.Pattern; */ public class StringColumn extends Column<String> { static final byte space = 32; - static final Pattern pattern; static final byte[] nullValue; static { - pattern = compile("[ ]*$"); nullValue = new byte[space]; fill(nullValue, space); nullValue[0] = (byte) 0; @@ -111,7 +107,7 @@ public class StringColumn extends Column<String> { protected String decodeData(byte[] buffer) { // buffer is guaranteed non-null and proper length String ret = new String(buffer); - ret = pattern.matcher(ret).replaceAll(""); + if (ret.trim().length() == 0) return ""; return ret; }
improved StringColumn.decodeData performance decodeData was noticably slow using regex pattern matching switched to checking the length of the trim() function and returning the "" (empty string) if 0 over <I> strings the run-time went from <I> to <I> milliseconds
OpenBEL_openbel-framework
train
6790999f806353baa43576579812f54742fbab75
diff --git a/alot/message.py b/alot/message.py index <HASH>..<HASH> 100644 --- a/alot/message.py +++ b/alot/message.py @@ -500,21 +500,27 @@ class Envelope(object): if self.sent_time: self.modified_since_sent = True - def attach(self, path, filename=None, ctype=None): + def attach(self, attachment, filename=None, ctype=None): """ attach a file - :param path: (globable) path of the file(s) to attach. - :type path: str + :param attachment: File to attach, given as :class:`Attachment` object + or (globable) path to the file(s). + :type attachment: :class:`Attachment` or str :param filename: filename to use in content-disposition. Will be ignored if `path` matches multiple files :param ctype: force content-type to be used for this attachment :type ctype: str """ - path = os.path.expanduser(path) - part = helper.mimewrap(path, filename, ctype) - self.attachments.append(part) + if isinstance(attachment, Attachment): + self.attachments.append(attachment) + elif isinstance(attachment, str): + path = os.path.expanduser(attachment) + part = helper.mimewrap(path, filename, ctype) + self.attachments.append(Attachment(part)) + else: + raise TypeError('attach accepts an Attachment or str') if self.sent_time: self.modified_since_sent = True @@ -534,7 +540,7 @@ class Envelope(object): for v in vlist: msg[k] = encode_header(k, v) for a in self.attachments: - msg.attach(a) + msg.attach(a.get_mime_representation()) return msg def parse_template(self, tmp, reset=False):
Envelope.attachments: list of Attachment store Attachment objects instead of raw email parts and use accordingly in construct_mail. Envelope.attach now accepts path strings as well as Attachment objects.
pazz_alot
train
0a39449694cf874c92a04d0b634d2dfe0627ac48
diff --git a/spec/api-browser-window-spec.js b/spec/api-browser-window-spec.js index <HASH>..<HASH> 100644 --- a/spec/api-browser-window-spec.js +++ b/spec/api-browser-window-spec.js @@ -153,6 +153,14 @@ describe('browser-window module', function () { }) w.loadURL('file://' + path.join(fixtures, 'api', 'did-fail-load-iframe.html')) }) + + it('does not crash in did-fail-provisional-load handler', function (done) { + w.webContents.once('did-fail-provisional-load', function () { + w.loadURL('http://somewhere-that-does-not.exist/') + done() + }) + w.loadURL('http://somewhere-that-does-not.exist/') + }) }) describe('BrowserWindow.show()', function () {
spec: loadUrl should not crash in did-fail-provisional-load handler
electron_electron
train
9e375e7f37ec5342357a421157774aa21a470aa5
diff --git a/server/sonar-process/src/main/java/org/sonar/process/ProcessEntryPoint.java b/server/sonar-process/src/main/java/org/sonar/process/ProcessEntryPoint.java index <HASH>..<HASH> 100644 --- a/server/sonar-process/src/main/java/org/sonar/process/ProcessEntryPoint.java +++ b/server/sonar-process/src/main/java/org/sonar/process/ProcessEntryPoint.java @@ -78,7 +78,7 @@ public class ProcessEntryPoint implements Stoppable { boolean ready = false; while (!ready) { ready = monitored.isReady(); - Thread.sleep(200L); + Thread.sleep(20L); } // notify monitor that process is ready
Faster sonar-process monitor to detect when process is started
SonarSource_sonarqube
train
ba6f2debb5e032dfe0d15996e827e77bd7aa454d
diff --git a/src/parsers/date.js b/src/parsers/date.js index <HASH>..<HASH> 100644 --- a/src/parsers/date.js +++ b/src/parsers/date.js @@ -12,12 +12,19 @@ module.exports.getPublishedAtDateFromSubtitle = function(subtitle) { return subDays(new Date(), 1); } - const dateRaw = _.head(subtitle.match(dateRegex)); - const dateWithDayFirst = swapDayAndMonthInDate(dateRaw); + const dateString = getDateStringFromSubtitle(subtitle); - return parse(dateWithDayFirst); + return parseDateString(dateString); }; +function getDateStringFromSubtitle(subtitle) { + return _.head(subtitle.match(dateRegex)); +} + +function parseDateString(dateString) { + return parse(swapDayAndMonthInDate(dateString)); +} + function swapDayAndMonthInDate(date) { return date.replace(/(\d+)-(\d+)-(\d+)/, '$2-$1-$3'); }
refactor: extract functions further from date parser
tneudevteam_tneu-news
train
93546c5070d6b29fe4aa0f82cffe7a7e49f763f2
diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index <HASH>..<HASH> 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -79,18 +79,15 @@ def fake_file(fake_dataset, encoding, tmp_path): @pytest.fixture -def fake_files(fake_dataset, encoding): +def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ - "CPPin20140101001500305SVMSG01MD.nc", - "CPPin20140101003000305SVMSG01MD.nc", + tmp_path / "CPPin20140101001500305SVMSG01MD.nc", + tmp_path / "CPPin20140101003000305SVMSG01MD.nc", ] for filename in filenames: fake_dataset.to_netcdf(filename, encoding=encoding) yield filenames - # Cleanup executed after test - for filename in filenames: - os.unlink(filename) @pytest.fixture
Use tmp_path fixture in another test
pytroll_satpy
train
1b230eb59842a3e8080ab25fd445638a52929080
diff --git a/lib/node.js b/lib/node.js index <HASH>..<HASH> 100644 --- a/lib/node.js +++ b/lib/node.js @@ -161,7 +161,7 @@ Node.prototype = { if (!Array.isArray(this.content)) return; this.content.splice(index, 1); - this.indexHasChanged[0] = 1; + if (this.indexHasChanged) this.indexHasChanged[0] = 1; }, toJson: function() { diff --git a/lib/parse.js b/lib/parse.js index <HASH>..<HASH> 100644 --- a/lib/parse.js +++ b/lib/parse.js @@ -36,12 +36,22 @@ function parse(css, options) { try { var ast = parse(tokens, rule, needInfo); - ast.traverseByType = traverseByType.bind(ast); - ast.traverseByTypes = traverseByTypes.bind(ast); } catch (e) { throw new ParsingError(e, css); } + Object.defineProperty(ast, 'traverseByType', { + enumerable: false, + configurable: false, + writable: false, + value: traverseByType.bind(ast) + }); + Object.defineProperty(ast, 'traverseByTypes', { + enumerable: false, + configurable: false, + writable: false, + value: traverseByTypes.bind(ast) + }); return ast; } @@ -58,12 +68,12 @@ function buildIndex(ast, index, indexHasChanged) { i: i }); - buildIndex(node, index); + buildIndex(node, index, indexHasChanged); } } function traverseByType(type, callback) { - if (!this.index || this.indexHasChanged && this.indexHasChanged[0]) { + if (!this.index) { this.index = {stylesheet: [this]}; this.indexHasChanged = [0]; buildIndex(this, this.index, this.indexHasChanged); @@ -75,6 +85,15 @@ function traverseByType(type, callback) { if (!nodes) return; for (var i = 0, l = nodes.length; i < l; i++) { + if (this.indexHasChanged[0]) { + this.index = {stylesheet: [this]}; + this.indexHasChanged = [0]; + buildIndex(this, this.index, this.indexHasChanged); + nodes = this.index[type]; + i += nodes.length - l; + l = nodes.length; + } + var node = nodes[i]; breakLoop = callback(node.node, node.i, node.parent); if (breakLoop === null) break;
[api, node] Update `traverseBy...` methods
tonyganch_gonzales-pe
train
5ef3c5e9a96fd5761342f4f8277d49ecdd5fce96
diff --git a/observe_sequence.js b/observe_sequence.js index <HASH>..<HASH> 100644 --- a/observe_sequence.js +++ b/observe_sequence.js @@ -4,12 +4,14 @@ module.exports = function(Meteor) { var Tracker = Meteor.Tracker; var LocalCollection = Meteor.LocalCollection; var ReactiveVar = Meteor.ReactiveVar; + var EJSON = Meteor.EJSON; var ObserveSequence; //Copied from minimongo package to remove dependency, as this code should go into a helper package var LocalCollection = {}; var Package = {minimongo: {LocalCollection: LocalCollection}}; + // NB: used by livedata LocalCollection._idStringify = function (id) { if (id instanceof LocalCollection._ObjectID) {
remove minimongo dependcies to reduce size
eface2face_meteor-observe-sequence
train
4045c1791ebb4cb41705f5d411cbd0c47d17c327
diff --git a/builtin/credential/github/path_login.go b/builtin/credential/github/path_login.go index <HASH>..<HASH> 100644 --- a/builtin/credential/github/path_login.go +++ b/builtin/credential/github/path_login.go @@ -1,6 +1,7 @@ package github import ( + "context" "fmt" "net/url" "strings" @@ -126,7 +127,7 @@ func (b *backend) verifyCredentials(req *logical.Request, token string) (*verify } // Get the user - user, _, err := client.Users.Get("") + user, _, err := client.Users.Get(context.Background(), "") if err != nil { return nil, nil, err } @@ -140,7 +141,7 @@ func (b *backend) verifyCredentials(req *logical.Request, token string) (*verify var allOrgs []*github.Organization for { - orgs, resp, err := client.Organizations.List("", orgOpt) + orgs, resp, err := client.Organizations.List(context.Background(), "", orgOpt) if err != nil { return nil, nil, err } @@ -170,7 +171,7 @@ func (b *backend) verifyCredentials(req *logical.Request, token string) (*verify var allTeams []*github.Team for { - teams, resp, err := client.Organizations.ListUserTeams(teamOpt) + teams, resp, err := client.Organizations.ListUserTeams(context.Background(), teamOpt) if err != nil { return nil, nil, err }
Fix github compile breakage after dep upgrade
hashicorp_vault
train
1df3981476337fa6a03b572ca5b3f9c0ac7f03d2
diff --git a/centinel/experiments/baseline.py b/centinel/experiments/baseline.py index <HASH>..<HASH> 100644 --- a/centinel/experiments/baseline.py +++ b/centinel/experiments/baseline.py @@ -15,6 +15,7 @@ import csv import logging import os +from random import shuffle import time import urlparse @@ -176,6 +177,7 @@ class BaselineExperiment(Experiment): # the actual tests are run concurrently here + shuffle(http_inputs) start = time.time() logging.info("Running HTTP GET requests...") result["http"] = http.get_requests_batch(http_inputs) @@ -183,7 +185,7 @@ class BaselineExperiment(Experiment): logging.info("HTTP GET requests took " "%d seconds for %d URLs." % (elapsed, len(http_inputs))) - + shuffle(tls_inputs) start = time.time() logging.info("Running TLS certificate requests...") result["tls"] = tls.get_fingerprint_batch(tls_inputs) @@ -191,7 +193,7 @@ class BaselineExperiment(Experiment): logging.info("TLS certificate requests took " "%d seconds for %d domains." % (elapsed, len(tls_inputs))) - + shuffle(dns_inputs) start = time.time() logging.info("Running DNS requests...") result["dns"] = dnslib.lookup_domains(dns_inputs) @@ -201,6 +203,7 @@ class BaselineExperiment(Experiment): len(dns_inputs))) for method in self.traceroute_methods: + shuffle(traceroute_inputs) start = time.time() logging.info("Running %s traceroutes..." % (method.upper()) ) result["traceroute.%s" % (method) ] = (
shuffle input lists to be marginally less detectable
iclab_centinel
train
c3cc8c106ffa19c0d5e91c5a2cbae39da2348c69
diff --git a/WeaveServices/src/weave/servlets/AdminService.java b/WeaveServices/src/weave/servlets/AdminService.java index <HASH>..<HASH> 100644 --- a/WeaveServices/src/weave/servlets/AdminService.java +++ b/WeaveServices/src/weave/servlets/AdminService.java @@ -1267,7 +1267,7 @@ public class AdminService extends GenericServlet // using modified driver from // http://kato.iki.fi/sw/db/postgresql/jdbc/copy/ ((PGConnection) conn).getCopyAPI().copyIntoDB( - String.format("COPY %s FROM STDIN WITH DELIMITER ','", quotedTable), + String.format("COPY %s FROM STDIN WITH CSV HEADER", quotedTable), new FileInputStream(formatted_CSV_path)); }
Fixed bug with postgres support. CSV import syntax was incorrect because it was not skipping the header. Change-Id: I0d8bb4eeffc<I>d6ad<I>cadbbf<I>a4ad<I>aef
WeaveTeam_WeaveJS
train
0cab82961cbba512defffbde8b8c2134da0913ad
diff --git a/lib/inspectors/res.js b/lib/inspectors/res.js index <HASH>..<HASH> 100644 --- a/lib/inspectors/res.js +++ b/lib/inspectors/res.js @@ -570,7 +570,7 @@ getWriterFile(util.getRuleFile(resRules.resWriteRaw), _res.statusCode)], functio }); } try { - res.writeHead(_res.statusCode, _res.headers); + res.writeHead(_res.statusCode, util.formatHeaders(_res.headers)); _res.trailers && res.addTrailers(_res.trailers); } catch(e) { e._resError = true; diff --git a/lib/util/index.js b/lib/util/index.js index <HASH>..<HASH> 100644 --- a/lib/util/index.js +++ b/lib/util/index.js @@ -1265,3 +1265,21 @@ function mergeRules(req, add, isResRules) { } exports.mergeRules = mergeRules; + +var upperCaseRE = /^([a-z])|-([a-z])/g; +var firstCharToUpperCase = function(all, first, dash) { + if (first) { + return first.toUpperCase(); + } + return '-' + dash.toUpperCase(); +}; +function formatHeaders(headers) { + var newHeaders = {}; + Object.keys(headers).forEach(function(name) { + newHeaders[name.replace(upperCaseRE, firstCharToUpperCase)] = headers[name]; + }); + return newHeaders; +} + +exports.formatHeaders = formatHeaders; +
refactor: Output the formated headers
avwo_whistle
train
17e73d15269009118301967e0a2ccf8d6e746c32
diff --git a/chempy/util/tests/test_parsing.py b/chempy/util/tests/test_parsing.py index <HASH>..<HASH> 100644 --- a/chempy/util/tests/test_parsing.py +++ b/chempy/util/tests/test_parsing.py @@ -36,7 +36,9 @@ def test_formula_to_composition_caged(): """Should parse cage species.""" assert formula_to_composition("Li@C60") == {3: 1, 6: 60} assert formula_to_composition("Li@C60Cl") == {3: 1, 6: 60, 17: 1} + assert formula_to_composition("(Li@C60)+") == {0: 1, 3: 1, 6: 60} assert formula_to_composition("Na@C60") == {11: 1, 6: 60} + assert formula_to_composition("(Na@C60)+") == {0: 1, 11: 1, 6: 60} @requires(parsing_library) @@ -108,6 +110,9 @@ def test_formula_to_composition_ionic_compounds(): assert formula_to_composition("Na2CO3..7H2O(s)") == {11: 2, 6: 1, 8: 10, 1: 14} assert formula_to_composition("NaCl") == {11: 1, 17: 1} assert formula_to_composition("NaCl(s)") == {11: 1, 17: 1} + assert formula_to_composition("Ni") == {28: 1} + assert formula_to_composition("NI") == {7: 1, 53: 1} + assert formula_to_composition("KF") == {9: 1, 19: 1} @requires(parsing_library) @@ -408,7 +413,9 @@ def test_formula_to_latex_braces(): def test_formula_to_latex_caged(): """Should produce LaTeX for cage species.""" assert formula_to_latex("Li@C60") == r"Li@C_{60}" + assert formula_to_latex("(Li@C60)+") == r"(Li@C_{60})^{+}" assert formula_to_latex("Na@C60") == r"Na@C_{60}" + assert formula_to_latex("(Na@C60)+") == r"(Na@C_{60})^{+}" @requires(parsing_library) @@ -461,7 +468,9 @@ def test_formula_to_unicode(): def test_formula_to_unicode_caged(): """Should produce LaTeX for cage species.""" assert formula_to_unicode("Li@C60") == r"Li@C₆₀" + assert formula_to_unicode("(Li@C60)+") == r"(Li@C₆₀)⁺" assert formula_to_unicode("Na@C60") == r"Na@C₆₀" + assert formula_to_unicode("(Na@C60)+") == r"(Na@C₆₀)⁺" @requires(parsing_library) @@ -533,4 +542,6 @@ def test_formula_to_html(): def test_formula_to_html_caged(): """Should produce HTML for cage species.""" assert formula_to_html("Li@C60") == r"Li@C<sub>60</sub>" + assert formula_to_html("(Li@C60)+") == r"(Li@C<sub>60</sub>)<sup>+</sup>" assert formula_to_html("Na@C60") == r"Na@C<sub>60</sub>" + assert formula_to_html("(Na@C60)+") == r"(Na@C<sub>60</sub>)<sup>+</sup>"
feat(testing): add more caged examples
bjodah_chempy
train
5d4eff6cd7c8aecb8575ce048e90d6fb80ba0ed5
diff --git a/examples/bookstore/BooksList.js b/examples/bookstore/BooksList.js index <HASH>..<HASH> 100644 --- a/examples/bookstore/BooksList.js +++ b/examples/bookstore/BooksList.js @@ -6,7 +6,6 @@ module.exports = class BooksList extends CollectionView { constructor(properties) { super(Object.assign({id: 'booksList', cellHeight: 72}, properties)); this._books = books.filter(this.filter); - this.on('select', ({index}) => this._showBookDetailsPage(books[index])); this.itemCount = this.books.length; } @@ -22,11 +21,6 @@ module.exports = class BooksList extends CollectionView { return this._filter || (() => true); } - _showBookDetailsPage(book) { - const BookDetailsPage = require('./BookDetailsPage'); - new BookDetailsPage({title: book.title, book}).appendTo(contentView.find('NavigationView').first()); - } - createCell() { super.createCell(); return new BookCell(); @@ -34,8 +28,7 @@ module.exports = class BooksList extends CollectionView { updateCell(view, index) { super.updateCell(view, index); - const {image, title, author} = books[index]; - Object.assign(view, {image, title, author}); + Object.assign(view, {book: books[index]}); } }; @@ -47,30 +40,23 @@ class BookCell extends Composite { this._createUI(); this._applyLayout(); this._applyStyles(); + this.onTap(this._showBookDetailsPage); } - set image(image) { - this.find('#image').first().image = image; - } - - get image() { - return this.find('#image').first().image; + set book(book) { + this._book = book; + this.find('#image').first().image = book.image; + this.find('#titleLabel').first().text = book.title; + this.find('#authorLabel').first().text = book.author; } - set title(title) { - this.find('#titleLabel').first().text = title; + get book() { + return this._book; } - get title() { - return this.find('#titleLabel').first().text; - } - - set author(author) { - this.find('#authorLabel').first().text = author; - } - - get author() { - return this.find('#authorLabel').first().text; + _showBookDetailsPage() { + const BookDetailsPage = require('./BookDetailsPage'); + new BookDetailsPage({title: this.book.title, book: this.book}).appendTo(contentView.find('NavigationView').first()); } _createUI() {
Fix book selection in bookstore example Replace CollectionView select event with tap event on cell.
eclipsesource_tabris-js
train
2934bf511c65261a8b8ad9abb728e5215ca5fa19
diff --git a/src/Graviton/SchemaBundle/Listener/SchemaLinkResponseListener.php b/src/Graviton/SchemaBundle/Listener/SchemaLinkResponseListener.php index <HASH>..<HASH> 100644 --- a/src/Graviton/SchemaBundle/Listener/SchemaLinkResponseListener.php +++ b/src/Graviton/SchemaBundle/Listener/SchemaLinkResponseListener.php @@ -53,7 +53,11 @@ class SchemaLinkResponseListener implements ContainerAwareInterface // extract info from route $routeName = $request->get('_route'); $routeParts = explode('.', $routeName); - list($app, $module, $routeType, $model, $method) = $routeParts; + + list($app, $module, $method) = $routeParts; + if ($routeName !== 'graviton.schema.get') { + list($app, $module, $routeType, $model, $method) = $routeParts; + } $schemaRouteName = 'graviton.schema.get'; $parameters = array('id' => implode('/', array($module, $model)));
hacky workaround for parsing routename in schema case
libgraviton_graviton
train
90f4d014eecd212134c156c6ccf8ca6de7c7fec3
diff --git a/cycy/parser/sourceparser.py b/cycy/parser/sourceparser.py index <HASH>..<HASH> 100644 --- a/cycy/parser/sourceparser.py +++ b/cycy/parser/sourceparser.py @@ -175,6 +175,10 @@ class SourceParser(object): def while_loop(self, p): return While(condition=p[2], body=p[4]) + @pg.production("while_loop : while LEFT_BRACKET expr RIGHT_BRACKET statement") + def while_loop_single_line(self, p): + return While(condition=p[2], body=Block(statements=[p[4]])) + @pg.production("func_call : IDENTIFIER LEFT_BRACKET param_list RIGHT_BRACKET") def function_call(self, p): return Call(name=p[0].getstr(), args=p[2].get_items()) diff --git a/cycy/tests/test_parser.py b/cycy/tests/test_parser.py index <HASH>..<HASH> 100644 --- a/cycy/tests/test_parser.py +++ b/cycy/tests/test_parser.py @@ -449,6 +449,20 @@ class TestParser(TestCase): ) ) + def test_braceless_while_loop(self): + self.assertEqual( + parse(self.function_wrap(""" + while ( i < 10 ) + i++; + """)), + self.function_wrap_node( + While( + condition=BinaryOperation(operator="<", left=Variable(name="i"), right=Int32(value=10)), + body=Block([PostOperation(operator="++", variable=Variable(name="i"))]), + ) + ) + ) + def test_while_loop(self): self.assertEqual( parse(self.function_wrap("while (string[i] != NULL) { putc(string[i++]); }")),
parser supports single line while (no curly braces)
Magnetic_cycy
train
4b29036db919b3dab283a4920eb5055f63f8b002
diff --git a/pkg/kubectl/cmd/apply/apply.go b/pkg/kubectl/cmd/apply/apply.go index <HASH>..<HASH> 100644 --- a/pkg/kubectl/cmd/apply/apply.go +++ b/pkg/kubectl/cmd/apply/apply.go @@ -398,6 +398,7 @@ func (o *ApplyOptions) Run() error { if err != nil { return cmdutil.AddSourceToErr("serverside-apply", info.Source, err) } + options := metav1.PatchOptions{ Force: &o.ForceConflicts, FieldManager: o.FieldManager, @@ -405,6 +406,7 @@ func (o *ApplyOptions) Run() error { if o.ServerDryRun { options.DryRun = []string{metav1.DryRunAll} } + obj, err := resource.NewHelper(info.Client, info.Mapping).Patch( info.Namespace, info.Name, @@ -412,29 +414,33 @@ func (o *ApplyOptions) Run() error { data, &options, ) - if err == nil { - info.Refresh(obj, true) - metadata, err := meta.Accessor(info.Object) - if err != nil { - return err - } - visitedUids.Insert(string(metadata.GetUID())) - count++ - if len(output) > 0 && !shortOutput { - objs = append(objs, info.Object) - return nil - } - printer, err := o.ToPrinter("serverside-applied") - if err != nil { - return err + if err != nil { + if isIncompatibleServerError(err) { + klog.Warningf("serverside-apply incompatible server: %v", err) } - return printer.PrintObj(info.Object, o.Out) - } else if !isIncompatibleServerError(err) { + return err } - // If we're talking to a server which does not implement server-side apply, - // continue with the client side apply after this block. - klog.Warningf("serverside-apply incompatible server: %v", err) + + info.Refresh(obj, true) + metadata, err := meta.Accessor(info.Object) + if err != nil { + return err + } + + visitedUids.Insert(string(metadata.GetUID())) + count++ + if len(output) > 0 && !shortOutput { + objs = append(objs, info.Object) + return nil + } + + printer, err := o.ToPrinter("serverside-applied") + if err != nil { + return err + } + + return printer.PrintObj(info.Object, o.Out) } // Get the modified configuration of the object. Embed the result
Inverted error handling to ensure server-side apply does not fall back on client-side apply when there is an error
kubernetes_kubernetes
train
a947eb7660055027e7406aa45e3c82dd3daa5cab
diff --git a/spring-boot/src/main/java/org/springframework/boot/bind/RelaxedConversionService.java b/spring-boot/src/main/java/org/springframework/boot/bind/RelaxedConversionService.java index <HASH>..<HASH> 100644 --- a/spring-boot/src/main/java/org/springframework/boot/bind/RelaxedConversionService.java +++ b/spring-boot/src/main/java/org/springframework/boot/bind/RelaxedConversionService.java @@ -16,6 +16,9 @@ package org.springframework.boot.bind; +import java.util.EnumSet; +import java.util.Set; + import org.springframework.core.convert.ConversionFailedException; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.TypeDescriptor; @@ -117,7 +120,14 @@ class RelaxedConversionService implements ConversionService { // It's an empty enum identifier: reset the enum value to null. return null; } - return (T) Enum.valueOf(this.enumType, source.trim().toUpperCase()); + source = source.trim(); + for (T candidate : (Set<T>) EnumSet.allOf(this.enumType)) { + if (candidate.name().equalsIgnoreCase(source)) { + return candidate; + } + } + throw new IllegalArgumentException("No enum constant " + + this.enumType.getCanonicalName() + "." + source); } } diff --git a/spring-boot/src/test/java/org/springframework/boot/bind/RelaxedDataBinderTests.java b/spring-boot/src/test/java/org/springframework/boot/bind/RelaxedDataBinderTests.java index <HASH>..<HASH> 100644 --- a/spring-boot/src/test/java/org/springframework/boot/bind/RelaxedDataBinderTests.java +++ b/spring-boot/src/test/java/org/springframework/boot/bind/RelaxedDataBinderTests.java @@ -442,7 +442,7 @@ public class RelaxedDataBinderTests { result = bind(target, "bingo: oR"); assertThat(result.getErrorCount(), equalTo(0)); - assertThat(target.getBingo(), equalTo(Bingo.OR)); + assertThat(target.getBingo(), equalTo(Bingo.or)); result = bind(target, "bingo: that"); assertThat(result.getErrorCount(), equalTo(0)); @@ -755,7 +755,7 @@ public class RelaxedDataBinderTests { } static enum Bingo { - THIS, OR, THAT + THIS, or, THAT } public static class ValidatedTarget {
RelaxedConversionService support lowercase enums Update RelaxedConversionService to support enums that are themselves declared as lower-case (or mixed case) items. Fixes gh-<I>
spring-projects_spring-boot
train
a2875d18f95dd6d64c3ef79f4c37adbfcd81305e
diff --git a/example/recommenders/recotools.py b/example/recommenders/recotools.py index <HASH>..<HASH> 100644 --- a/example/recommenders/recotools.py +++ b/example/recommenders/recotools.py @@ -28,6 +28,7 @@ def CosineLoss(a, b, label): dot = mx.symbol.sum_axis(dot, axis=1) dot = mx.symbol.Flatten(dot) cosine = 1 - dot + cosine = cosine / 2 return mx.symbol.MAERegressionOutput(data=cosine, label=label) def SparseRandomProjection(indexes, values, input_dim, output_dim, ngram=1):
Fixes: Cosine Loss Formula (#<I>)
apache_incubator-mxnet
train
1a4220c4dbba3b331ddd396d3c158ff0fcdc72f2
diff --git a/macros/index.js b/macros/index.js index <HASH>..<HASH> 100644 --- a/macros/index.js +++ b/macros/index.js @@ -184,14 +184,23 @@ macro $adt__compile { } } function parseField(inp) { - var res = inp.takeAPeek(IDENT, COLON); - if (res) { - var cons = parseConstraint(inp); - if (cons) { + var res1 = inp.takeAPeek(IDENT); + if (res1) { + var res2 = inp.takeAPeek(COLON); + if (res2) { + var cons = parseConstraint(inp); + if (cons) { + return { + name: unwrapSyntax(res1[0]), + constraint: cons + }; + } + syntaxError(res2, 'Expected constraint'); + } else { return { - name: unwrapSyntax(res[0]), - constraint: cons - }; + name: unwrapSyntax(res1[0]), + constraint: { type: 'any' } + } } } } @@ -206,7 +215,10 @@ macro $adt__compile { if (expr.success && !expr.rest.length) { return { type: 'literal', stx: expr.result }; } - throwSyntaxError('adt-simple', 'Unexpected token', expr.success ? expr.rest[0] : res[0]); + syntaxError(expr.success ? expr.rest[0] : res[0]); + } + if (inp.length) { + syntaxError(inp.take()); } } function parseClassName(inp) { diff --git a/src/parser.js b/src/parser.js index <HASH>..<HASH> 100644 --- a/src/parser.js +++ b/src/parser.js @@ -65,14 +65,23 @@ function parseSingleton(inp) { } function parseField(inp) { - var res = inp.takeAPeek(IDENT, COLON); - if (res) { - var cons = parseConstraint(inp); - if (cons) { + var res1 = inp.takeAPeek(IDENT); + if (res1) { + var res2 = inp.takeAPeek(COLON); + if (res2) { + var cons = parseConstraint(inp); + if (cons) { + return { + name: unwrapSyntax(res1[0]), + constraint: cons + }; + } + syntaxError(res2, 'Expected constraint'); + } else { return { - name: unwrapSyntax(res[0]), - constraint: cons - }; + name: unwrapSyntax(res1[0]), + constraint: { type: 'any' } + } } } } @@ -90,7 +99,10 @@ function parseConstraint(inp) { if (expr.success && !expr.rest.length) { return { type: 'literal', stx: expr.result }; } - throwSyntaxError('adt-simple', 'Unexpected token', expr.success ? expr.rest[0] : res[0]); + syntaxError(expr.success ? expr.rest[0] : res[0]); + } + if (inp.length) { + syntaxError(inp.take()); } } diff --git a/test/expand.sjs b/test/expand.sjs index <HASH>..<HASH> 100644 --- a/test/expand.sjs +++ b/test/expand.sjs @@ -5,7 +5,7 @@ describe 'Expansion' { data Test1 data Test2 = 'test' data Test3(*, *) - data Test4 { foo: *, bar: * } + data Test4 { foo: *, bar } var posTest = Test3(1, 2); var recTest = Test4(3, 4); @@ -21,7 +21,7 @@ describe 'Expansion' { Test1, Test2 = 'test', Test3(*, *), - Test4 { foo: *, bar: * } + Test4 { foo: *, bar } } var posTest = Test3(1, 2);
Make constraints/contracts optional for record declarations
natefaubion_adt-simple
train
8c42ed630d96565055c5cd2fdedb37dbd6ec4524
diff --git a/js/front/dataset/index.js b/js/front/dataset/index.js index <HASH>..<HASH> 100644 --- a/js/front/dataset/index.js +++ b/js/front/dataset/index.js @@ -75,7 +75,7 @@ new Vue({ * @return {Object} The parsed dataset */ extractDataset() { - const selector = "script[type='application/ld+json']"; + const selector = '#json_ld'; const dataset = JSON.parse(document.querySelector(selector).text) dataset.resources = dataset.distribution; delete dataset.distribution; diff --git a/udata/templates/macros/metadata.html b/udata/templates/macros/metadata.html index <HASH>..<HASH> 100644 --- a/udata/templates/macros/metadata.html +++ b/udata/templates/macros/metadata.html @@ -45,7 +45,7 @@ <meta name="territory-enabled" content="{{ 'true' if config.ACTIVATE_TERRITORIES else 'false' }}"> {% if json_ld %} -<script type="application/ld+json">{{ json_ld|safe }}</script> +<script id="json_ld" type="application/ld+json">{{ json_ld|safe }}</script> {% endif %} {% endmacro %} diff --git a/udata/tests/frontend/__init__.py b/udata/tests/frontend/__init__.py index <HASH>..<HASH> 100644 --- a/udata/tests/frontend/__init__.py +++ b/udata/tests/frontend/__init__.py @@ -20,6 +20,6 @@ class FrontTestCase(WebTestMixin, SearchTestMixin, TestCase): # In the pattern below, we extract the content of the JSON-LD script # The first ? is used to name the extracted string # The second ? is used to express the non-greediness of the extraction - pattern = '<script type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>' + pattern = '<script id="json_ld" type="application/ld\+json">(?P<json_ld>[\s\S]*?)</script>' json_ld = re.search(pattern, response.data).group('json_ld') return json.loads(json_ld)
Use an ID to speed up the retrieval of the JSON on the client side
opendatateam_udata
train
1d14164d72418e42ec201cdbcf37d1ae9ecfa948
diff --git a/panoptes_client/panoptes.py b/panoptes_client/panoptes.py index <HASH>..<HASH> 100644 --- a/panoptes_client/panoptes.py +++ b/panoptes_client/panoptes.py @@ -61,10 +61,10 @@ class Panoptes(object): _endpoint_client_ids = { 'default': ( - 'f79cf5ea821bb161d8cbb52d061ab9a2321d7cb169007003af66b43f7b79ce2a' + 'ce310d45f951de68c4cc8ef46ca38cc0a008f607a2026680295757bfef99f43c' ), 'https://panoptes-staging.zooniverse.org': ( - '535759b966935c297be11913acee7a9ca17c025f9f15520e7504728e71110a27' + 'e094b63362fdef0548e0bbcc6e6cb5996c422d3a770074ef972432d57d41049c' ), }
switch to distinct oauth apps for the python client ensure we have distinct applications for the different clients, PFE, python, etc
zooniverse_panoptes-python-client
train
4fcf0fa12879af7286a345ae68f617fc07cbd284
diff --git a/gui/main.py b/gui/main.py index <HASH>..<HASH> 100644 --- a/gui/main.py +++ b/gui/main.py @@ -138,6 +138,12 @@ class Main(QObject, Ui_MainWindow): QObject.connect(self.buttonPlay, SIGNAL("clicked(bool)"), self.onButtonPlay) QObject.connect(self.buttonIMDB, SIGNAL("clicked(bool)"), self.onButtonIMDB) + self.videoView.__class__.dragEnterEvent = self.dragEnterEvent + self.videoView.__class__.dragMoveEvent = self.dragEnterEvent + self.videoView.__class__.dropEvent = self.dropEvent + self.videoView.setAcceptDrops(1) + + #SETTING UP UPLOAD_VIEW self.uploadModel = UploadListModel(window) self.uploadView.setModel(self.uploadModel) @@ -218,7 +224,20 @@ class Main(QObject, Ui_MainWindow): self.tabs.setCurrentIndex(3) pass + def dragEnterEvent(self, event): + if event.mimeData().hasFormat("text/plain") or event.mimeData().hasFormat("text/uri-list"): + event.accept() + else: + event.ignore() + def dropEvent(self, event): + if event.mimeData().hasFormat('text/uri-list'): + urls = [str(u.toLocalFile().toUtf8()) for u in event.mimeData().urls()] + print urls + else: + url =event.mimeData().text() + print url + def read_settings(self): settings = QSettings() self.window.resize(settings.value("mainwindow/size", QVariant(QSize(1000, 700))).toSize()) @@ -230,7 +249,7 @@ class Main(QObject, Ui_MainWindow): self.uploadIMDB.addItem("%s : %s" % (imdbId, title), QVariant(imdbId)) settings.endArray() programPath = settings.value("options/VideoPlayerPath", QVariant()).toString() - if not programPath == QVariant(): #If not found videoplayer + if programPath == QVariant(): #If not found videoplayer self.initializeVideoPlayer(settings) diff --git a/gui/uploadlistview.py b/gui/uploadlistview.py index <HASH>..<HASH> 100644 --- a/gui/uploadlistview.py +++ b/gui/uploadlistview.py @@ -200,8 +200,26 @@ class UploadListView(QTableView): COL_SUB = 1 def __init__(self, parent): QTableView.__init__(self, parent) - self.setAcceptDrops(1) - + self.setAcceptDrops(True) + + def dragMoveEvent(self, event): + print 1 + if event.mimeData().hasFormat("text/plain"): + event.accept() + else: + event.ignore() + def dragEnterEvent(self, event): + print 2 + if event.mimeData().hasFormat("text/plain"): + event.accept() + else: + event.ignore() + + def dropEvent(self, event): + print 3 + link=event.mimeData().text() + print link + # def dragMoveEvent(self, event): # md = event.mimeData() # if md.hasFormat('application/x-subdownloader-video-id'):
-fixbug in autodetect videoplayers. -started the drag and drop.
subdownloader_subdownloader
train
67b0c9203da650a7bacc4429af5290d8f613da62
diff --git a/src/main/java/javapns/notification/PushNotificationPayload.java b/src/main/java/javapns/notification/PushNotificationPayload.java index <HASH>..<HASH> 100644 --- a/src/main/java/javapns/notification/PushNotificationPayload.java +++ b/src/main/java/javapns/notification/PushNotificationPayload.java @@ -222,8 +222,6 @@ public class PushNotificationPayload extends Payload { } - - /** * Get the custom alert object, creating it if it does not yet exist. * @@ -294,6 +292,11 @@ public class PushNotificationPayload extends Payload { } + public void addCustomAlertTitle(String title) throws JSONException { + put("title", title, getOrAddCustomAlert(), false); + } + + /** * Create a custom alert (if none exist) and add a body to the custom alert. * @@ -306,6 +309,41 @@ public class PushNotificationPayload extends Payload { /** + * Create a custom alert (if none exist) and add a custom subtitle. + * + * @param subtitle the subtitle of the alert + * @throws JSONException + */ + public void addCustomAlertSubtitle(String subtitle) throws JSONException { + put("subtitle", subtitle, getOrAddCustomAlert(), false); + } + + + /** + * Create a custom alert (if none exist) and add a title-loc-key parameter. + * + * @param titleLocKey + * @throws JSONException + */ + public void addCustomAlertTitleLocKey(String titleLocKey) throws JSONException { + Object value = titleLocKey != null ? titleLocKey : new JSONNull(); + put("title-loc-key", titleLocKey, getOrAddCustomAlert(), false); + } + + + /** + * Create a custom alert (if none exist) and add sub-parameters for the title-loc-key parameter. + * + * @param args + * @throws JSONException + */ + public void addCustomAlertTitleLocArgs(List args) throws JSONException { + Object value = args != null && !args.isEmpty() ? args: new JSONNull(); + put("title-loc-args", value, getOrAddCustomAlert(), false); + } + + + /** * Create a custom alert (if none exist) and add a custom text for the right button of the popup. * * @param actionLocKey the title of the alert's right button, or null to remove the button @@ -338,6 +376,18 @@ public class PushNotificationPayload extends Payload { put("loc-args", args, getOrAddCustomAlert(), false); } + + /** + * Create a custom alert (if none exist) and add a launch image. + * + * @param launchImage the subtitle of the alert + * @throws JSONException + */ + public void addCustomAlertLaunchImage(String launchImage) throws JSONException { + put("launch-image", launchImage, getOrAddCustomAlert(), false); + } + + /** * Sets the content available. *
Adding title, subtitle, title-loc-key, title-loc-args, launch-image support.
fernandospr_javapns-jdk16
train
6e068d59688749271c6362035c455e4a87342fd7
diff --git a/app/Http/Controllers/EditNoteController.php b/app/Http/Controllers/EditNoteController.php index <HASH>..<HASH> 100644 --- a/app/Http/Controllers/EditNoteController.php +++ b/app/Http/Controllers/EditNoteController.php @@ -57,7 +57,7 @@ class EditNoteController extends AbstractBaseController { $edit_restriction = $request->get('edit-restriction', ''); // Convert line endings to GEDDCOM continuations - $note = str_replace(["\r\n", "\r", "\n"], "\n1 CONT ", $note); + $note = preg_replace('/\r|\r\n|\n|\r/', "\n1 CONT ", $note); $gedcom = '0 @XREF@ NOTE ' . $note;
Fix: matching line endings when creating new shared notes
fisharebest_webtrees
train
bc619640e15e905718c41201ac8a557695de6965
diff --git a/abydos/stats.py b/abydos/stats.py index <HASH>..<HASH> 100644 --- a/abydos/stats.py +++ b/abydos/stats.py @@ -851,6 +851,13 @@ def amean(nums): :param list nums: A series of numbers :returns: The arithmetric mean of nums :rtype: float + + >>> amean([1, 2, 3, 4]) + 2.5 + >>> amean([1, 2]) + 1.5 + >>> amean([0, 5, 1000]) + 335.0 """ return sum(nums)/len(nums) @@ -866,6 +873,13 @@ def gmean(nums): :param list nums: A series of numbers :returns: The geometric mean of nums :rtype: float + + >>> gmean([1, 2, 3, 4]) + 2.213363839400643 + >>> gmean([1, 2]) + 1.4142135623730951 + >>> gmean([0, 5, 1000]) + 0.0 """ return prod(nums)**(1/len(nums)) @@ -885,6 +899,13 @@ def hmean(nums): :param list nums: A series of numbers :returns: The harmonic mean of nums :rtype: float + + >>> hmean([1, 2, 3, 4]) + 1.9200000000000004 + >>> hmean([1, 2]) + 1.3333333333333333 + >>> hmean([0, 5, 1000]) + 0 """ if len(nums) < 1: raise AttributeError('hmean requires at least one value') @@ -1079,6 +1100,13 @@ def agmean(nums): :param list nums: A series of numbers :returns: The arithmetic-geometric mean of nums :rtype: float + + >>> agmean([1, 2, 3, 4]) + 2.3545004777751077 + >>> agmean([1, 2]) + 1.4567910310469068 + >>> agmean([0, 5, 1000]) + 2.9753977059954195e-13 """ m_a = amean(nums) m_g = gmean(nums) @@ -1099,6 +1127,18 @@ def ghmean(nums): :param list nums: A series of numbers :returns: The geometric-harmonic mean of nums :rtype: float + + >>> ghmean([1, 2, 3, 4]) + 2.058868154613003 + >>> ghmean([1, 2]) + 1.3728805006183502 + >>> ghmean([0, 5, 1000]) + 0.0 + + >>> ghmean([0, 0]) + 0.0 + >>> ghmean([0, 0, 5]) + nan """ m_g = gmean(nums) m_h = hmean(nums) @@ -1120,6 +1160,13 @@ def aghmean(nums): :param list nums: A series of numbers :returns: The arithmetic-geometric-harmonic mean of nums :rtype: float + + >>> aghmean([1, 2, 3, 4]) + 2.198327159900212 + >>> aghmean([1, 2]) + 1.4142135623731884 + >>> aghmean([0, 5, 1000]) + 335.0 """ m_a = amean(nums) m_g = gmean(nums)
a/g/h and combined a/g/h means
chrislit_abydos
train
9cfb9bd860e53121f47103dd5ebdd32cfe9b4107
diff --git a/web/app_dev.php b/web/app_dev.php index <HASH>..<HASH> 100644 --- a/web/app_dev.php +++ b/web/app_dev.php @@ -10,11 +10,7 @@ use Symfony\Component\HttpFoundation\Request; // Feel free to remove this, extend it, or make something more sophisticated. if (isset($_SERVER['HTTP_CLIENT_IP']) || isset($_SERVER['HTTP_X_FORWARDED_FOR']) - || !in_array(@$_SERVER['REMOTE_ADDR'], array( - '127.0.0.1', - 'fe80::1', - '::1', - )) + || !in_array(@$_SERVER['REMOTE_ADDR'], array('127.0.0.1', 'fe80::1', '::1')) ) { header('HTTP/1.0 403 Forbidden'); exit('You are not allowed to access this file. Check '.basename(__FILE__).' for more information.'); diff --git a/web/config.php b/web/config.php index <HASH>..<HASH> 100644 --- a/web/config.php +++ b/web/config.php @@ -4,10 +4,7 @@ if (!isset($_SERVER['HTTP_HOST'])) { exit('This script cannot be run from the CLI. Run it from a browser.'); } -if (!in_array(@$_SERVER['REMOTE_ADDR'], array( - '127.0.0.1', - '::1', -))) { +if (!in_array(@$_SERVER['REMOTE_ADDR'], array('127.0.0.1', 'fe80::1', '::1'))) { header('HTTP/1.0 403 Forbidden'); exit('This script is only accessible from localhost.'); }
added fe<I>::1 as a local IP address in config.php
symfony_symfony-standard
train
8900649e16c2247d1ad2f9faec4ffdb8a0a5c864
diff --git a/shinken/daemons/brokerdaemon.py b/shinken/daemons/brokerdaemon.py index <HASH>..<HASH> 100644 --- a/shinken/daemons/brokerdaemon.py +++ b/shinken/daemons/brokerdaemon.py @@ -472,11 +472,13 @@ class Broker(BaseSatellite): # and for external queues # REF: doc/broker-modules.png (3) - for b in self.broks: + # We put to external queues broks that was not already send + queues = self.modules_manager.get_external_to_queues() + for b in (b for b in self.broks if getattr(b, 'need_send_to_ext', True)): # if b.type != 'log': - # print "Broker : put brok id : %d" % b.id - for q in self.modules_manager.get_external_to_queues(): + for q in queues: q.put(b) + b.need_send_to_ext = False # We must had new broks at the end of the list, so we reverse the list self.broks.reverse()
Fix : do NOT resend already send broks to external queues if internal ones are low (like the ndo one).
Alignak-monitoring_alignak
train
3df54dfe7a6c41a225ae02cd66eb267b3234f943
diff --git a/axiom/store.py b/axiom/store.py index <HASH>..<HASH> 100644 --- a/axiom/store.py +++ b/axiom/store.py @@ -28,7 +28,7 @@ class XFilePath(FilePath): def dirname(self): return os.path.dirname(self.path) -def _md(dirname): +def _mkdirIfNotExists(dirname): if os.path.isdir(dirname): return False os.makedirs(dirname) @@ -51,7 +51,7 @@ class AtomicFile(file): now = time.time() try: file.close(self) - _md(self._destpath.dirname()) + _mkdirIfNotExists(self._destpath.dirname()) self.finalpath = self._destpath os.rename(self.name, self.finalpath.path) os.utime(self.finalpath.path, (now, now)) @@ -106,9 +106,9 @@ class Store(Empowered): "The path %r is already a directory, " "but not an Axiom Store" % (dbfpath,)) else: - _md(dbdir) - _md(self.filesdir) - _md(os.path.join(dbdir, 'temp')) + _mkdirIfNotExists(dbdir) + _mkdirIfNotExists(self.filesdir) + _mkdirIfNotExists(os.path.join(dbdir, 'temp')) self.dbdir = dbdir self.connection = sqlite.connect(dbfpath) self.cursor = self.connection.cursor()
i don't understand what is so hard about typing.
twisted_axiom
train
3e967dd85d849e2c0f8720b12d2070974279bcb8
diff --git a/cli/src/main/java/hudson/cli/CLI.java b/cli/src/main/java/hudson/cli/CLI.java index <HASH>..<HASH> 100644 --- a/cli/src/main/java/hudson/cli/CLI.java +++ b/cli/src/main/java/hudson/cli/CLI.java @@ -136,6 +136,13 @@ public class CLI { return execute(Arrays.asList(args)); } + /** + * Returns true if the named command exists. + */ + public boolean hasCommand(String name) { + return entryPoint.hasCommand(name); + } + public static void main(final String[] _args) throws Exception { List<String> args = Arrays.asList(_args); diff --git a/cli/src/main/java/hudson/cli/CliEntryPoint.java b/cli/src/main/java/hudson/cli/CliEntryPoint.java index <HASH>..<HASH> 100644 --- a/cli/src/main/java/hudson/cli/CliEntryPoint.java +++ b/cli/src/main/java/hudson/cli/CliEntryPoint.java @@ -43,6 +43,11 @@ public interface CliEntryPoint { int main(List<String> args, Locale locale, InputStream stdin, OutputStream stdout, OutputStream stderr); /** + * Does the named command exist? + */ + boolean hasCommand(String name); + + /** * Returns {@link #VERSION}, so that the client and the server can detect version incompatibility * gracefully. */ diff --git a/core/src/main/java/hudson/cli/CliManagerImpl.java b/core/src/main/java/hudson/cli/CliManagerImpl.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/cli/CliManagerImpl.java +++ b/core/src/main/java/hudson/cli/CliManagerImpl.java @@ -86,6 +86,10 @@ public class CliManagerImpl implements CliEntryPoint, Serializable { return -1; } + public boolean hasCommand(String name) { + return CLICommand.clone(name)!=null; + } + public int protocolVersion() { return VERSION; }
added a mechanism to check the existence of a command. git-svn-id: <URL>
jenkinsci_jenkins
train
f5eee2a837fa157b4d4ba476103ab252bb11414b
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -15,8 +15,10 @@ var testNode = require('./lib/ast-tests'), * @param {object} opt An options hash */ function browserifyNgInject(opt) { - var options = merge({ filter: filter }, opt); - return esprimaTools.createTransform(updater, opt); // json files cannot be parsed by esprima + var options = merge({ + filter: filter // try to remove files that cannot be parsed by esprima + }, opt); + return esprimaTools.createTransform(updater, options); } module.exports = browserifyNgInject;
allow json files to be passed through without transform and without error
bholloway_browserify-nginject
train
9fddb5de999bb4faf3f95c81efcd23c19c857247
diff --git a/src/Forms/GridField/GridFieldDeleteAction.php b/src/Forms/GridField/GridFieldDeleteAction.php index <HASH>..<HASH> 100644 --- a/src/Forms/GridField/GridFieldDeleteAction.php +++ b/src/Forms/GridField/GridFieldDeleteAction.php @@ -41,7 +41,7 @@ class GridFieldDeleteAction implements GridField_ColumnProvider, GridField_Actio */ public function __construct($removeRelation = false) { - $this->removeRelation = $removeRelation; + $this->setRemoveRelation($removeRelation); } /** @@ -115,7 +115,7 @@ class GridFieldDeleteAction implements GridField_ColumnProvider, GridField_Actio */ public function getColumnContent($gridField, $record, $columnName) { - if ($this->removeRelation) { + if ($this->getRemoveRelation()) { if (!$record->canEdit()) { return null; } @@ -187,4 +187,25 @@ class GridFieldDeleteAction implements GridField_ColumnProvider, GridField_Actio } } } + + /** + * Get whether to remove or delete the relation + * + * @return bool + */ + public function getRemoveRelation() + { + return $this->removeRelation; + } + + /** + * Set whether to remove or delete the relation + * @param bool $removeRelation + * @return $this + */ + public function setRemoveRelation($removeRelation) + { + $this->removeRelation = (bool) $removeRelation; + return $this; + } }
NEW Add getter and setter for removeRelation in GridFieldDeleteAction This allows users to modify the property value in the component without having to remove it and add a new one when they want to change it.
silverstripe_silverstripe-framework
train
d2d973794bd381478f345b1ee1cbcf4854d79859
diff --git a/sqlite_migrations.js b/sqlite_migrations.js index <HASH>..<HASH> 100644 --- a/sqlite_migrations.js +++ b/sqlite_migrations.js @@ -371,7 +371,7 @@ function migrateDb(connection, onDone){ SELECT address, IFNULL(asset, 'base'), SUM(amount) AS balance \n\ FROM aa_addresses \n\ CROSS JOIN outputs USING(address) \n\ - CROSS JOIN units USING(unit) \n\ + CROSS JOIN units ON outputs.unit=units.unit \n\ WHERE is_spent=0 AND ( \n\ is_stable=1 \n\ OR EXISTS (SELECT 1 FROM unit_authors CROSS JOIN aa_addresses USING(address) WHERE unit_authors.unit=outputs.unit) \n\
fix join, was using unit from wrong table
byteball_ocore
train
87ca8fe21f2a06fa3b5b186964a3261c68942f1c
diff --git a/src/Impetus.js b/src/Impetus.js index <HASH>..<HASH> 100644 --- a/src/Impetus.js +++ b/src/Impetus.js @@ -4,6 +4,11 @@ const bounceDeceleration = 0.04; const bounceAcceleration = 0.11; +// fixes weird safari 10 bug where preventDefault is prevented +// @see https://github.com/metafizzy/flickity/issues/457#issuecomment-254501356 +window.addEventListener('touchmove', function() {}); + + export default class Impetus { constructor({ source: sourceEl = document,
fix: preventDefault not working in Safari <I>
chrisbateman_impetus
train
d19dd3547a253dabe745966ee1219ea279f017c2
diff --git a/src/Handlers/AbstractHandler.php b/src/Handlers/AbstractHandler.php index <HASH>..<HASH> 100644 --- a/src/Handlers/AbstractHandler.php +++ b/src/Handlers/AbstractHandler.php @@ -38,5 +38,4 @@ abstract class AbstractHandler { $this->registered = true; } - } diff --git a/src/Handlers/ErrorHandler.php b/src/Handlers/ErrorHandler.php index <HASH>..<HASH> 100644 --- a/src/Handlers/ErrorHandler.php +++ b/src/Handlers/ErrorHandler.php @@ -15,21 +15,21 @@ class ErrorHandler extends AbstractHandler } public function handle() - { + { /** * Overloading methods with different parameters is not supported in PHP * through language structures. This hack allows to simulate that. */ $args = func_get_args(); + if (!isset($args[0]) || !isset($args[1])) { throw new \Exception('No $errno or $errstr to be passed to the error handler.'); - } else { - $errno = $args[0]; - $errstr = $args[1]; } + + $errno = $args[0]; + $errstr = $args[1]; $errfile = isset($args[2]) ? $args[2] : null; $errline = isset($args[3]) ? $args[3] : null; - $errcontext = isset($args[4]) ? $args[4] : null; parent::handle(); @@ -58,5 +58,4 @@ class ErrorHandler extends AbstractHandler return false; } - } diff --git a/src/Handlers/ExceptionHandler.php b/src/Handlers/ExceptionHandler.php index <HASH>..<HASH> 100644 --- a/src/Handlers/ExceptionHandler.php +++ b/src/Handlers/ExceptionHandler.php @@ -15,7 +15,7 @@ class ExceptionHandler extends AbstractHandler } public function handle() - { + { parent::handle(); @@ -24,12 +24,13 @@ class ExceptionHandler extends AbstractHandler * through language structures. This hack allows to simulate that. */ $args = func_get_args(); + if (!isset($args[0])) { throw new \Exception('No exception to be passed to the exception handler.'); - } else { - $exception = $args[0]; } + $exception = $args[0]; + $this->logger()->log(Level::ERROR, $exception, array(), true); if ($this->previousHandler) { restore_exception_handler(); @@ -38,7 +39,5 @@ class ExceptionHandler extends AbstractHandler } throw $exception; - } - } diff --git a/src/Handlers/FatalHandler.php b/src/Handlers/FatalHandler.php index <HASH>..<HASH> 100644 --- a/src/Handlers/FatalHandler.php +++ b/src/Handlers/FatalHandler.php @@ -32,7 +32,6 @@ class FatalHandler extends AbstractHandler $lastError = error_get_last(); if ($this->isFatal($lastError)) { - $errno = $lastError['type']; $errstr = $lastError['message']; $errfile = $lastError['file']; @@ -44,14 +43,13 @@ class FatalHandler extends AbstractHandler $this->logger()->log(Level::CRITICAL, $exception, array(), true); } - } /** * Check if the error triggered is indeed a fatal error. - * + * * @var array $lastError Information fetched from error_get_last(). - * + * * @return bool */ protected function isFatal($lastError) @@ -63,5 +61,4 @@ class FatalHandler extends AbstractHandler !(isset($lastError['message']) && strpos($lastError['message'], 'Uncaught') === 0); } - } diff --git a/tests/Handlers/ExceptionHandlerTest.php b/tests/Handlers/ExceptionHandlerTest.php index <HASH>..<HASH> 100644 --- a/tests/Handlers/ExceptionHandlerTest.php +++ b/tests/Handlers/ExceptionHandlerTest.php @@ -21,17 +21,16 @@ class ExceptionHandlerTest extends BaseRollbarTest /** * It's impossible to throw an uncaught exception with PHPUnit and thus * trigger the exception handler automatically. To overcome this limitation, - * this test invokes the handle() methd manually with an assertion in the + * this test invokes the handle() methd manually with an assertion in the * previously set exception handler. 
*/ public function testPreviousExceptionHandler() { $testCase = $this; - set_exception_handler(function() use ($testCase) { + set_exception_handler(function () use ($testCase) { $testCase->assertTrue(true, "Previous exception handler invoked."); - }); $handler = new ExceptionHandler(new RollbarLogger(self::$simpleConfig)); @@ -61,7 +60,6 @@ class ExceptionHandlerTest extends BaseRollbarTest $setExceptionHandler = set_exception_handler(null); $setExceptionHandler(null); - } /** @@ -70,7 +68,7 @@ class ExceptionHandlerTest extends BaseRollbarTest * expected with an unhandled exception. Unfortunately, for some reason, * this doesn't happen when you run the full TestSuite. That's why * there is no expectedException here. - * + * * @expectedException \Exception */ public function testHandle()
github-<I>: fix codacy issues
rollbar_rollbar-php
train
806276702bcb5cb6027cbe89e11533fd4ed5a319
diff --git a/file-adapter/adapter.go b/file-adapter/adapter.go index <HASH>..<HASH> 100644 --- a/file-adapter/adapter.go +++ b/file-adapter/adapter.go @@ -43,7 +43,7 @@ func NewAdapter(filePath string) *Adapter { // LoadPolicy loads all policy rules from the storage. func (a *Adapter) LoadPolicy(model model.Model) error { if a.filePath == "" { - return errors.New("Invalid file path, file path cannot be empty") + return errors.New("invalid file path, file path cannot be empty") } err := a.loadPolicyFile(model, persist.LoadPolicyLine) @@ -53,7 +53,7 @@ func (a *Adapter) LoadPolicy(model model.Model) error { // SavePolicy saves all policy rules to the storage. func (a *Adapter) SavePolicy(model model.Model) error { if a.filePath == "" { - return errors.New("Invalid file path, file path cannot be empty") + return errors.New("invalid file path, file path cannot be empty") } var tmp bytes.Buffer
Improve the error string in file adapter.
casbin_casbin
train
fbd58f9789d27207e30ab2c126773bc6894a15d6
diff --git a/menuconfig.py b/menuconfig.py index <HASH>..<HASH> 100755 --- a/menuconfig.py +++ b/menuconfig.py @@ -742,12 +742,16 @@ def _jump_to(node): # parent menus before. _parent_screen_rows = [] - # Turn on show-all mode if the node isn't visible - if not (node.prompt and expr_value(node.prompt[1])): - _show_all = True - _cur_menu = _parent_menu(node) _shown = _shown_nodes(_cur_menu) + if node not in _shown: + # Turn on show-all mode if the node wouldn't be shown. Checking whether + # the node is visible instead would needlessly turn on show-all mode in + # an obscure case: when jumping to an invisible symbol with visible + # children from an implicit submenu. + _show_all = True + _shown = _shown_nodes(_cur_menu) + _sel_node_i = _shown.index(node) _center_vertically()
menuconfig: Fix a case of needlessly turning on show-all Show-all mode does not need to be enabled when jumping to an invisible symbol with visible children from an implicit submenu, because the invisible symbol will be shown anyway in that case. Explicitly check whether the jumped-to node would be shown instead, and enable show-all mode otherwise.
ulfalizer_Kconfiglib
train
f1a38d0c099324f7c53871f8ab7566d916a700de
diff --git a/ui/src/utils/groupByTimeSeriesTransform.js b/ui/src/utils/groupByTimeSeriesTransform.js index <HASH>..<HASH> 100644 --- a/ui/src/utils/groupByTimeSeriesTransform.js +++ b/ui/src/utils/groupByTimeSeriesTransform.js @@ -124,13 +124,14 @@ const constructCells = serieses => { forEach(rows, ({vals}) => { const [time, ...rowValues] = vals forEach(rowValues, (value, i) => { - cells.label[cellIndex] = unsortedLabels[i].label - cells.value[cellIndex] = value - cells.time[cellIndex] = time - cells.seriesIndex[cellIndex] = seriesIndex - cells.responseIndex[cellIndex] = responseIndex - cells.isGroupBy[cellIndex] = isGroupBy - cellIndex++ // eslint-disable-line no-plusplus + if (!isGroupBy) { + cells.label[cellIndex] = unsortedLabels[i].label + cells.value[cellIndex] = value + cells.time[cellIndex] = time + cells.seriesIndex[cellIndex] = seriesIndex + cells.responseIndex[cellIndex] = responseIndex + cellIndex++ // eslint-disable-line no-plusplus + } }) }) } @@ -197,11 +198,6 @@ const constructTimeSeries = (serieses, cells, sortedLabels, seriesLabels) => { const seriesIndex = cells.seriesIndex[i] const responseIndex = cells.responseIndex[i] - if (cells.isGroupBy[i]) { - // we've already inserted GroupByValues - continue - } - if (label.includes('_shifted__')) { const [, quantity, duration] = label.split('__') time = +shiftDate(time, quantity, duration).format('x')
Prevent construction of cells from series that have a groupBy
influxdata_influxdb
train
8b114305351e5a2a7d6ae009487050ad7f57056f
diff --git a/lib/pghero.rb b/lib/pghero.rb index <HASH>..<HASH> 100644 --- a/lib/pghero.rb +++ b/lib/pghero.rb @@ -217,14 +217,14 @@ module PgHero end # private - # Rails 6.1 deprecate `spec_name` and use `name` for configurations + # Rails 6.1 deprecates `spec_name` for `name` # https://github.com/rails/rails/pull/38536 def spec_name_key ActiveRecord::VERSION::STRING.to_f >= 6.1 ? :name : :spec_name end # private - # Rails 7.0 deprecate `include_replicas` and use `include_hidden` + # Rails 7.0 deprecates `include_replicas` for `include_hidden` def include_replicas_key ActiveRecord::VERSION::MAJOR >= 7 ? :include_hidden : :include_replicas end
Updated comments [skip ci]
ankane_pghero
train
5a15fef7c7fc8d46b8f701229213a1447bfd008d
diff --git a/tests/test_for_support/test_for_sbo.py b/tests/test_for_support/test_for_sbo.py index <HASH>..<HASH> 100644 --- a/tests/test_for_support/test_for_sbo.py +++ b/tests/test_for_support/test_for_sbo.py @@ -85,7 +85,8 @@ def test_find_components_without_sbo_terms(model, num, components): ("multiple_sbo_terms", 1, "reactions", ["SBO:1","SBO:2","SBO:3"]), ("multiple_sbo_terms", 1, "genes", ["SBO:1","SBO:2","SBO:3"]) ], indirect=["model"]) -def test_find_components_without_specific_sbo_term(model, num, components): +def test_find_components_without_specific_sbo_term(model, num, components, + term): """Expect `num` components to have a specific sbo annotation.""" no_match_to_specific_term = sbo.check_component_for_specific_sbo_term( getattr(model, components), term)
fix(): add missing term argument in unit test Add a missing argument (term) in a refactored unit test.
opencobra_memote
train
74118767135b8e65f083ce3aed4cf2b192c41c82
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -11,10 +11,6 @@ module.exports = function(grunt) { // Project configuration. grunt.initConfig({ - - // Project paths and files. - links: grunt.file.readYAML('docs/data/url.yml'), - // package.json pkg: grunt.file.readJSON('package.json'), meta: { @@ -24,6 +20,7 @@ module.exports = function(grunt) { '* Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author.name %>;' + ' Licensed <%= _.pluck(pkg.licenses, "type").join(", ") %> */' }, + jshint: { files: [ 'Gruntfile.js', @@ -43,6 +40,26 @@ module.exports = function(grunt) { options: { reporter: 'nyan' } + }, + + // Run simple tests. + assemble: { + tests: { + options: { + layout: 'test/files/layout-includes.hbs' + }, + files: { + 'test/actual': ['test/files/extend.hbs'] + } + }, + yaml: { + options: { + layout: 'test/files/layout.hbs' + }, + files: { + 'test/actual/yaml': ['test/yaml/*.hbs'] + } + } } }); @@ -55,12 +72,13 @@ module.exports = function(grunt) { // Default task. grunt.registerTask('default', [ + 'assemble', 'jshint' ]); // Tests to be run. grunt.registerTask('test', [ - 'jshint', + 'default', 'mochaTest' ]); };
remove no-longer-needed links object from Gruntfile.
assemble_grunt-assemble
train
08340ca19c411a9b35b7e51c0232e3ea1cc72300
diff --git a/HISTORY.md b/HISTORY.md index <HASH>..<HASH> 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -2,6 +2,9 @@ None yet! +* Fix issue where calling Worker.find, Worker.all, or Worker.working from withing + a running job would rewrite the PID file with the PID of the forked worker. (@jeremywadsack) + ## 1.26.0 (2016-03-10) This changelog is a bit incomplete. We will be much stricter about the changelog for diff --git a/lib/resque/tasks.rb b/lib/resque/tasks.rb index <HASH>..<HASH> 100644 --- a/lib/resque/tasks.rb +++ b/lib/resque/tasks.rb @@ -15,6 +15,7 @@ namespace :resque do abort "set QUEUE env var, e.g. $ QUEUE=critical,high rake resque:work" end + worker.prepare worker.log "Starting worker #{self}" worker.work(ENV['INTERVAL'] || 5) # interval, will block end diff --git a/lib/resque/worker.rb b/lib/resque/worker.rb index <HASH>..<HASH> 100644 --- a/lib/resque/worker.rb +++ b/lib/resque/worker.rb @@ -124,6 +124,9 @@ module Resque # If passed a single "*", this Worker will operate on all queues # in alphabetical order. Queues can be dynamically added or # removed without needing to restart workers using this method. + # + # Workers should have `#prepare` called after they are initialized + # if you are running work on the worker. def initialize(*queues) @shutdown = nil @paused = nil @@ -137,6 +140,13 @@ module Resque self.graceful_term = ENV['GRACEFUL_TERM'] self.run_at_exit_hooks = ENV['RUN_AT_EXIT_HOOKS'] + self.queues = queues + end + + # Daemonizes the worker if ENV['BACKGROUND'] is set and writes + # the process id to ENV['PIDFILE'] if set. Should only be called + # once per worker. + def prepare if ENV['BACKGROUND'] unless Process.respond_to?('daemon') abort "env var BACKGROUND is set, which requires ruby >= 1.9" @@ -148,8 +158,6 @@ module Resque if ENV['PIDFILE'] File.open(ENV['PIDFILE'], 'w') { |f| f << pid } end - - self.queues = queues end def queues=(queues) diff --git a/test/test_helper.rb b/test/test_helper.rb index <HASH>..<HASH> 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -253,3 +253,26 @@ def without_forking ENV['FORK_PER_JOB'] = orig_fork_per_job end end + +def with_pidfile + old_pidfile = ENV["PIDFILE"] + begin + file = Tempfile.new("pidfile") + file.close + ENV["PIDFILE"] = file.path + yield + ensure + file.unlink if file + ENV["PIDFILE"] = old_pidfile + end +end + +def with_background + old_background = ENV["BACKGROUND"] + begin + ENV["BACKGROUND"] = "true" + yield + ensure + ENV["BACKGROUND"] = old_background + end +end diff --git a/test/worker_test.rb b/test/worker_test.rb index <HASH>..<HASH> 100644 --- a/test/worker_test.rb +++ b/test/worker_test.rb @@ -55,6 +55,25 @@ describe "Resque::Worker" do assert_equal 0, Resque::Failure.count end + it "writes to ENV['PIDFILE'] when supplied and #prepare is called" do + with_pidfile do + tmpfile = Tempfile.new("test_pidfile") + File.expects(:open).with(ENV["PIDFILE"], anything).returns tmpfile + @worker.prepare + end + end + + it "daemonizes when ENV['BACKGROUND'] is supplied and #prepare is called" do + if Process.respond_to?("daemon") + Process.expects(:daemon) + with_background do + @worker.prepare + end + else + skip("Process.daemon not supported; requires ruby >= 1.9") + end + end + it "executes at_exit hooks when configured with run_at_exit_hooks" do tmpfile = File.join(Dir.tmpdir, "resque_at_exit_test_file") FileUtils.rm_f tmpfile @@ -595,6 +614,18 @@ describe "Resque::Worker" do end end + it "doesn't write PID file when finding" do + with_pidfile do + File.expects(:open).never + + 
without_forking do + @worker.work(0) do + Resque::Worker.find(@worker.to_s) + end + end + end + end + it "prunes dead workers with heartbeat older than prune interval" do now = Time.now
Initializing a worker should not write the PID file or re-daemonize. Class methods like ::find (and those that depend on it, like ::all and ::working) call #new to create new instances of the worker objects. If these methods are called from within the forked process, #initialize would rewrite the PIDFILE or re-daemonize the process. This resolves that by moving daemonization and PID-file writing into a separate #prepare step that the rake task calls when starting a worker. See <URL>
resque_resque
train
c517affaf13e4682e7622b14540e2056e43cf24e
diff --git a/src/Notifynder/Traits/Notifable.php b/src/Notifynder/Traits/Notifable.php index <HASH>..<HASH> 100644 --- a/src/Notifynder/Traits/Notifable.php +++ b/src/Notifynder/Traits/Notifable.php @@ -53,7 +53,7 @@ trait Notifable * * @return \Illuminate\Database\Eloquent\Relations\HasMany|\Illuminate\Database\Eloquent\Relations\MorphMany */ - private function getLazyLoadedNotificationRelation() + protected function getLazyLoadedNotificationRelation() { return $this->notifications(); } diff --git a/src/Notifynder/Traits/NotifableLaravel53.php b/src/Notifynder/Traits/NotifableLaravel53.php index <HASH>..<HASH> 100644 --- a/src/Notifynder/Traits/NotifableLaravel53.php +++ b/src/Notifynder/Traits/NotifableLaravel53.php @@ -53,7 +53,7 @@ trait NotifableLaravel53 * * @return \Illuminate\Database\Eloquent\Relations\HasMany|\Illuminate\Database\Eloquent\Relations\MorphMany */ - private function getLazyLoadedNotificationRelation() + protected function getLazyLoadedNotificationRelation() { return $this->notifynderNotifications(); } diff --git a/tests/integration/Traits/NotifableEagerLoadingTest.php b/tests/integration/Traits/NotifableEagerLoadingTest.php index <HASH>..<HASH> 100644 --- a/tests/integration/Traits/NotifableEagerLoadingTest.php +++ b/tests/integration/Traits/NotifableEagerLoadingTest.php @@ -65,7 +65,7 @@ class NotifableEagerLoadingTest extends NotifynderTestCase $this->assertModelHasNoLoadedRelations($notifications[0], ['to']); } - private function assertModelHasLoadedRelations($model, $relationNames = []) + protected function assertModelHasLoadedRelations($model, $relationNames = []) { $modelLoadedRelations = $model->getRelations(); foreach ($relationNames as $relationName) { @@ -73,7 +73,7 @@ class NotifableEagerLoadingTest extends NotifynderTestCase } } - private function assertModelHasNoLoadedRelations($model, $relationNames = []) + protected function assertModelHasNoLoadedRelations($model, $relationNames = []) { $modelLoadedRelations = $model->getRelations(); foreach ($relationNames as $relationName) {
Modifying function visibilities from private to protected
fenos_Notifynder
train
1a93e2e319a005a6992274096af63dc15b6f410b
diff --git a/engineer/engine.py b/engineer/engine.py index <HASH>..<HASH> 100644 --- a/engineer/engine.py +++ b/engineer/engine.py @@ -1,7 +1,6 @@ # coding=utf-8 import argparse import gzip -import humanize import logging import sys import time @@ -9,6 +8,7 @@ import times from codecs import open from path import path from engineer.exceptions import ThemeNotFoundException +from engineer.filters import naturaltime from engineer.log import get_console_handler, bootstrap from engineer.plugins import CommandPlugin, load_plugins from engineer.util import relpath, compress @@ -165,7 +165,7 @@ def build(args=None): logger.warning("This site contains the following pending posts:") for post in all_posts.pending: logger.warning("\t'%s' - publish time: %s, %s." % (post.title, - humanize.naturaltime(post.timestamp), + naturaltime(post.timestamp), post.timestamp_local)) logger.warning("These posts won't be published until you build the site again after their publish time.")
Fix pending posts warning to use correct natural time.
tylerbutler_engineer
train
b8dcadadb75a43d10b3d56ec7829237ae41db843
diff --git a/packages/roc-package-webpack-dev/src/builder/index.js b/packages/roc-package-webpack-dev/src/builder/index.js index <HASH>..<HASH> 100644 --- a/packages/roc-package-webpack-dev/src/builder/index.js +++ b/packages/roc-package-webpack-dev/src/builder/index.js @@ -21,7 +21,8 @@ export default ({ previousValue: { buildConfig = {}, builder = require('webpack' const DEV = (buildSettings.mode === 'dev'); const DIST = (buildSettings.mode === 'dist'); - const ENV = DIST ? 'production' : 'development'; + let ENV = DIST ? 'production' : null; + ENV = DEV ? 'development' : buildSettings.mode; const entry = getAbsolutePath(getValueFromPotentialObject(buildSettings.input, target)); const outputPath = getAbsolutePath(getValueFromPotentialObject(buildSettings.output, target)); @@ -128,7 +129,8 @@ export default ({ previousValue: { buildConfig = {}, builder = require('webpack' new builder.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify(ENV), '__DEV__': DEV, - '__DIST__': DIST + '__DIST__': DIST, + '__CWD__': JSON.stringify(process.cwd()) }) );
Added fallback for modes other than dev and dist, and added __CWD__
rocjs_extensions
train
ef31334f16567cd4f65af40e316702a3b2f695c0
diff --git a/bosh-director/spec/unit/config_spec.rb b/bosh-director/spec/unit/config_spec.rb index <HASH>..<HASH> 100644 --- a/bosh-director/spec/unit/config_spec.rb +++ b/bosh-director/spec/unit/config_spec.rb @@ -20,4 +20,4 @@ describe Bosh::Director::Config do end end -end \ No newline at end of file +end diff --git a/bosh-director/spec/unit/vm_creator_spec.rb b/bosh-director/spec/unit/vm_creator_spec.rb index <HASH>..<HASH> 100644 --- a/bosh-director/spec/unit/vm_creator_spec.rb +++ b/bosh-director/spec/unit/vm_creator_spec.rb @@ -107,6 +107,15 @@ describe Bosh::Director::VmCreator do }.to raise_error(Bosh::Clouds::VMCreationFailed) end + it 'should try exactly five times when it is a retryable error' do + @cloud.should_receive(:create_vm).exactly(5).times.and_raise(Bosh::Clouds::VMCreationFailed.new(true)) + + expect { + vm = Bosh::Director::VmCreator.new.create(@deployment, @stemcell, @resource_pool_spec.cloud_properties, + @network_settings, nil, @resource_pool_spec.env) + }.to raise_error(Bosh::Clouds::VMCreationFailed) + end + it "should have deep copy of environment" do Bosh::Director::Config.encryption = true env_id = nil
Back-fill test for create_vm retry in director
cloudfoundry_bosh
train
f0c143b948482fb781ab7e3a8f64cee46bf4ecdd
diff --git a/src/labels/label_multipoint.js b/src/labels/label_multipoint.js index <HASH>..<HASH> 100644 --- a/src/labels/label_multipoint.js +++ b/src/labels/label_multipoint.js @@ -22,7 +22,14 @@ export default function fitToLine (line, size, options) { switch (strategy){ case PLACEMENT.SPACED: - let {positions, angles} = getPositionsAndAngles(line, options); + let result = getPositionsAndAngles(line, options); + // false will be returned if line have no length + if (!result) { + return []; + } + + let positions = result.positions; + let angles = result.angles; for (let i = 0; i < positions.length; i++){ let position = positions[i]; let angle = angles[i]; @@ -69,7 +76,12 @@ function getPositionsAndAngles(line, options){ let spacing = (options.placement_spacing || default_spacing) * upp; let length = getLineLength(line); - let num_labels = Math.floor(length / spacing); + + if (length === 0){ + return false; + } + + let num_labels = Math.max(Math.floor(length / spacing), 1); let remainder = length - (num_labels - 1) * spacing; let positions = []; @@ -104,6 +116,7 @@ function norm(p, q){ function interpolateLine(line, distance, options){ let sum = 0; + let position, angle; for (let i = 0; i < line.length-1; i++){ let p = line[i]; let q = line[i+1]; @@ -111,11 +124,12 @@ function interpolateLine(line, distance, options){ sum += norm(p, q); if (sum > distance){ - let position = interpolateSegment(p, q, sum - distance); - let angle = getAngle(p, q, options.angle); - return {position, angle}; + position = interpolateSegment(p, q, sum - distance); + angle = getAngle(p, q, options.angle); + break; } } + return {position, angle}; } function interpolateSegment(p, q, distance){
Make sure at least one multipoint label is placed on a line
tangrams_tangram
train
3d130ac0164378aba627b925f982a08e2f9a1560
diff --git a/pyaavso/formats/visual.py b/pyaavso/formats/visual.py index <HASH>..<HASH> 100644 --- a/pyaavso/formats/visual.py +++ b/pyaavso/formats/visual.py @@ -2,4 +2,9 @@ from __future__ import unicode_literals class VisualFormatWriter(object): - pass + """ + A class responsible for writing observation data in AAVSO + `Visual File Format`_. + + .. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format + """
Added a basic docstring to VisualFormatWriter.
zsiciarz_pyaavso
train
c367c5bacc51fe99f674255f945d7a9b12a23861
diff --git a/aeron-client/src/main/java/io/aeron/logbuffer/ExclusiveTermAppender.java b/aeron-client/src/main/java/io/aeron/logbuffer/ExclusiveTermAppender.java index <HASH>..<HASH> 100644 --- a/aeron-client/src/main/java/io/aeron/logbuffer/ExclusiveTermAppender.java +++ b/aeron-client/src/main/java/io/aeron/logbuffer/ExclusiveTermAppender.java @@ -43,7 +43,7 @@ import static org.agrona.BitUtil.align; * A message of type {@link FrameDescriptor#PADDING_FRAME_TYPE} is appended at the end of the buffer if claimed * space is not sufficiently large to accommodate the message about to be written. */ -public class ExclusiveTermAppender +public final class ExclusiveTermAppender { /** * The append operation tripped the end of the buffer and needs to rotate. @@ -51,7 +51,6 @@ public class ExclusiveTermAppender public static final int FAILED = -1; private final long tailAddressOffset; - private final byte[] tailBuffer; private final UnsafeBuffer termBuffer; /** @@ -68,7 +67,6 @@ public class ExclusiveTermAppender metaDataBuffer.boundsCheck(tailCounterOffset, SIZE_OF_LONG); this.termBuffer = termBuffer; - tailBuffer = metaDataBuffer.byteArray(); tailAddressOffset = metaDataBuffer.addressOffset() + tailCounterOffset; } @@ -621,6 +619,6 @@ public class ExclusiveTermAppender private void putRawTailOrdered(final int termId, final int termOffset) { - UnsafeAccess.UNSAFE.putOrderedLong(tailBuffer, tailAddressOffset, packTail(termId, termOffset)); + UnsafeAccess.UNSAFE.putOrderedLong(null, tailAddressOffset, packTail(termId, termOffset)); } } diff --git a/aeron-client/src/main/java/io/aeron/logbuffer/TermAppender.java b/aeron-client/src/main/java/io/aeron/logbuffer/TermAppender.java index <HASH>..<HASH> 100644 --- a/aeron-client/src/main/java/io/aeron/logbuffer/TermAppender.java +++ b/aeron-client/src/main/java/io/aeron/logbuffer/TermAppender.java @@ -49,7 +49,7 @@ import static java.nio.ByteOrder.LITTLE_ENDIAN; * A message of type {@link FrameDescriptor#PADDING_FRAME_TYPE} is appended at the end of the buffer if claimed * space is not sufficiently large to accommodate the message about to be written. */ -public class TermAppender +public final class TermAppender { /** * The append operation failed because it was past the end of the buffer. @@ -57,7 +57,6 @@ public class TermAppender public static final int FAILED = -2; private final long tailAddressOffset; - private final byte[] tailBuffer; private final UnsafeBuffer termBuffer; /** @@ -73,7 +72,6 @@ public class TermAppender metaDataBuffer.boundsCheck(tailCounterOffset, SIZE_OF_LONG); this.termBuffer = termBuffer; - tailBuffer = metaDataBuffer.byteArray(); tailAddressOffset = metaDataBuffer.addressOffset() + tailCounterOffset; } @@ -84,7 +82,7 @@ public class TermAppender */ public long rawTailVolatile() { - return UnsafeAccess.UNSAFE.getLongVolatile(tailBuffer, tailAddressOffset); + return UnsafeAccess.UNSAFE.getLongVolatile(null, tailAddressOffset); } /** @@ -627,6 +625,6 @@ public class TermAppender private long getAndAddRawTail(final int alignedLength) { - return UnsafeAccess.UNSAFE.getAndAddLong(tailBuffer, tailAddressOffset, alignedLength); + return UnsafeAccess.UNSAFE.getAndAddLong(null, tailAddressOffset, alignedLength); } }
[Java] Remove unneeded byteArray field from term appenders.
real-logic_aeron
train
09edef40d70e17ebdefb0c3eb6bfeaa6b16ac7ac
diff --git a/BaseModel.php b/BaseModel.php index <HASH>..<HASH> 100644 --- a/BaseModel.php +++ b/BaseModel.php @@ -455,11 +455,11 @@ class BaseModel extends Record implements JsonSerializable return $this->$record; } - public function load($names) + public function includes($names) { foreach ((array) $names as $name) { // load relations - $record = $this->$name(); + $record = $this->{'get' . ucfirst($name)}(); $baseName = lcfirst(end(explode('\\', get_class($record))));
load => includes, use getXxx as the relation accessor method #<I>
miaoxing_plugin
train
19a2250d73f77bab39399dd2c36251767929c2ff
diff --git a/api/app/handler.go b/api/app/handler.go index <HASH>..<HASH> 100644 --- a/api/app/handler.go +++ b/api/app/handler.go @@ -101,9 +101,8 @@ func CloneRepositoryHandler(w http.ResponseWriter, r *http.Request) error { if err != nil { return err } - out, err = installDeps(&app, nil, nil) + _, err = installDeps(&app, w, w) if err != nil { - write(w, out) return err } err = write(w, out)
set the response writer as stdout and stderr for installDeps
tsuru_tsuru
train
66bff0ef0582e2d2da89c08f45be8247a8dbc203
diff --git a/public/js/views/standard-list.js b/public/js/views/standard-list.js index <HASH>..<HASH> 100644 --- a/public/js/views/standard-list.js +++ b/public/js/views/standard-list.js @@ -62,6 +62,7 @@ define(function (require) { } }, + // Init an individual row in the list initRow: function (row) { // Find vars @@ -156,6 +157,9 @@ define(function (require) { $helper.children().each(function(index) { $(this).width($originals.eq(index).width()); }); + + // Without this, the size was being inflated by the border + $helper.css('width', (tr.width()) + 'px'); return $helper; }, diff --git a/sass/partials/_listing.scss b/sass/partials/_listing.scss index <HASH>..<HASH> 100644 --- a/sass/partials/_listing.scss +++ b/sass/partials/_listing.scss @@ -108,7 +108,20 @@ position: relative; left: -1px; top: -1px; + } + + // Fix for long cells not looking right + // https://github.com/BKWLD/decoy/issues/53 + display: block !important; + white-space: nowrap; + @include box-sizing(border-box); + font-size: 0; // Remove the whitespace between tds + td { + display: inline-block; + font-size: 14px; // Put the font size back + } + } tr { cursor: move; diff --git a/views/shared/list/_table.php b/views/shared/list/_table.php index <HASH>..<HASH> 100644 --- a/views/shared/list/_table.php +++ b/views/shared/list/_table.php @@ -45,9 +45,11 @@ if (!$many_to_many && isset($iterator[0]->visible)) $actions++; data-model-id="<?=$many_to_many?$item->pivot_id(): $item->id?>" <? - // Add positoin value from the row or from the pivor table - if (isset($item->position)) echo "data-position='{$item->position}'"; - elseif (isset($item->pivot->position)) echo "data-position='{$item->pivot->position}'"; + // Add position value from the row or from the pivot table. Item must be converted to an array + // to this because Laravel doesn't test for __isset explicitly: https://github.com/laravel/laravel/pull/1678 + $test = $item->to_array(); + if (isset($test['position'])) echo "data-position='{$item->position}'"; + elseif (isset($test['pivot']['position'])) echo "data-position='{$item->pivot->position}'"; ?> > <td><input type="checkbox" name="select-row"></td>
Fixing drag and drop and improving it too, fixes #<I>
BKWLD_decoy
train
14f77607910df94b2e175b5d85da730cd366b73a
diff --git a/lib/searchkick/model.rb b/lib/searchkick/model.rb index <HASH>..<HASH> 100644 --- a/lib/searchkick/model.rb +++ b/lib/searchkick/model.rb @@ -78,7 +78,7 @@ module Searchkick def searchkick_reindex(method_name = nil, **options) scoped = Searchkick.relation?(self) - relation = scoped ? all : searchkick_klass + relation = scoped ? all : searchkick_klass.all # prevent scope from affecting search_data unscoped do searchkick_index.reindex(relation, method_name, scoped: scoped, **options) diff --git a/test/reindex_test.rb b/test/reindex_test.rb index <HASH>..<HASH> 100644 --- a/test/reindex_test.rb +++ b/test/reindex_test.rb @@ -185,6 +185,12 @@ class ReindexTest < Minitest::Test assert_match "unsupported keywords: :async", error.message end + def test_full_default_scope + store_names ["Test", "Test 2"], Band, reindex: false + Band.reindex + assert_search "*", ["Test"], {load: false}, Band + end + def test_callbacks_false Searchkick.callbacks(false) do store_names ["Product A", "Product B"]
Updated default_scope behavior to match Searchkick 5 [skip ci]
ankane_searchkick
train
d07eca5997c55fd61e6d5c857a23659251d8505f
diff --git a/app/controllers/devise_token_auth/registrations_controller.rb b/app/controllers/devise_token_auth/registrations_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/devise_token_auth/registrations_controller.rb +++ b/app/controllers/devise_token_auth/registrations_controller.rb @@ -94,8 +94,7 @@ module DeviseTokenAuth def update if @resource - - if @resource.update_attributes(account_update_params) + if @resource.send(resource_update_method, account_update_params) yield @resource if block_given? render json: { status: 'success', @@ -142,6 +141,14 @@ module DeviseTokenAuth private + def resource_update_method + if account_update_params.has_key?(:current_password) + "update_with_password" + else + "update_attributes" + end + end + def validate_sign_up_params validate_post_data sign_up_params, 'Please submit proper sign up data in request body.' end diff --git a/test/controllers/devise_token_auth/registrations_controller_test.rb b/test/controllers/devise_token_auth/registrations_controller_test.rb index <HASH>..<HASH> 100644 --- a/test/controllers/devise_token_auth/registrations_controller_test.rb +++ b/test/controllers/devise_token_auth/registrations_controller_test.rb @@ -440,6 +440,18 @@ class DeviseTokenAuth::RegistrationsControllerTest < ActionDispatch::Integration assert_equal @email.downcase, @existing_user.email assert_equal @email.downcase, @existing_user.uid end + + test "Supply current password" do + @request_params.merge!( + current_password: "secret123", + email: "[email protected]", + ) + + put "/auth", @request_params, @auth_headers + @data = JSON.parse(response.body) + @existing_user.reload + assert_equal @existing_user.email, "[email protected]" + end end describe 'validate non-empty body' do diff --git a/test/dummy/app/controllers/application_controller.rb b/test/dummy/app/controllers/application_controller.rb index <HASH>..<HASH> 100644 --- a/test/dummy/app/controllers/application_controller.rb +++ b/test/dummy/app/controllers/application_controller.rb @@ -10,5 +10,6 @@ class ApplicationController < ActionController::Base devise_parameter_sanitizer.for(:sign_up) << :favorite_color devise_parameter_sanitizer.for(:account_update) << :operating_thetan devise_parameter_sanitizer.for(:account_update) << :favorite_color + devise_parameter_sanitizer.for(:account_update) << :current_password end end
Allow current_password to be supplied when updating profile.
lynndylanhurley_devise_token_auth
train
1ad1c5e87e1d3988832833111d14754e9e7a68f7
diff --git a/stagpy/__init__.py b/stagpy/__init__.py index <HASH>..<HASH> 100644 --- a/stagpy/__init__.py +++ b/stagpy/__init__.py @@ -68,16 +68,16 @@ def load_mplstyle(): plt = importlib.import_module('matplotlib.pyplot') if conf.plot.mplstyle: for style in conf.plot.mplstyle.split(): - stfile = config.CONFIG_DIR / (style + '.mplstyle') - if stfile.is_file(): - style = str(stfile) - if ISOLATED: - break + if not ISOLATED: + stfile = config.CONFIG_DIR / (style + '.mplstyle') + if stfile.is_file(): + style = str(stfile) try: plt.style.use(style) except OSError: - print('Cannot import style {}.'.format(style), - file=sys.stderr) + if not ISOLATED or DEBUG: + print('Cannot import style {}.'.format(style), + file=sys.stderr) conf.plot.mplstyle = '' if conf.plot.xkcd: plt.xkcd()
Don't report faulty mplstyles when ISOLATED; report them anyway if DEBUG
StagPython_StagPy
train
ab2d66880be34fba6773466e9ba31c7829cb400c
diff --git a/addok/core.py b/addok/core.py index <HASH>..<HASH> 100644 --- a/addok/core.py +++ b/addok/core.py @@ -360,7 +360,9 @@ class Search(BaseHelper): self.keys = [t.db_key for t in self.meaningful] if self.bucket_empty: self.new_bucket(self.keys, 10) - if self.has_cream(): + if not self._autocomplete and self.has_cream(): + # Do not check cream before computing autocomplete when + # autocomplete is on. self.debug('Cream found. Returning.') return True if not self.bucket_empty:
Do not check cream before autocompleting when autocomplete is on. Rationale: daumaz will never catch daumazan because daumaz exists and so will be considered as cream
addok_addok
train
344b768a4c2569ade173f1c9f8a4dcd2232023b0
diff --git a/src/entity/camera.js b/src/entity/camera.js index <HASH>..<HASH> 100644 --- a/src/entity/camera.js +++ b/src/entity/camera.js @@ -292,10 +292,13 @@ /** * shake the camera - * @param {int} intensity - * @param {int} duration - * @param {axis} axis AXIS.HORIZONTAL, AXIS.VERTICAL, AXIS.BOTH - * @param {function} [onComplete] callback once shaking is over + * @param {int} intensity maximum offset that the screen can be moved while shaking + * @param {int} duration expressed in frame + * @param {axis} axis specify on which axis you want the shake effect (AXIS.HORIZONTAL, AXIS.VERTICAL, AXIS.BOTH) + * @param {function} [onComplete] callback once shaking effect is over + * @example + * // shake it baby ! + * me.game.viewport.shake(10, 30, me.game.viewport.AXIS.BOTH); */ shake : function (intensity, duration, axis, onComplete) @@ -402,23 +405,31 @@ return this.checkAxisAligned(rect); }, - /** + /** * @private - * render the camera effects - */ - - draw : function(context) - { - // fading effect - if (this._fadeIn.alpha < 1.0) - me.video.clearSurface(context, me.utils.HexToRGB(this._fadeIn.color, this._fadeIn.alpha)); - - // flashing effect + * render the camera effects + */ + draw : function(context) + { + // fading effect + if (this._fadeIn.alpha < 1.0) + { + context.globalAlpha = this._fadeIn.alpha; + me.video.clearSurface(context, me.utils.HexToRGB(this._fadeIn.color)); + // set back full opacity + context.globalAlpha = 1.0; + } + // flashing effect if (this._fadeOut.alpha > 0.0) - me.video.clearSurface(context, me.utils.HexToRGB(this._fadeOut.color, this._fadeOut.alpha)); - } + { + context.globalAlpha = this._fadeOut.alpha; + me.video.clearSurface(context, me.utils.HexToRGB(this._fadeOut.color)); + // set back full opacity + context.globalAlpha = 1.0; + } + } - }); + }); /*---------------------------------------------------------*/ // expose our stuff to the global scope
Use "globalAlpha" as well here for fading effects, and completed other function inline documentation.
melonjs_melonJS
train
6e8cd94d25e6d9f53420e0f7ee8f206feb3ba832
diff --git a/trinity/protocol/common/managers.py b/trinity/protocol/common/managers.py index <HASH>..<HASH> 100644 --- a/trinity/protocol/common/managers.py +++ b/trinity/protocol/common/managers.py @@ -248,7 +248,14 @@ class ExchangeManager(Generic[TRequestPayload, TResponsePayload, TResult]): timeout: float = None) -> TResult: if not self.is_operational: - raise ValidationError("You must call `launch_service` before initiating a peer request") + if self.service is None or not self.service.is_cancelled: + raise ValidationError( + f"Must call `launch_service` before sending request to {self._peer}" + ) + else: + raise PeerConnectionLost( + f"Response stream closed before sending request to {self._peer}" + ) stream = self._response_stream
Fix the error raised when requesting from a peer after cancellation
ethereum_py-evm
train
5bb8bd8d5685bcd8f18c97a5a56cdec8a31560a9
diff --git a/oauth2client/client.py b/oauth2client/client.py index <HASH>..<HASH> 100644 --- a/oauth2client/client.py +++ b/oauth2client/client.py @@ -760,6 +760,8 @@ class OAuth2Credentials(Credentials): d = json.loads(content) if 'error' in d: error_msg = d['error'] + if 'error_description' in d: + error_msg += ': ' + d['error_description'] self.invalid = True if self.store: self.store.locked_put(self)
Added error description to AccessTokenRefreshError. The JSON response returned can contain an "error_description" field that contains additional information about the error. If found, it is appended to the error message.
googleapis_oauth2client
train
dd7f338c40c7eb629b1f2b80a0058a98c3a114c6
diff --git a/core/css.js b/core/css.js index <HASH>..<HASH> 100644 --- a/core/css.js +++ b/core/css.js @@ -216,7 +216,7 @@ Blockly.Css.CONTENT = [ '}', '.blocklyDropDownDiv {', - 'position: absolute;', + 'position: fixed;', 'left: 0;', 'top: 0;', 'z-index: 1000;', @@ -228,6 +228,11 @@ Blockly.Css.CONTENT = [ '-webkit-user-select: none;', '}', + '.blocklyDropDownContent {', + 'max-height: 300px;', // @todo: spec for maximum height. + 'overflow: auto;', + '}', + '.blocklyDropDownArrow {', 'position: absolute;', 'left: 0;',
Temporary fix for page-breaking drop-downs (#<I>)
LLK_scratch-blocks
train
94271d90372f097ab9326649c1830ab0992dffad
diff --git a/bezier/_surface_helpers.py b/bezier/_surface_helpers.py index <HASH>..<HASH> 100644 --- a/bezier/_surface_helpers.py +++ b/bezier/_surface_helpers.py @@ -27,6 +27,7 @@ import six from bezier import _curve_helpers from bezier import _helpers +from bezier import curved_polygon MAX_POLY_SUBDIVISIONS = 5 @@ -1399,6 +1400,31 @@ def verify_duplicates(duplicates, uniques): raise ValueError('Unexpected duplicate count', count) +def combine_intersections(intersections): + """Combine curve-curve intersections into curved polygon(s). + + Does so assuming each intersection lies on an edge of one of + two :class:`.Surface`-s. + + .. note :: + + This assumes that each ``intersection`` has been classified via + :func:`classify_intersection`. + + Args: + intersections (list): A list of :class:`.Intersection` objects + produced by :func:`.all_intersections` applied to each of the 9 + edge-edge pairs from a surface-surface pairing. + + Returns: + List[~bezier.curved_polygon.CurvedPolygon]: A. + """ + if len(intersections) == 0: + return [] + + raise NotImplementedError + + class IntersectionClassification(enum.Enum): """Enum classifying the "interior" curve in an intersection.""" first = 'first' diff --git a/tests/test__surface_helpers.py b/tests/test__surface_helpers.py index <HASH>..<HASH> 100644 --- a/tests/test__surface_helpers.py +++ b/tests/test__surface_helpers.py @@ -935,3 +935,20 @@ class Test_verify_duplicates(unittest.TestCase): uniq = make_intersect(self.LEFT, 0.375, self.RIGHT, 0.75) with self.assertRaises(ValueError): self._call_function_under_test([uniq, uniq], [uniq]) + + +class Test_combine_intersections(unittest.TestCase): + + @staticmethod + def _call_function_under_test(intersections): + from bezier import _surface_helpers + + return _surface_helpers.combine_intersections(intersections) + + def test_empty(self): + result = self._call_function_under_test([]) + self.assertEqual(result, []) + + def test_non_empty(self): + with self.assertRaises(NotImplementedError): + self._call_function_under_test([None])
Adding beginnings of `combine_intersections()`.
dhermes_bezier
train
196c280c4bc756c78af63b2e468519713709eff9
diff --git a/src/toil/provisioners/abstractProvisioner.py b/src/toil/provisioners/abstractProvisioner.py index <HASH>..<HASH> 100644 --- a/src/toil/provisioners/abstractProvisioner.py +++ b/src/toil/provisioners/abstractProvisioner.py @@ -97,6 +97,16 @@ class Shape(object): self.cores, self.disk, self.preemptable) + + def __hash__(self): + # Since we replaced __eq__ we need to replace __hash__ as well. + return hash( + (self.wallTime, + self.memory, + self.cores, + self.disk, + self.preemptable)) + class AbstractProvisioner(with_metaclass(ABCMeta, object)): """
Define __hash__ for Shape. We need this so that using shapes as keys works correctly. Python 3 started actually enforcing this. Fixes src/toil/test/provisioners/clusterScalerTest.py::BinPackingTest
DataBiosphere_toil
train
1adc4f0f2ae939b8d0e56281574062b78078a8de
diff --git a/plugins/inputs/x509_cert/x509_cert.go b/plugins/inputs/x509_cert/x509_cert.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/x509_cert/x509_cert.go +++ b/plugins/inputs/x509_cert/x509_cert.go @@ -211,6 +211,7 @@ func (c *X509Cert) Gather(acc telegraf.Accumulator) error { // name validation against the URL hostname. opts := x509.VerifyOptions{ Intermediates: x509.NewCertPool(), + KeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, } if i == 0 { if c.ServerName == "" { diff --git a/plugins/inputs/x509_cert/x509_cert_test.go b/plugins/inputs/x509_cert/x509_cert_test.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/x509_cert/x509_cert_test.go +++ b/plugins/inputs/x509_cert/x509_cert_test.go @@ -142,6 +142,7 @@ func TestGatherLocal(t *testing.T) { {name: "not a certificate", mode: 0640, content: "test", error: true}, {name: "wrong certificate", mode: 0640, content: wrongCert, error: true}, {name: "correct certificate", mode: 0640, content: pki.ReadServerCert()}, + {name: "correct client certificate", mode: 0640, content: pki.ReadClientCert()}, {name: "correct certificate and extra trailing space", mode: 0640, content: pki.ReadServerCert() + " "}, {name: "correct certificate and extra leading space", mode: 0640, content: " " + pki.ReadServerCert()}, {name: "correct multiple certificates", mode: 0640, content: pki.ReadServerCert() + pki.ReadCACert()},
Allow any key usage type on x<I> certificate (#<I>)
influxdata_telegraf
train
9d61bdcba8cd5d905ef582e80740e679ed00f738
diff --git a/src/com/jayantkrish/jklol/ccg/lambda/ExpressionParser.java b/src/com/jayantkrish/jklol/ccg/lambda/ExpressionParser.java index <HASH>..<HASH> 100644 --- a/src/com/jayantkrish/jklol/ccg/lambda/ExpressionParser.java +++ b/src/com/jayantkrish/jklol/ccg/lambda/ExpressionParser.java @@ -88,7 +88,7 @@ public class ExpressionParser<T> { ExpressionFactories.getTypeFactory()); } - private List<String> tokenize(String expression) { + public List<String> tokenize(String expression) { for (int i = 0; i < preprocessingPatterns.length; i++) { expression = Pattern.compile(preprocessingPatterns[i]).matcher(expression) .replaceAll(preprocessingReplacements[i]); @@ -97,7 +97,8 @@ public class ExpressionParser<T> { boolean inQuotes = false; int exprStart = -1; List<String> tokens = Lists.newArrayList(); - for (int i = 0; i < expression.length(); i++) { + int length = expression.length(); + for (int i = 0; i < length; i++) { char character = expression.charAt(i); boolean quoteOk = false; @@ -115,7 +116,7 @@ public class ExpressionParser<T> { } } Preconditions.checkState((character != openQuote && character != closeQuote) || quoteOk, - "Quoting error. Current: " + expression); + "Quoting error. Current: %s", expression); if (!inQuotes) { if ((whitespaceSeparated && Character.isWhitespace(character)) ||
fixing serious efficiency bug with expression parsing
jayantk_jklol
train
73f259732368fa13cd3e84f1e9d24b6469769304
diff --git a/build/breeze.base.debug.js b/build/breeze.base.debug.js index <HASH>..<HASH> 100644 --- a/build/breeze.base.debug.js +++ b/build/breeze.base.debug.js @@ -23,7 +23,7 @@ })(this, function (global) { "use strict"; var breeze = { - version: "1.5.17", + version: "1.6.0", metadataVersion: "1.0.5" }; ;/** diff --git a/build/breeze.debug.js b/build/breeze.debug.js index <HASH>..<HASH> 100644 --- a/build/breeze.debug.js +++ b/build/breeze.debug.js @@ -23,7 +23,7 @@ })(this, function (global) { "use strict"; var breeze = { - version: "1.5.17", + version: "1.6.0", metadataVersion: "1.0.5" }; ;/** diff --git a/build/breeze.intellisense.js b/build/breeze.intellisense.js index <HASH>..<HASH> 100644 --- a/build/breeze.intellisense.js +++ b/build/breeze.intellisense.js @@ -1,4 +1,4 @@ -// Generated on: Mon Nov 14 2016 15:25:18 GMT-0800 (Pacific Standard Time) +// Generated on: Mon Nov 14 2016 15:38:06 GMT-0800 (Pacific Standard Time) intellisense.annotate(breeze.core, { 'Enum': function() { /// <signature> diff --git a/src/_head.jsfrag b/src/_head.jsfrag index <HASH>..<HASH> 100644 --- a/src/_head.jsfrag +++ b/src/_head.jsfrag @@ -23,7 +23,7 @@ })(this, function (global) { "use strict"; var breeze = { - version: "1.5.17", + version: "1.6.0", metadataVersion: "1.0.5" }; \ No newline at end of file
Updated version number to <I>
Breeze_breeze.js
train
3745cd7cb0a7dc1cada5dd688b2f51128ea9203e
diff --git a/packages/tools/build-translation.js b/packages/tools/build-translation.js index <HASH>..<HASH> 100644 --- a/packages/tools/build-translation.js +++ b/packages/tools/build-translation.js @@ -166,19 +166,35 @@ async function createPotFile(potFilePath) { const extractor = new GettextExtractor(); + // In the following string: + // + // _('Hello %s', 'Scott') + // + // "Hello %s" is the `text` (or "msgstr" in gettext parlance) , and "Scott" + // is the `context` ("msgctxt"). + // + // gettext-extractor allows adding both the text and context to the pot + // file, however we should avoid this because a change in the context string + // would mark the associated string as fuzzy. We want to avoid this because + // the point of splitting into text and context is that even if the context + // changes we don't need to retranslate the text. We use this for URLs for + // instance. + // + // Because of this, below we don't set the "context" property. + const parser = extractor .createJsParser([ JsExtractors.callExpression('_', { arguments: { text: 0, - context: 1, + // context: 1, }, }), JsExtractors.callExpression('_n', { arguments: { text: 0, textPlural: 1, - context: 2, + // context: 2, }, }), ]);
Tools: Do not process context when running build-translation tool
laurent22_joplin
train
d4d22e6b9aa07df93e38503f21ad3b49b11fff92
diff --git a/simulator/src/main/java/com/hazelcast/simulator/worker/loadsupport/AbstractAsyncStreamer.java b/simulator/src/main/java/com/hazelcast/simulator/worker/loadsupport/AbstractAsyncStreamer.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/worker/loadsupport/AbstractAsyncStreamer.java +++ b/simulator/src/main/java/com/hazelcast/simulator/worker/loadsupport/AbstractAsyncStreamer.java @@ -58,8 +58,14 @@ abstract class AbstractAsyncStreamer<K, V> implements Streamer<K, V> { @SuppressWarnings("unchecked") public void pushEntry(K key, V value) { acquirePermit(1); - ICompletableFuture<V> future = storeAsync(key, value); - future.andThen(callback); + try { + ICompletableFuture<V> future = storeAsync(key, value); + future.andThen(callback); + } catch (Exception e) { + releasePermit(1); + + throw rethrow(e); + } } @Override @@ -101,7 +107,6 @@ abstract class AbstractAsyncStreamer<K, V> implements Streamer<K, V> { public void onResponse(V response) { releasePermit(1); counter.incrementAndGet(); - } @Override diff --git a/simulator/src/test/java/com/hazelcast/simulator/worker/loadsupport/AsyncMapStreamerTest.java b/simulator/src/test/java/com/hazelcast/simulator/worker/loadsupport/AsyncMapStreamerTest.java index <HASH>..<HASH> 100644 --- a/simulator/src/test/java/com/hazelcast/simulator/worker/loadsupport/AsyncMapStreamerTest.java +++ b/simulator/src/test/java/com/hazelcast/simulator/worker/loadsupport/AsyncMapStreamerTest.java @@ -4,12 +4,14 @@ import com.hazelcast.core.ExecutionCallback; import com.hazelcast.core.ICompletableFuture; import com.hazelcast.core.IMap; import com.hazelcast.util.EmptyStatement; +import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import static com.hazelcast.simulator.utils.CommonUtils.joinThread; +import static com.hazelcast.simulator.utils.FileUtils.deleteQuiet; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; @@ -37,6 +39,11 @@ public class AsyncMapStreamerTest { streamer = StreamerFactory.getInstance(map); } + @After + public void tearDown() { + deleteQuiet("1.exception"); + } + @Test public void testPushEntry() { when(map.putAsync(anyInt(), anyString())).thenReturn(future); @@ -98,7 +105,7 @@ public class AsyncMapStreamerTest { @Test public void testAwait_withExceptionOnPushEntry() { - doThrow(new IllegalArgumentException()).when(map).putAsync(anyInt(), anyString()); + doThrow(new IllegalArgumentException("expected exception")).when(map).putAsync(anyInt(), anyString()); Thread thread = new Thread() { @Override
Fixed AbstractAsyncStreamer to call releasePermit() if the future could not be created.
hazelcast_hazelcast-simulator
train
548385888e400254605d8edbb7b9dcc8ac27a478
diff --git a/TYPO3.Flow/Classes/TYPO3/Flow/Http/Headers.php b/TYPO3.Flow/Classes/TYPO3/Flow/Http/Headers.php index <HASH>..<HASH> 100644 --- a/TYPO3.Flow/Classes/TYPO3/Flow/Http/Headers.php +++ b/TYPO3.Flow/Classes/TYPO3/Flow/Http/Headers.php @@ -99,9 +99,6 @@ class Headers { * @api */ public function set($name, $values, $replaceExistingHeader = TRUE) { - if (strtoupper(substr($name, 0, 4)) === 'HTTP') { - throw new \InvalidArgumentException('The "HTTP" status header must be set via setStatus().', 1220541963); - } if (strtoupper(substr($name, 0, 10)) === 'SET-COOKIE') { throw new \InvalidArgumentException('The "Set-Cookie" headers must be set via setCookie().', 1345128153); } diff --git a/TYPO3.Flow/Tests/Unit/Http/HeadersTest.php b/TYPO3.Flow/Tests/Unit/Http/HeadersTest.php index <HASH>..<HASH> 100644 --- a/TYPO3.Flow/Tests/Unit/Http/HeadersTest.php +++ b/TYPO3.Flow/Tests/Unit/Http/HeadersTest.php @@ -290,6 +290,19 @@ class HeadersTest extends UnitTestCase { } /** + * @test + * + * Note: This is a fix for https://jira.neos.io/browse/FLOW-324 (see https://code.google.com/p/chromium/issues/detail?id=501095) + */ + public function setExceptsHttpsHeaders() { + $headers = new Headers(); + $headers->set('HTTPS', 1); + + // dummy assertion to suppress PHPUnit warning + $this->assertTrue(TRUE); + } + + /** * (RFC 2616 / 14.9.1) * * @test diff --git a/TYPO3.Flow/Tests/Unit/Http/RequestTest.php b/TYPO3.Flow/Tests/Unit/Http/RequestTest.php index <HASH>..<HASH> 100644 --- a/TYPO3.Flow/Tests/Unit/Http/RequestTest.php +++ b/TYPO3.Flow/Tests/Unit/Http/RequestTest.php @@ -1179,4 +1179,16 @@ class RequestTest extends UnitTestCase { $this->assertEquals($expectedUri, (string)$request->getUri()); } + /** + * @test + * + * Note: This is a fix for https://jira.neos.io/browse/FLOW-324 (see https://code.google.com/p/chromium/issues/detail?id=501095) + */ + public function constructorIgnoresHttpsHeader() { + $server = array ( + 'HTTP_HTTPS' => '1', + ); + new Request(array(), array(), array(), $server); + } + }
[BUGFIX] Accept incoming HTTPS headers This removes a check from ``Http\Headers`` that prevented any header starting with "HTTP". Background: A recent version of Google Chrome seems to send a ``HTTPS`` header for requests via SSL (see <URL>
neos_flow-development-collection
train
19a5889d213774aa3ad2ca7ef58d053d4cb4d779
diff --git a/cufflinks/plotlytools.py b/cufflinks/plotlytools.py index <HASH>..<HASH> 100644 --- a/cufflinks/plotlytools.py +++ b/cufflinks/plotlytools.py @@ -615,7 +615,7 @@ def _iplot(self,data=None,layout=None,filename='',sharing=None, # 'error_y','error_type','locations','lon','lat','asFrame','asDates','asFigure', # 'asImage','dimensions','asPlot','asUrl','online'] valid_kwargs = ['color','opacity','column','columns','labels','text','world_readable','colorbar'] - TRACE_KWARGS = ['hoverinfo'] + TRACE_KWARGS = ['hoverinfo','connectgaps'] PIE_KWARGS=['sort','pull','hole','textposition','textinfo','linecolor'] OHLC_KWARGS=['up_color','down_color','open','high','low','close','volume','name','decreasing','increasing'] SUBPLOT_KWARGS=['horizontal_spacing', 'vertical_spacing',
Added support for connectgaps
santosjorge_cufflinks
train
9d9b7e596b78214b73c153a35c1e7498470c2d14
diff --git a/src/Phergie/Irc/Parser.php b/src/Phergie/Irc/Parser.php index <HASH>..<HASH> 100644 --- a/src/Phergie/Irc/Parser.php +++ b/src/Phergie/Irc/Parser.php @@ -3,7 +3,7 @@ * Phergie (http://phergie.org) * * @link http://github.com/phergie/phergie-irc-parser for the canonical source repository - * @copyright Copyright (c) 2008-2012 Phergie Development Team (http://phergie.org) + * @copyright Copyright (c) 2008-2013 Phergie Development Team (http://phergie.org) * @license http://phergie.org/license New BSD License * @package Phergie\Irc */ diff --git a/src/Phergie/Irc/ParserInterface.php b/src/Phergie/Irc/ParserInterface.php index <HASH>..<HASH> 100644 --- a/src/Phergie/Irc/ParserInterface.php +++ b/src/Phergie/Irc/ParserInterface.php @@ -3,7 +3,7 @@ * Phergie (http://phergie.org) * * @link http://github.com/phergie/phergie-irc-parser for the canonical source repository - * @copyright Copyright (c) 2008-2012 Phergie Development Team (http://phergie.org) + * @copyright Copyright (c) 2008-2013 Phergie Development Team (http://phergie.org) * @license http://phergie.org/license New BSD License * @package Phergie\Irc */
Copyright year bump in src
phergie_phergie-irc-parser
train
652fdf2766209645c383ed492392c7df0e3daba6
diff --git a/PyFunceble/cli/processes/base.py b/PyFunceble/cli/processes/base.py index <HASH>..<HASH> 100644 --- a/PyFunceble/cli/processes/base.py +++ b/PyFunceble/cli/processes/base.py @@ -107,18 +107,24 @@ class ProcessesManagerBase: input_queue: Optional[queue.Queue] = None, output_queue: Optional[queue.Queue] = None, daemon: bool = False, + generate_input_queue: bool = True, + generate_output_queue: bool = True, ) -> None: self.manager = manager if input_queue is None: self.input_queue = self.manager.Queue() - else: + elif generate_input_queue: self.input_queue = input_queue + else: + self.input_queue = None if output_queue is None: self.output_queue = self.manager.Queue() - else: + elif generate_output_queue: self.output_queue = output_queue + else: + self.output_queue = None if max_worker is not None: self.max_worker = max_worker diff --git a/PyFunceble/cli/processes/migrator.py b/PyFunceble/cli/processes/migrator.py index <HASH>..<HASH> 100644 --- a/PyFunceble/cli/processes/migrator.py +++ b/PyFunceble/cli/processes/migrator.py @@ -296,9 +296,12 @@ class MigratorProcessesManager(ProcessesManagerBase): continue worker = MigratorWorker( - self.manager, name=f"pyfunceble_{method}", daemon=True + None, + name=f"pyfunceble_{method}", + daemon=True, + continuous_integration=self.continuous_integration, ) - worker.target_args = (self.continuous_integration,) + worker.target = getattr(self, method) self._created_workers.append(worker) diff --git a/PyFunceble/cli/processes/workers/base.py b/PyFunceble/cli/processes/workers/base.py index <HASH>..<HASH> 100644 --- a/PyFunceble/cli/processes/workers/base.py +++ b/PyFunceble/cli/processes/workers/base.py @@ -88,20 +88,20 @@ class WorkerBase(multiprocessing.Process): send_feeding_message: Optional[bool] = None accept_waiting_delay: Optional[bool] = None - __parent_connection: Optional[multiprocessing.connection.Connection] = None + _parent_connection: Optional[multiprocessing.connection.Connection] = None _child_connection: Optional[multiprocessing.connection.Connection] = None - __exception: Optional[multiprocessing.Pipe] = None + _exception: Optional[multiprocessing.Pipe] = None def __init__( self, - input_queue: queue.Queue, + input_queue: Optional[queue.Queue], output_queue: Optional[queue.Queue] = None, global_exit_event: Optional[multiprocessing.Event] = None, *, name: Optional[str] = None, daemon: Optional[bool] = None, continuous_integration: Optional[ContinuousIntegrationBase] = None, - configuration: Optional[dict] = None + configuration: Optional[dict] = None, ) -> None: self.configuration = configuration self.input_queue = input_queue @@ -112,8 +112,8 @@ class WorkerBase(multiprocessing.Process): self.global_exit_event = global_exit_event self.exit_it = multiprocessing.Event() - self.__parent_connection, self._child_connection = multiprocessing.Pipe() - self.__exception = None + self._parent_connection, self._child_connection = multiprocessing.Pipe() + self._exception = None self.send_feeding_message = True self.accept_waiting_delay = True @@ -133,10 +133,10 @@ class WorkerBase(multiprocessing.Process): Provides the exception of the current worker. 
""" - if self.__parent_connection.poll(): - self.__exception = self.__parent_connection.recv() + if self._parent_connection.poll(): + self._exception = self._parent_connection.recv() - return self.__exception + return self._exception def add_to_input_queue( self, data: Any, *, worker_name: Optional[str] = None diff --git a/PyFunceble/cli/processes/workers/migrator.py b/PyFunceble/cli/processes/workers/migrator.py index <HASH>..<HASH> 100644 --- a/PyFunceble/cli/processes/workers/migrator.py +++ b/PyFunceble/cli/processes/workers/migrator.py @@ -51,7 +51,6 @@ License: """ import traceback -from typing import Optional import PyFunceble.facility import PyFunceble.factory @@ -68,11 +67,10 @@ class MigratorWorker(WorkerBase): STD_NAME: str = "pyfunceble_migrator_worker" - target_args: Optional[list] = list() - def run(self) -> None: try: - self.target(*self.target_args) + + self.target(self.continuous_integration) self._child_connection.send(None) except Exception as exception: # pylint: disable=broad-except PyFunceble.facility.Logger.critical( diff --git a/PyFunceble/cli/system/launcher.py b/PyFunceble/cli/system/launcher.py index <HASH>..<HASH> 100644 --- a/PyFunceble/cli/system/launcher.py +++ b/PyFunceble/cli/system/launcher.py @@ -180,6 +180,8 @@ class SystemLauncher(SystemBase): self.manager, continuous_integration=self.continuous_integration, daemon=True, + generate_input_queue=False, + generate_output_queue=False, ) if PyFunceble.storage.CONFIGURATION.cli_testing.mining:
Fix pickling issue under MacOS and Windows.
funilrys_PyFunceble
train
1c7d31557f5b0db72b4ac40da0f817929f445b6e
diff --git a/src/SortIterator.php b/src/SortIterator.php index <HASH>..<HASH> 100644 --- a/src/SortIterator.php +++ b/src/SortIterator.php @@ -10,11 +10,16 @@ namespace Jasny\Iterator; class SortIterator implements \OuterIterator { /** - * @var \Iterator|\ArrayIterator + * @var \Iterator */ protected $iterator; /** + * @var \ArrayIterator + */ + protected $sortedIterator; + + /** * @var callable */ protected $compare; @@ -29,34 +34,9 @@ class SortIterator implements \OuterIterator public function __construct(\Iterator $iterator, callable $compare = null) { $this->compare = $compare; - - if ($iterator instanceof \ArrayIterator) { - $this->iterator = clone $iterator; - $this->sort(); - } else { - $this->iterator = $iterator; - } + $this->iterator = $iterator; } - /** - * Sort the values of the iterator. - * Requires traversing through the iterator, turning it into an array. - * - * @return void - */ - protected function sort(): void - { - if (!$this->iterator instanceof \ArrayIterator) { - $elements = iterator_to_array($this->iterator); - $this->iterator = new \ArrayIterator($elements); - } - - if (isset($this->compare)) { - $this->iterator->uasort($this->compare); - } else { - $this->iterator->asort(); - } - } /** * Return the current element @@ -65,7 +45,7 @@ class SortIterator implements \OuterIterator */ public function current() { - return $this->getInnerIterator()->current(); + return $this->getSortedIterator()->current(); } /** @@ -75,7 +55,7 @@ class SortIterator implements \OuterIterator */ public function next(): void { - $this->getInnerIterator()->next(); + $this->getSortedIterator()->next(); } /** @@ -85,7 +65,7 @@ class SortIterator implements \OuterIterator */ public function key() { - return $this->getInnerIterator()->key(); + return $this->getSortedIterator()->key(); } /** @@ -95,7 +75,7 @@ class SortIterator implements \OuterIterator */ public function valid(): bool { - return $this->getInnerIterator()->valid(); + return $this->getSortedIterator()->valid(); } /** @@ -105,7 +85,57 @@ class SortIterator implements \OuterIterator */ public function rewind(): void { - $this->getInnerIterator()->rewind(); + $this->getSortedIterator()->rewind(); + } + + + /** + * Convert the inner iterator to an ArrayIterator. + * + * @return \ArrayIterator + */ + protected function createArrayIterator(): \ArrayIterator + { + if ($this->iterator instanceof \ArrayIterator) { + return clone $this->iterator; + } + + $array = method_exists($this->iterator, 'toArray') + ? $this->iterator->toArray() + : iterator_to_array($this->iterator); + + return new \ArrayIterator($array); + } + + /** + * Sort the values of the iterator. + * Requires traversing through the iterator, turning it into an array. 
+ * + * @return void + */ + protected function initSortedIterator(): void + { + $this->sortedIterator = $this->createArrayIterator(); + + if (isset($this->compare)) { + $this->sortedIterator->uasort($this->compare); + } else { + $this->sortedIterator->asort(); + } + } + + /** + * Get the iterator with sorted values + * + * @return \ArrayIterator + */ + protected function getSortedIterator(): \ArrayIterator + { + if (!isset($this->sortedIterator)) { + $this->initSortedIterator(); + } + + return $this->sortedIterator; } @@ -116,10 +146,6 @@ class SortIterator implements \OuterIterator */ public function getInnerIterator(): \Iterator { - if (!$this->iterator instanceof \ArrayIterator) { - $this->sort(); - } - return $this->iterator; } } diff --git a/tests/SortIteratorTest.php b/tests/SortIteratorTest.php index <HASH>..<HASH> 100644 --- a/tests/SortIteratorTest.php +++ b/tests/SortIteratorTest.php @@ -53,7 +53,7 @@ class SortIteratorTest extends TestCase $this->assertEquals($this->sorted, array_values($result)); $this->assertNotEquals($values, array_values($result)); - $this->assertNotSame($inner, $iterator->getInnerIterator()); + $this->assertSame($inner, $iterator->getInnerIterator()); $this->assertInstanceOf(\ArrayIterator::class, $iterator->getInnerIterator()); $this->assertEquals($values, iterator_to_array($inner), "Original iterator should not be changed"); @@ -100,9 +100,7 @@ class SortIteratorTest extends TestCase $result = iterator_to_array($iterator); $this->assertEquals($this->sorted, array_values($result)); - - $this->assertNotSame($generator, $iterator->getInnerIterator()); - $this->assertInstanceOf(\ArrayIterator::class, $iterator->getInnerIterator()); + $this->assertSame($generator, $iterator->getInnerIterator()); } public function testIterateCallback()
The inner iterator should always return the iterator that has been passed. Fixed for SortIterator.
improved-php-library_iterable
train
d242848d39b0458792a5c566451a28eeb0d9161a
diff --git a/lib/engineyard-serverside/cli.rb b/lib/engineyard-serverside/cli.rb index <HASH>..<HASH> 100644 --- a/lib/engineyard-serverside/cli.rb +++ b/lib/engineyard-serverside/cli.rb @@ -93,6 +93,9 @@ module EY servers, config, shell = init_and_propagate(integrate_options, 'integrate') + # We have to rsync the entire app dir, so we need all the permissions to be correct! + shell.logged_system "sudo sh -l -c 'find #{app_dir} -not -user #{config.user} -or -not -group #{config.group} -exec chown #{config.user}:#{config.group} {} +'" + servers.each do |server| shell.logged_system server.sync_directory_command(app_dir) # we're just about to recreate this, so it has to be gone
Fix permissions across the whole app directory before integrate runs rsync. Rsync stumbles on anything with owner/group that isn't the user. [SS-<I>]
engineyard_engineyard-serverside
train
b79bab0920eef442cd713b4d828d3c3537613e0f
diff --git a/watchtower/wtdb/mock.go b/watchtower/wtdb/mock.go index <HASH>..<HASH> 100644 --- a/watchtower/wtdb/mock.go +++ b/watchtower/wtdb/mock.go @@ -2,16 +2,23 @@ package wtdb -import "sync" +import ( + "sync" + + "github.com/lightningnetwork/lnd/chainntnfs" +) type MockDB struct { - mu sync.Mutex - sessions map[SessionID]*SessionInfo + mu sync.Mutex + lastEpoch *chainntnfs.BlockEpoch + sessions map[SessionID]*SessionInfo + blobs map[BreachHint]map[SessionID]*SessionStateUpdate } func NewMockDB() *MockDB { return &MockDB{ sessions: make(map[SessionID]*SessionInfo), + blobs: make(map[BreachHint]map[SessionID]*SessionStateUpdate), } } @@ -29,6 +36,13 @@ func (db *MockDB) InsertStateUpdate(update *SessionStateUpdate) (uint16, error) return info.LastApplied, err } + sessionsToUpdates, ok := db.blobs[update.Hint] + if !ok { + sessionsToUpdates = make(map[SessionID]*SessionStateUpdate) + db.blobs[update.Hint] = sessionsToUpdates + } + sessionsToUpdates[update.ID] = update + return info.LastApplied, nil } @@ -55,3 +69,46 @@ func (db *MockDB) InsertSessionInfo(info *SessionInfo) error { return nil } + +func (db *MockDB) GetLookoutTip() (*chainntnfs.BlockEpoch, error) { + db.mu.Lock() + defer db.mu.Unlock() + + return db.lastEpoch, nil +} + +func (db *MockDB) QueryMatches(breachHints []BreachHint) ([]Match, error) { + db.mu.Lock() + defer db.mu.Unlock() + + var matches []Match + for _, hint := range breachHints { + sessionsToUpdates, ok := db.blobs[hint] + if !ok { + continue + } + + for id, update := range sessionsToUpdates { + info, ok := db.sessions[id] + if !ok { + panic("session not found") + } + + match := Match{ + ID: id, + SeqNum: update.SeqNum, + Hint: hint, + EncryptedBlob: update.EncryptedBlob, + SessionInfo: info, + } + matches = append(matches, match) + } + } + + return matches, nil +} + +func (db *MockDB) SetLookoutTip(epoch *chainntnfs.BlockEpoch) error { + db.lastEpoch = epoch + return nil +}
watchtower/wtdb/mock: adds lookout-related mock functions
lightningnetwork_lnd
train
c7bab1b28e71559802c20fddcf3353d04a654e54
diff --git a/Lib/ufo2ft/kernFeatureWriter.py b/Lib/ufo2ft/kernFeatureWriter.py index <HASH>..<HASH> 100644 --- a/Lib/ufo2ft/kernFeatureWriter.py +++ b/Lib/ufo2ft/kernFeatureWriter.py @@ -52,7 +52,10 @@ class KernFeatureWriter(AbstractFeatureWriter): self._collectFeaClasses() self._collectFeaClassKerning() + self._collectUfoClasses() + self._correctUfoClassNames() + self._collectUfoKerning() self._removeConflictingKerningRules() @@ -124,17 +127,45 @@ class KernFeatureWriter(AbstractFeatureWriter): for name, contents in self.groups.items(): if self._isClassName(self.leftUfoGroupRe, name): - self.leftUfoClasses[self._makeFeaClassName(name)] = contents + self.leftUfoClasses[name] = contents if self._isClassName(self.rightUfoGroupRe, name): - self.rightUfoClasses[self._makeFeaClassName(name)] = contents + self.rightUfoClasses[name] = contents + + def _correctUfoClassNames(self): + """Detect and replace OTF-illegal class names found in UFO kerning.""" + + for name, members in self.leftUfoClasses.items(): + newName = self._makeFeaClassName(name) + if name == newName: + continue + self.leftUfoClasses[newName] = members + del self.leftUfoClasses[name] + for pair, kerningVal in self.kerning.getLeft(name): + self.kerning[newName, pair[1]] = kerningVal + self.kerning.remove(pair) + + for name, members in self.rightUfoClasses.items(): + newName = self._makeFeaClassName(name) + if name == newName: + continue + self.rightUfoClasses[newName] = members + del self.rightUfoClasses[name] + for pair, kerningVal in self.kerning.getRight(name): + self.kerning[pair[0], newName] = kerningVal + self.kerning.remove(pair) def _collectUfoKerning(self): - """Sort UFO kerning rules into glyph pair or class rules.""" + """Sort UFO kerning rules into glyph pair or class rules. + + Assumes classes are present in the UFO's groups, though this is not + required by the UFO spec. Kerning rules using non-existent classes + should break the OTF compiler, so this *should* be a safe assumption. + """ for glyphPair, val in sorted(self.kerning.items()): left, right = glyphPair - leftIsClass = self._isClassName(self.leftUfoGroupRe, left) - rightIsClass = self._isClassName(self.rightUfoGroupRe, right) + leftIsClass = left in self.leftUfoClasses + rightIsClass = right in self.rightUfoClasses if leftIsClass: if rightIsClass: self.classPairKerning[glyphPair] = val
[kerning] Update UFO kerning with group names
googlefonts_ufo2ft
train
491c9c078f5aeaef39133d5e654a9d0d21e5f004
diff --git a/jujupy.py b/jujupy.py index <HASH>..<HASH> 100644 --- a/jujupy.py +++ b/jujupy.py @@ -578,7 +578,6 @@ class EnvJujuClient: if debug is not _backend.debug: raise ValueError('debug mismatch: {} {}'.format( debug, _backend.debug)) - self.feature_flags = set() if env is not None: if juju_home is None: if env.juju_home is None: diff --git a/tests/test_jujupy.py b/tests/test_jujupy.py index <HASH>..<HASH> 100644 --- a/tests/test_jujupy.py +++ b/tests/test_jujupy.py @@ -491,11 +491,8 @@ class FakeJujuClient(EnvJujuClient): _backend = FakeBackend(backend_state.controller, version=version, full_path=full_path, debug=debug) _backend.set_feature('jes', jes_enabled) - # Preserve feature flags - old_flags = set(_backend.feature_flags) super(FakeJujuClient, self).__init__( env, version, full_path, juju_home, debug, _backend=_backend) - _backend.feature_flags = old_flags self.bootstrap_replaces = {} def _get_env(self, env):
Don't set feature flags in EnvJujuClient constructor.
juju_juju
train
fb760314374a7b7811d433276baf4cd045a46c3a
diff --git a/spyderlib/plugins/help.py b/spyderlib/plugins/help.py index <HASH>..<HASH> 100644 --- a/spyderlib/plugins/help.py +++ b/spyderlib/plugins/help.py @@ -363,15 +363,11 @@ class Help(SpyderPluginWidget): self.plain_text.editor.toggle_wrap_mode(self.get_option('wrap')) # Add entries to read-only editor context-menu - font_action = create_action(self, _("&Font..."), None, - ima.icon('font'), _("Set font style"), - triggered=self.change_font) self.wrap_action = create_action(self, _("Wrap lines"), toggled=self.toggle_wrap_mode) self.wrap_action.setChecked(self.get_option('wrap')) self.plain_text.editor.readonly_menu.addSeparator() - add_actions(self.plain_text.editor.readonly_menu, - (font_action, self.wrap_action)) + add_actions(self.plain_text.editor.readonly_menu, (self.wrap_action,)) self.set_rich_text_font(self.get_plugin_font('rich_text')) @@ -633,15 +629,6 @@ class Help(SpyderPluginWidget): """Set plain text mode color scheme""" self.plain_text.set_color_scheme(color_scheme) - @Slot() - def change_font(self): - """Change console font""" - font, valid = QFontDialog.getFont(get_font(self.CONF_SECTION), self, - _("Select a new font")) - if valid: - self.set_plain_text_font(font) - set_font(font, self.CONF_SECTION) - @Slot(bool) def toggle_wrap_mode(self, checked): """Toggle wrap mode"""
Help: Remove the option to set a new font for its plain text widget
spyder-ide_spyder
train
7e03eb0da44a70eaf8b6e12bea74dd1396c0a8e0
diff --git a/d1_common_python/src/d1_common/util.py b/d1_common_python/src/d1_common/util.py index <HASH>..<HASH> 100644 --- a/d1_common_python/src/d1_common/util.py +++ b/d1_common_python/src/d1_common/util.py @@ -165,15 +165,21 @@ def urlencode(query, doseq=0): parameters in the output will match the order of parameters in the input. ''' - # Remove None parameters from query. - for k in query.keys(): - if query[k] is None: - del query[k] - if hasattr(query, "items"): + # Remove None parameters from query. Dictionaries are mutable, so we can + # remove the the items directly. dict.keys() creates a copy of the + # dictionary keys, making it safe to remove elements from the dictionary + # while iterating. + for k in query.keys(): + if query[k] is None: + del query[k] # mapping objects query = query.items() else: + # Remove None parameters from query. Tuples are immutable, so we have to + # build a new version that does not contain the elements we want to remove, + # and replace the original with it. + query = filter((lambda x: x[1] is not None), query) # it's a bother at times that strings and string-like objects are # sequences... try: diff --git a/d1_common_python/src/tests/test_testcasewithurlcompare.py b/d1_common_python/src/tests/test_testcasewithurlcompare.py index <HASH>..<HASH> 100644 --- a/d1_common_python/src/tests/test_testcasewithurlcompare.py +++ b/d1_common_python/src/tests/test_testcasewithurlcompare.py @@ -50,6 +50,7 @@ class Test_URLCompare(TestCaseWithURLCompare): self.failUnlessRaises(AssertionError, self.assertUrlEqual, a, b) #=============================================================================== + if __name__ == "__main__": argv = sys.argv if "--debug" in argv:
Fixed bug I had introduced in urlencode.
DataONEorg_d1_python
train
0f0805a2f03b8e9fd1d522b2200bf69898803bd8
diff --git a/ui/src/side_nav/components/NavItems.js b/ui/src/side_nav/components/NavItems.js index <HASH>..<HASH> 100644 --- a/ui/src/side_nav/components/NavItems.js +++ b/ui/src/side_nav/components/NavItems.js @@ -35,10 +35,6 @@ const NavHeader = React.createClass({ render() { const {link, title, useAnchor} = this.props - if (!link) { - return <div className="sidebar-menu--heading">{title}</div> - } - // Some nav items, such as Logout, need to hit an external link rather // than simply route to an internal page. Anchor tags serve that purpose. return useAnchor diff --git a/ui/src/side_nav/containers/SideNav.js b/ui/src/side_nav/containers/SideNav.js index <HASH>..<HASH> 100644 --- a/ui/src/side_nav/containers/SideNav.js +++ b/ui/src/side_nav/containers/SideNav.js @@ -83,9 +83,16 @@ const SideNav = React.createClass({ /> </NavBlock> <div className="sidebar--bottom"> - <NavBlock icon="heart"> - <NavHeader title="Woogles" /> - </NavBlock> + <div className="sidebar--item"> + <div className="sidebar--square"> + <span className="sidebar--icon icon zap" /> + </div> + <div className="sidebar-menu"> + <div className="sidebar-menu--heading"> + Version: {VERSION}{/* eslint no-undef */} + </div> + </div> + </div> {showLogout ? <NavBlock icon="user" className="sidebar--item-last"> <NavHeader diff --git a/ui/src/style/layout/sidebar.scss b/ui/src/style/layout/sidebar.scss index <HASH>..<HASH> 100644 --- a/ui/src/style/layout/sidebar.scss +++ b/ui/src/style/layout/sidebar.scss @@ -151,6 +151,7 @@ $sidebar-menu--gutter: 18px; @include gradient-h($sidebar-menu--item-bg-hover,$sidebar-menu--item-bg-hover-accent); color: $sidebar-menu--item-text-hover; } +.sidebar-menu--heading, .sidebar-menu--heading:link, .sidebar-menu--heading:visited, .sidebar-menu--heading:active,
Complete refactor of sidenav - Reduced DOM complexity - Fewer styles overall - Added an icon in the navbar to show version number - Still needs an eslint ignore
influxdata_influxdb
train
9abbb25449eac1f37eb62117fe72a8482355bc90
diff --git a/graphql/error/graphql_error.py b/graphql/error/graphql_error.py index <HASH>..<HASH> 100644 --- a/graphql/error/graphql_error.py +++ b/graphql/error/graphql_error.py @@ -39,14 +39,16 @@ class GraphQLError(Exception): """ path: Optional[List[Union[str, int]]] - """A list of GraphQL AST Nodes corresponding to this error""" + """ + + A list of field names and array indexes describing the JSON-path into the execution + response which corresponds to this error. + + Only included for errors during execution. + """ nodes: Optional[List["Node"]] - """The source GraphQL document for the first location of this error - - Note that if this Error represents more than one node, the source may not represent - nodes after the first node. - """ + """A list of GraphQL AST Nodes corresponding to this error""" source: Optional["Source"] """The source GraphQL document for the first location of this error
Fix docstrings for GraphQLErrors
graphql-python_graphql-core-next
train
f71a1e4eb8d68e50940e69c32d446507a6f9e211
diff --git a/src/ProcessControl.php b/src/ProcessControl.php index <HASH>..<HASH> 100644 --- a/src/ProcessControl.php +++ b/src/ProcessControl.php @@ -77,33 +77,35 @@ class ProcessControl * Check if the signal is catchable, then set the signal to be caught by $callable * * @param int $signal - * @param callable $callable + * @param callable|int $handler + * @param bool $restart_syscalls * * @return bool * @throws Exception\SignalNotCatchable * @throws \InvalidArgumentException */ - public function __invoke($signal, $callable) + public function __invoke($signal, $handler, $restart_syscalls = true) { if (! array_key_exists($signal, $this->catchable_signals)) { throw new SignalNotCatchable(sprintf("The singal '%d' is not catchable.", $signal)); }; - if (! is_callable($callable)) { - throw new \InvalidArgumentException('$callable must be a callable.'); + if (! is_callable($handler) && ! is_int($handler)) { + throw new \InvalidArgumentException('handler must be of type callable or int.'); } - return $this->catchSignal($signal, $callable); + return $this->catchSignal($signal, $handler, $restart_syscalls); } /** * @param int $signal - * @param callable $callable + * @param callable|int $handler + * @param bool $restart_syscalls * * @return bool */ - protected function catchSignal($signal, $callable) + protected function catchSignal($signal, $handler, $restart_syscalls) { - return pcntl_signal($signal, $callable); + return pcntl_signal($signal, $handler, $restart_syscalls); } }
Update API to comply with pcntl_signal function
auraphp_Aura.Cli
train
2101c24fc4628d3ad26e9190774f6120114c5a7e
diff --git a/src/Builder/Builder.php b/src/Builder/Builder.php index <HASH>..<HASH> 100644 --- a/src/Builder/Builder.php +++ b/src/Builder/Builder.php @@ -2,6 +2,8 @@ namespace p810\MySQL\Builder; +use PDOStatement; + use function ucfirst; use function is_array; use function array_map; @@ -67,6 +69,14 @@ abstract class Builder } /** + * Processes the result of the query + * + * @param \PDOStatement $statement + * @return mixed + */ + abstract public function process(PDOStatement $statement); + + /** * Prefixes column names with their corresponding tables, e.g. for a * query that joins data from foreign tables * diff --git a/src/Builder/Delete.php b/src/Builder/Delete.php index <HASH>..<HASH> 100644 --- a/src/Builder/Delete.php +++ b/src/Builder/Delete.php @@ -2,6 +2,8 @@ namespace p810\MySQL\Builder; +use PDOStatement; + class Delete extends Builder { use Grammar\Where; @@ -20,6 +22,14 @@ class Delete extends Builder protected $table; /** + * @inheritdoc + */ + public function process(PDOStatement $statement) + { + return $statement->rowCount(); + } + + /** * Specifies the table to remove data from * * @param string $table The table to remove data from diff --git a/src/Builder/Insert.php b/src/Builder/Insert.php index <HASH>..<HASH> 100644 --- a/src/Builder/Insert.php +++ b/src/Builder/Insert.php @@ -2,6 +2,7 @@ namespace p810\MySQL\Builder; +use PDO; use PDOStatement; use function array_map; @@ -54,6 +55,14 @@ class Insert extends Builder protected $updateOnDuplicate; /** + * @inheritdoc + */ + public function process(PDOStatement $statement) + { + return $statement->rowCount(); + } + + /** * Specifies the table that the data should be inserted into * * @param string $table The table to insert data into diff --git a/src/Builder/Replace.php b/src/Builder/Replace.php index <HASH>..<HASH> 100644 --- a/src/Builder/Replace.php +++ b/src/Builder/Replace.php @@ -2,6 +2,7 @@ namespace p810\MySQL\Builder; +use PDOStatement; use InvalidArgumentException; use p810\MySQL\Builder\Grammar\Expression; @@ -23,6 +24,14 @@ class Replace extends Insert /** * @inheritdoc + */ + public function process(PDOStatement $statement) + { + return $statement->rowCount(); + } + + /** + * @inheritdoc * @throws \InvalidArgumentException */ public function compilePriority(): ?string diff --git a/src/Builder/Select.php b/src/Builder/Select.php index <HASH>..<HASH> 100644 --- a/src/Builder/Select.php +++ b/src/Builder/Select.php @@ -2,6 +2,8 @@ namespace p810\MySQL\Builder; +use PDO; +use PDOStatement; use p810\MySQL\Exception\MissingArgumentException; use function is_array; @@ -42,6 +44,14 @@ class Select extends Builder protected $columns = '*'; /** + * @inheritdoc + */ + public function process(PDOStatement $statement) + { + return $statement->fetchAll(PDO::FETCH_OBJ); + } + + /** * Specifies which columns to return in the result set * * @param array|string $columns Either a string or an array; if an array, it can be associative to specify table prefixes diff --git a/src/Builder/Update.php b/src/Builder/Update.php index <HASH>..<HASH> 100644 --- a/src/Builder/Update.php +++ b/src/Builder/Update.php @@ -2,6 +2,7 @@ namespace p810\MySQL\Builder; +use PDOStatement; use p810\MySQL\Builder\Grammar\Expression; use function is_array; @@ -31,6 +32,14 @@ class Update extends Builder protected $table; /** + * @inheritdoc + */ + public function process(PDOStatement $statement) + { + return $statement->rowCount(); + } + + /** * Specifies the table to update data in * * @param string $table The table to update diff --git 
a/src/Query.php b/src/Query.php index <HASH>..<HASH> 100644 --- a/src/Query.php +++ b/src/Query.php @@ -11,11 +11,6 @@ use function method_exists; class Query { /** - * @var false|\PDOStatement - */ - public $statement; - - /** * @var \p810\MySQL\Builder\Builder */ protected $builder; @@ -63,18 +58,27 @@ class Query } /** - * Sets and executes a prepared query + * Executes a prepared query and returns the result * - * @return bool + * @param null|callable $processor An optional callback used to process the result of the query + * @param bool $callbackOnBool Whether to call the user-supplied $processor when \PDOStatement::execute() returns false + * @return mixed */ - public function execute(): bool + public function execute(?callable $processor = null, bool $callbackOnBool = false) { - $this->statement = $this->database->prepare( $this->builder->build() ); + $statement = $this->database->prepare($this->builder->build()); - if (! $this->statement instanceof PDOStatement) { + if (! $statement instanceof PDOStatement) { return false; } - return $this->statement->execute($this->builder->input); + $result = $statement->execute($this->builder->input); + $callback = $processor ?? [$this->builder, 'process']; + + if ($result || ($callbackOnBool && $processor)) { + $result = $callback($statement); + } + + return $result; } } \ No newline at end of file
Changes the way that results are handled from PDOStatement objects. A default callback, Builder::process(), will be invoked with the PDOStatement returned from a prepared query, unless the user specifies a separate callback in their call to Query::execute()
p810_mysql-helper
train
1f3153b702d4f6cd0aa0c0a94febc14449a58cee
diff --git a/release.py b/release.py index <HASH>..<HASH> 100644 --- a/release.py +++ b/release.py @@ -8,7 +8,6 @@ from urh import constants open("/tmp/urh_releasing", "w").close() script_dir = os.path.dirname(__file__) if not os.path.islink(__file__) else os.path.dirname(os.readlink(__file__)) -sys.path.append(os.path.realpath(os.path.join(script_dir, "src"))) rc = pytest.main(["--exitfirst", "tests"]) diff --git a/tests/TestInstallation.py b/tests/TestInstallation.py index <HASH>..<HASH> 100644 --- a/tests/TestInstallation.py +++ b/tests/TestInstallation.py @@ -102,7 +102,7 @@ class TestInstallation(unittest.TestCase): rc = vm_helper.send_command(r"python C:\urh\src\urh\cythonext\build.py") self.assertEqual(rc, 0) - rc = vm_helper.send_command(r"set PYTHONPATH={0}\src && py.test C:\urh\tests".format(target_dir)) + rc = vm_helper.send_command(r"py.test C:\urh\tests".format(target_dir)) self.assertEqual(rc, 0) vm_helper.send_command("pip install urh") @@ -131,7 +131,7 @@ class TestInstallation(unittest.TestCase): self.assertEqual(rc, 0) # Run Unit tests - rc = vm_helper.send_command("export PYTHONPATH='{0}/src' && {1}py.test {0}/tests".format(target_dir, python_bin_dir)) + rc = vm_helper.send_command("{1}py.test {0}/tests".format(target_dir, python_bin_dir)) self.assertEqual(rc, 0) vm_helper.send_command("{0}pip3 --no-cache-dir install urh".format(python_bin_dir)) diff --git a/tests/__init__.py b/tests/__init__.py index <HASH>..<HASH> 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,8 @@ +import os +import sys + +f = os.readlink(__file__) if os.path.islink(__file__) else __file__ +path = os.path.realpath(os.path.join(f, "..", "..", "src")) + +if path not in sys.path: + sys.path.append(path)
set pythonpath for tests in init.py
jopohl_urh
train
26ff8899c39039459b67e5dbe0fe7133c3653d9a
diff --git a/src/ol/PluggableMap.js b/src/ol/PluggableMap.js index <HASH>..<HASH> 100644 --- a/src/ol/PluggableMap.js +++ b/src/ol/PluggableMap.js @@ -934,8 +934,9 @@ class PluggableMap extends BaseObject { if (frameState) { const hints = frameState.viewHints; if (hints[ViewHint.ANIMATING] || hints[ViewHint.INTERACTING]) { - maxTotalLoading = 8; - maxNewLoads = 2; + const lowOnFrameBudget = Date.now() - frameState.time > 8; + maxTotalLoading = lowOnFrameBudget ? 0 : 8; + maxNewLoads = lowOnFrameBudget ? 0 : 2; } } if (tileQueue.getTilesLoading() < maxTotalLoading) { @@ -943,6 +944,7 @@ class PluggableMap extends BaseObject { tileQueue.loadMoreTiles(maxTotalLoading, maxNewLoads); } } + if (frameState && this.hasListener(RenderEventType.RENDERCOMPLETE) && !frameState.animate && !this.tileQueue_.getTilesLoading() && !getLoading(this.getLayers().getArray())) { this.renderer_.dispatchRenderEvent(RenderEventType.RENDERCOMPLETE, frameState);
Do not load new tiles when low on frame budget
openlayers_openlayers
train
a0c4a64c028e7636facddc82eab5c3290e9af280
diff --git a/raiden/main.py b/raiden/main.py index <HASH>..<HASH> 100644 --- a/raiden/main.py +++ b/raiden/main.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from gevent import monkey +monkey.patch_all() from ui.cli import run diff --git a/raiden/ui/cli.py b/raiden/ui/cli.py index <HASH>..<HASH> 100644 --- a/raiden/ui/cli.py +++ b/raiden/ui/cli.py @@ -168,6 +168,9 @@ def app(address, # pylint: disable=too-many-arguments,too-many-locals else: rpc_host, rpc_port = split_endpoint(endpoint) + # user may have provided registry and discovery contracts with leading 0x + registry_contract_address = registry_contract_address.strip('0x') + discovery_contract_address = discovery_contract_address.strip('0x') blockchain_service = BlockChainService( privatekey, decode_hex(registry_contract_address),
Some changes to enable running raiden as an app
raiden-network_raiden
train
0b5f97f8fa82deac552d42bab72a2722bb6bd430
diff --git a/tests/Test/Synapse/SocialLogin/SocialLoginServiceTest.php b/tests/Test/Synapse/SocialLogin/SocialLoginServiceTest.php index <HASH>..<HASH> 100644 --- a/tests/Test/Synapse/SocialLogin/SocialLoginServiceTest.php +++ b/tests/Test/Synapse/SocialLogin/SocialLoginServiceTest.php @@ -21,6 +21,20 @@ class SocialLoginServiceTest extends PHPUnit_Framework_TestCase $this->socialLoginService->setUserService($this->mockUserService); } + public function setupMockUserService() + { + $this->mockUserService = $this->getMockBuilder('Synapse\User\UserService') + ->disableOriginalConstructor() + ->getMock(); + } + + public function setupMockSocialLoginMapper() + { + $this->mockSocialLoginMapper = $this->getMockBuilder('Synapse\SocialLogin\SocialLoginMapper') + ->disableOriginalConstructor() + ->getMock(); + } + public function withSocialLoginMapperReturningEntity() { $this->mockSocialLoginMapper->expects($this->any()) @@ -39,20 +53,6 @@ class SocialLoginServiceTest extends PHPUnit_Framework_TestCase ->will($this->returnValue($userEntity)); } - public function setupMockUserService() - { - $this->mockUserService = $this->getMockBuilder('Synapse\User\UserService') - ->disableOriginalConstructor() - ->getMock(); - } - - public function setupMockSocialLoginMapper() - { - $this->mockSocialLoginMapper = $this->getMockBuilder('Synapse\SocialLogin\SocialLoginMapper') - ->disableOriginalConstructor() - ->getMock(); - } - public function createUserEntity() { return new UserEntity([
Refs #<I> - reordered method definition
synapsestudios_synapse-base
train
6c9e8477efb07672b3a52ea00e2f78e8cee63677
diff --git a/lib/edsl/watir_elements.rb b/lib/edsl/watir_elements.rb index <HASH>..<HASH> 100644 --- a/lib/edsl/watir_elements.rb +++ b/lib/edsl/watir_elements.rb @@ -3,7 +3,7 @@ require 'facets/string/snakecase' module EDSL # This module extends the DSL to include the various Watir elements module WatirElements - SPECIAL_ELEMENTS = %i[button a radio_set input select textarea].freeze + SPECIAL_ELEMENTS = %i[button a radio_set input select textarea ul footer frameset head header ol].freeze TEXT_ELEMENTS = Watir.tag_to_class.keys.reject { |k| SPECIAL_ELEMENTS.include?(k) }.map { |t| t.to_s.snakecase }.freeze TEXT_ELEMENTS.each do |tag| @@ -18,5 +18,8 @@ module EDSL SETABLE_ELEMENTS = %i[radio checkbox].freeze SETABLE_ELEMENTS.each { |tag| EDSL.define_accessor(tag, how: tag, default_method: :set?, assign_method: :set) } + + GENERIC_ELEMENTS = %i[ul footer frameset head header ol] + GENERIC_ELEMENTS.each { |tag| EDSL.define_accessor(tag, how: tag) } end end
Break out some of the watir elements that need to just return an element
Donavan_edsl
train
bbb79f47a758a36b7228058357c86b07af2feadc
diff --git a/message/output/email/message_output_email.php b/message/output/email/message_output_email.php index <HASH>..<HASH> 100644 --- a/message/output/email/message_output_email.php +++ b/message/output/email/message_output_email.php @@ -47,7 +47,10 @@ class message_output_email extends message_output { } //check user preference for where user wants email sent - $eventdata->userto->email = get_user_preferences('message_processor_email_email', $eventdata->userto->email, $eventdata->userto->id); + $preferedemail = get_user_preferences('message_processor_email_email', null, $eventdata->userto->id); + if (!empty($preferedemail)) { + $eventdata->userto->email = $preferedemail; + } $result = email_to_user($eventdata->userto, $eventdata->userfrom, $eventdata->subject, $eventdata->fullmessage, $eventdata->fullmessagehtml);
messaging MDL-<I> Last minute fix of bug during integration, empty user preference causing error
moodle_moodle
train
c14da7bd7c1cd32879071c36e283b2b1ba656d72
diff --git a/ella/core/views.py b/ella/core/views.py index <HASH>..<HASH> 100644 --- a/ella/core/views.py +++ b/ella/core/views.py @@ -278,10 +278,14 @@ def category_detail(request, category): }, context_instance=RequestContext(request) ) + def export_test(*args, **kwargs): return [] -@cache_this(method_key_getter, export_test, timeout=60*60) +def export_key(*args, **kwargs): + return 'export:%d:%d' % (settings.SITE_ID, kwargs.get('count', 0)) + +@cache_this(export_key, export_test, timeout=60*60) def export(request, count, models=None): if models is None: from ella.articles.models import Article
Fixed wrong cache generation for export banner - it didn't include SITE_ID. git-svn-id: <URL>
ella_ella
train
f8cf963bb5f7f2b26b9119298477faf6c60c4143
diff --git a/lancet/__init__.py b/lancet/__init__.py index <HASH>..<HASH> 100644 --- a/lancet/__init__.py +++ b/lancet/__init__.py @@ -409,6 +409,13 @@ class StaticArgs(BaseArgs): support for len(). """ + HTML = param.Callable(default=str, doc=''' + Callable to process HTML markup as returned by the 'html' + method. Default behaviour is to return the markup as a + string but if set to IPython.display.HTML, specifiers will + automatically displayed in tabular form in IPython notebook + when when the html method is called.''') + specs = param.List(default=[], constant=True, doc=''' The static list of specifications (ie. dictionaries) to be returned by the specifier. Float values are rounded to @@ -462,6 +469,39 @@ class StaticArgs(BaseArgs): alphagroups = [sorted(ddict[k]) for k in sorted(ddict)] return [el for group in alphagroups for el in group] + def _html_row(self, spec, columns): + row_strings = [] + for value in [spec[col] for col in columns]: + html_repr = value.html(html_fn=str) if hasattr(value, 'html') else str(value) + row_strings.append('<td>'+html_repr+'</td>') + return ' '.join(['<tr>'] + row_strings + ['</tr>']) + + def html(self, cols=None, html_fn=None, max_rows=None): + """ + Generate a HTML table for the specifier. + """ + html_fn = self.HTML if html_fn is None else html_fn + max_rows = len(self) if max_rows is None else max_rows + columns = self.varying_keys() if cols is None else cols + + all_varying = self.varying_keys() + if not all(col in all_varying for col in columns): + raise Exception('Columns must belong to the varying keys') + + summary = '<tr><td><b>%r<br>[%d items]</b></td></tr>' % (self.__class__.__name__, len(self)) + cspecs = [{'Key':k, 'Value':v} for (k,v) in self.constant_items()] + crows = [self._html_row(spec, ['Key', 'Value']) for spec in cspecs] + cheader_str = '<tr><td><b>Constant Key</b></td><td><b>Value</b></td></tr>' + + vrows = [self._html_row(spec,columns) for spec in self.specs[:max_rows]] + vheader_str= ' '.join(['<tr>'] + ['<td><b>'+str(col)+'</b></td>' for col in columns ] +['</tr>']) + ellipses = ' '.join(['<tr>'] + ['<td>...</td>' for col in columns ] +['</tr>']) + ellipse_str = ellipses if (max_rows < len(self)) else '' + + html_elements = ['<table>', summary, cheader_str] + crows + [vheader_str] + vrows + [ellipse_str, '</table>'] + html = '\n'.join(html_elements) + return html_fn(html) + def __len__(self): return len(self.specs) def _repr_pretty_(self, p, cycle): p.text(str(self))
HTML table output and display added to StaticArg specifiers. Intended for IPython, specifiers can be immediately displayed in tabular form using the 'html' method. For this to work, the HTML parameter needs to be set to the HTML class imported from IPython.display. This avoids making IPython a core dependency.
ioam_lancet
train
fa723128e00b3a3dda550c30460e4b69c99586f8
diff --git a/packages/build-tools/tasks/pattern-lab-tasks.js b/packages/build-tools/tasks/pattern-lab-tasks.js index <HASH>..<HASH> 100644 --- a/packages/build-tools/tasks/pattern-lab-tasks.js +++ b/packages/build-tools/tasks/pattern-lab-tasks.js @@ -142,7 +142,7 @@ async function watch() { // The watch event ~ same engine gulp uses https://www.npmjs.com/package/chokidar const watcher = chokidar.watch(watchedFiles, { - ignoreInitial: false, + ignoreInitial: true, cwd: process.cwd(), ignored: ['**/node_modules/**', '**/vendor/**'], });
refactor: update Pattern Lab to ignore files being initially watched -- prevents a double compile the first time the build tools boot up
bolt-design-system_bolt
train
1601e1a443b8b5dd229ab55434451a1f161f15c0
diff --git a/bika/lims/browser/analysisrequest/analysisrequests.py b/bika/lims/browser/analysisrequest/analysisrequests.py index <HASH>..<HASH> 100644 --- a/bika/lims/browser/analysisrequest/analysisrequests.py +++ b/bika/lims/browser/analysisrequest/analysisrequests.py @@ -193,7 +193,7 @@ class AnalysisRequestsView(BikaListingView): 'toggle': False}, 'state_title': { 'title': _('State'), - 'sortable': False, + 'sortable': True, 'index': 'review_state'}, 'getProfilesTitle': { 'title': _('Profile'),
Make review_state in AR listing sortable
senaite_senaite.core
train
65f6ce3b86a70d7a36b502b6012493b4ea75e57c
diff --git a/includes/functions/functions_mediadb.php b/includes/functions/functions_mediadb.php index <HASH>..<HASH> 100644 --- a/includes/functions/functions_mediadb.php +++ b/includes/functions/functions_mediadb.php @@ -231,8 +231,16 @@ function get_medialist($currentdir = false, $directory = "", $linkonly = false, $media["LINKS"] = array (); $media["CHANGE"] = ""; // Extract Format and Type from GEDCOM record - $media["FORM"] = strtolower(get_gedcom_value("FORM", 2, $row->m_gedrec)); - $media["TYPE"] = strtolower(get_gedcom_value("FORM:TYPE", 2, $row->m_gedrec)); + if (preg_match('/\n\d FORM (.+)/', $row->m_gedrec, $match)) { + $media['FORM']=$match[1]; + } else { + $media['FORM']=''; + } + if (preg_match('/\n\d TYPE (.+)/', $row->m_gedrec, $match)) { + $media['TYPE']=$match[1]; + } else { + $media['TYPE']=''; + } // Build a sortable key for the medialist $firstChar = substr($media["XREF"], 0, 1);
Fix: random media block: recognise FORM and TYPE tags at any level, not just 1 FILE/2 FORM/3 TYPE, for compatibility with GEDCOM <I> and other applications
fisharebest_webtrees
train
0a06392acb62b736ff9da3af29913c57da23d199
diff --git a/tests/test_directed_random_walk.py b/tests/test_directed_random_walk.py index <HASH>..<HASH> 100644 --- a/tests/test_directed_random_walk.py +++ b/tests/test_directed_random_walk.py @@ -13,7 +13,7 @@ def test_directed_random_walk(): directedHyperGraph.read('tests/data/dirhypergraph.txt') assert len(directedHyperGraph.nodes) == 5 - assert len(directedHyperGraph.hyperedges) == 5 + assert len(directedHyperGraph.hyperedges) == 4 ''' Test the incidence matrix @@ -21,8 +21,8 @@ def test_directed_random_walk(): directedHyperGraph.build_incidence_matrix() head= np.matrix('0 0 0 0; 1 0 0 0; 1 0 0 0; 0 1 0 0; 0 0 1 1') tail = np.matrix('1 0 1 0; 0 1 0 0; 0 0 1 0; 0 0 0 1; 0 0 0 0') - assert np.shape(directedHyperGraph.H_minus)==(5, 5) - assert np.shape(directedHyperGraph.H_plus)==(5, 5) + assert np.shape(directedHyperGraph.H_minus)==(5, 4) + assert np.shape(directedHyperGraph.H_plus)==(5, 4) P = directedHyperGraph.build_transition_matrix() print(P)
reverted to old graph for testing
Murali-group_halp
train
92bc469f581044948975e5eadface88829342bcd
diff --git a/pydoop/mapreduce/api.py b/pydoop/mapreduce/api.py index <HASH>..<HASH> 100644 --- a/pydoop/mapreduce/api.py +++ b/pydoop/mapreduce/api.py @@ -66,6 +66,18 @@ class JobConf(dict): '1' >>> jc.get_int('a') 1 + + .. warning:: + + For the most part, a JobConf object behaves like a :class:`dict`. + For backwards compatibility, however, there are two important exceptions: + + #. objects are constructed from a ``[key1, value1, key2, value2, + ...]`` sequence + + #. if ``k`` is not in ``jc``, ``jc.get(k)`` raises :exc:`RuntimeError` + instead of returning :obj:`None` (``jc.get(k, None)`` returns + :obj:`None` as in :class:`dict`). """ def __init__(self, values):
JobConf docs: added warning about the differences with dict [ci skip]
crs4_pydoop
train