Columns: hash (string, length 40), diff (string, length 131 to 114k), message (string, length 7 to 980), project (string, length 5 to 67), split (string, 1 class)
aa92920f7fabe45b4a915afb5ccec31985789cf1
diff --git a/src/Rct567/DomQuery/DomQuery.php b/src/Rct567/DomQuery/DomQuery.php index <HASH>..<HASH> 100644 --- a/src/Rct567/DomQuery/DomQuery.php +++ b/src/Rct567/DomQuery/DomQuery.php @@ -778,7 +778,6 @@ class DomQuery implements \IteratorAggregate, \Countable, \ArrayAccess $result = $this->createChildInstance(); if (isset($this->document) && $this->length > 0) { - $result->setDomDocument($this->document); foreach ($this->nodes as $node) { if (!is_null($node->nextSibling)) { @@ -806,7 +805,6 @@ class DomQuery implements \IteratorAggregate, \Countable, \ArrayAccess $result = $this->createChildInstance(); if (isset($this->document) && $this->length > 0) { - $result->setDomDocument($this->document); foreach ($this->nodes as $node) { // get all previous sibling of all nodes if (!is_null($node->previousSibling)) {
removed unnecessary setDomDocument
Rct567_DomQuery
train
4879a8347099c2729896b7742914300e702bbc5d
diff --git a/lib/hmmlearn/hmm.py b/lib/hmmlearn/hmm.py index <HASH>..<HASH> 100644 --- a/lib/hmmlearn/hmm.py +++ b/lib/hmmlearn/hmm.py @@ -667,29 +667,43 @@ class GMMHMM(_BaseHMM): nf = self.n_features nm = self.n_mix + def compute_cv(): + return np.cov(X.T) + self.min_covar * np.eye(nf) + # Default values for covariance prior parameters self._init_covar_priors() self._fix_priors_shape() main_kmeans = cluster.KMeans(n_clusters=nc, random_state=self.random_state) + cv = None # covariance matrix labels = main_kmeans.fit_predict(X) - kmeanses = [] + main_centroid = np.mean(main_kmeans.cluster_centers_, axis=0) + means = [] for label in range(nc): kmeans = cluster.KMeans(n_clusters=nm, random_state=self.random_state) - kmeans.fit(X[np.where(labels == label)]) - kmeanses.append(kmeans) + X_cluster = X[np.where(labels == label)] + if X_cluster.shape[0] >= nm: + kmeans.fit(X_cluster) + means.append(kmeans.cluster_centers_) + else: + if cv is None: + cv = compute_cv() + m_cluster = np.random.multivariate_normal(main_centroid, + cov=cv, + size=nm) + means.append(m_cluster) if self._needs_init("w", "weights_"): self.weights_ = np.full((nc, nm), 1 / nm) if self._needs_init("m", "means_"): - self.means_ = np.stack( - [kmeans.cluster_centers_ for kmeans in kmeanses]) + self.means_ = np.stack(means) if self._needs_init("c", "covars_"): - cv = np.cov(X.T) + self.min_covar * np.eye(nf) + if cv is None: + cv = compute_cv() if not cv.shape: cv.shape = (1, 1) if self.covariance_type == 'tied': diff --git a/lib/hmmlearn/tests/test_gmm_hmm_new.py b/lib/hmmlearn/tests/test_gmm_hmm_new.py index <HASH>..<HASH> 100644 --- a/lib/hmmlearn/tests/test_gmm_hmm_new.py +++ b/lib/hmmlearn/tests/test_gmm_hmm_new.py @@ -5,6 +5,8 @@ from . import assert_log_likelihood_increasing from . import normalized from ..hmm import GMMHMM +from numpy.testing import assert_array_almost_equal, assert_array_less + def sample_from_parallelepiped(low, high, n_samples, random_state): (n_features,) = low.shape @@ -194,3 +196,18 @@ class TestGMMHMMWithTiedCovars(GMMHMMTestMixin): class TestGMMHMMWithFullCovars(GMMHMMTestMixin): covariance_type = 'full' + + +class TestGMMHMM_KmeansInit: + def test_kmeans(self): + # Generate two isolated cluster. + # The second cluster has no. of points less than n_mix. + np.random.seed(0) + data1 = np.random.uniform(low=0, high=1, size=(100, 2)) + data2 = np.random.uniform(low=5, high=6, size=(5, 2)) + data = np.r_[data1, data2] + model = GMMHMM(n_components=2, n_mix=10, n_iter=5) + model.fit(data) # _init() should not fail here + # test whether the means are bounded by the data lower- and upperbounds + assert_array_less(0, model.means_) + assert_array_less(model.means_, 6)
fixed GMMHMM._init: KMeans not constrained by the min cluster size (#<I>)
hmmlearn_hmmlearn
train
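The hmmlearn commit above guards the per-state KMeans fit against clusters that hold fewer points than the number of mixture components, and falls back to sampling mixture means around the global centroid instead. Below is a minimal Python sketch of that fallback pattern; the function and parameter names (init_mixture_means, n_states, n_mix, min_covar) are illustrative assumptions, not hmmlearn's actual API.

```python
import numpy as np
from sklearn.cluster import KMeans

def init_mixture_means(X, n_states, n_mix, min_covar=1e-3, seed=0):
    """Sketch: per-state KMeans means with a sampling fallback for tiny clusters."""
    rng = np.random.default_rng(seed)
    n_features = X.shape[1]
    main = KMeans(n_clusters=n_states, random_state=seed).fit(X)
    main_centroid = main.cluster_centers_.mean(axis=0)
    cov = None  # computed lazily, mirroring the diff above
    means = []
    for state in range(n_states):
        X_state = X[main.labels_ == state]
        if X_state.shape[0] >= n_mix:
            # Enough points: use the per-state KMeans centroids as mixture means.
            km = KMeans(n_clusters=n_mix, random_state=seed).fit(X_state)
            means.append(km.cluster_centers_)
        else:
            # Too few points: sample n_mix means around the global centroid instead.
            if cov is None:
                cov = np.cov(X.T) + min_covar * np.eye(n_features)
            means.append(rng.multivariate_normal(main_centroid, cov, size=n_mix))
    return np.stack(means)  # shape (n_states, n_mix, n_features)
```

Calling this on two well-separated clusters where the smaller one has fewer than n_mix points (the situation the new test exercises) no longer fails in the KMeans fit.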
fd25cb8d9939761814058cfc5249393ba0123dec
diff --git a/src/Persistence.php b/src/Persistence.php index <HASH>..<HASH> 100644 --- a/src/Persistence.php +++ b/src/Persistence.php @@ -52,7 +52,7 @@ abstract class Persistence case 'pdo_sqlsrv': case 'pdo_oci': case 'oci8': - $persistence = new Persistence\Sql($dsn, $dsn['user'], $dsn['password'], $args); + $persistence = new Persistence\Sql($dsn, $dsn['user'] ?? null, $dsn['password'] ?? null, $args); return $persistence; default:
Connection DSN can omit password (#<I>)
atk4_data
train
0985694a4b2fd1df66874f6ccfa5f41ac6458d7c
diff --git a/connection.go b/connection.go index <HASH>..<HASH> 100644 --- a/connection.go +++ b/connection.go @@ -21,6 +21,8 @@ import ( "runtime" "sync" + "golang.org/x/net/context" + "github.com/jacobsa/bazilfuse" "github.com/jacobsa/fuse/fuseops" ) @@ -31,6 +33,9 @@ type Connection struct { wrapped *bazilfuse.Conn opsInFlight sync.WaitGroup + // The context from which all op contexts inherit. + parentCtx context.Context + // For logging purposes only. nextOpID uint32 } @@ -38,11 +43,13 @@ type Connection struct { // Responsibility for closing the wrapped connection is transferred to the // result. You must call c.close() eventually. func newConnection( + parentCtx context.Context, logger *log.Logger, wrapped *bazilfuse.Conn) (c *Connection, err error) { c = &Connection{ - logger: logger, - wrapped: wrapped, + logger: logger, + wrapped: wrapped, + parentCtx: parentCtx, } return @@ -115,7 +122,8 @@ func (c *Connection) ReadOp() (op fuseops.Op, err error) { c.log(opID, calldepth+1, format, v...) } - if op = fuseops.Convert(bfReq, logForOp, &c.opsInFlight); op == nil { + op = fuseops.Convert(c.parentCtx, bfReq, logForOp, &c.opsInFlight) + if op == nil { c.log(opID, 1, "-> ENOSYS: %v", bfReq) bfReq.RespondError(ENOSYS) continue diff --git a/fuseops/common_op.go b/fuseops/common_op.go index <HASH>..<HASH> 100644 --- a/fuseops/common_op.go +++ b/fuseops/common_op.go @@ -25,12 +25,13 @@ import ( // A helper for embedding common behavior. type commonOp struct { - ctx context.Context opType string r bazilfuse.Request log func(int, string, ...interface{}) opsInFlight *sync.WaitGroup - report reqtrace.ReportFunc + + ctx context.Context + report reqtrace.ReportFunc } func describeOpType(t reflect.Type) (desc string) { @@ -40,13 +41,13 @@ func describeOpType(t reflect.Type) (desc string) { } func (o *commonOp) init( + ctx context.Context, opType reflect.Type, r bazilfuse.Request, log func(int, string, ...interface{}), opsInFlight *sync.WaitGroup) { // Initialize basic fields. o.opType = describeOpType(opType) - o.ctx = context.Background() o.r = r o.log = log o.opsInFlight = opsInFlight diff --git a/fuseops/convert.go b/fuseops/convert.go index <HASH>..<HASH> 100644 --- a/fuseops/convert.go +++ b/fuseops/convert.go @@ -19,6 +19,8 @@ import ( "sync" "time" + "golang.org/x/net/context" + "github.com/jacobsa/bazilfuse" ) @@ -28,6 +30,7 @@ import ( // This function is an implementation detail of the fuse package, and must not // be called by anyone else. func Convert( + parentCtx context.Context, r bazilfuse.Request, logForOp func(int, string, ...interface{}), opsInFlight *sync.WaitGroup) (o Op) { @@ -213,7 +216,7 @@ func Convert( return } - co.init(reflect.TypeOf(o), r, logForOp, opsInFlight) + co.init(parentCtx, reflect.TypeOf(o), r, logForOp, opsInFlight) return } diff --git a/mounted_file_system.go b/mounted_file_system.go index <HASH>..<HASH> 100644 --- a/mounted_file_system.go +++ b/mounted_file_system.go @@ -61,6 +61,10 @@ func (mfs *MountedFileSystem) Join(ctx context.Context) error { // Optional configuration accepted by Mount. type MountConfig struct { + // The context from which every op read from the connetion by the sever + // should inherit. If nil, context.Background() will be used. + OpContext context.Context + // OS X only. // // Normally on OS X we mount with the novncache option @@ -127,8 +131,14 @@ func Mount( return } + // Choose a parent context for ops. 
+ opContext := config.OpContext + if opContext == nil { + opContext = context.Background() + } + // Create our own Connection object wrapping it. - connection, err := newConnection(logger, bfConn) + connection, err := newConnection(opContext, logger, bfConn) if err != nil { bfConn.Close() err = fmt.Errorf("newConnection: %v", err)
Allow setting a parent context for all ops.
jacobsa_fuse
train
6d2695260851f128aac3297f3a7c217c1dd51d9c
diff --git a/spec/payload/base_spec.rb b/spec/payload/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/payload/base_spec.rb +++ b/spec/payload/base_spec.rb @@ -73,4 +73,28 @@ describe Magnum::Payload::Base do to raise_error "Not implemented in base class" end end + + describe "#attributes_hash" do + let(:hash) { described_class.new("foo" => "bar").attributes_hash } + + before do + Magnum::Payload::Base.any_instance.stub(:parse!) + end + + it "returns payload hash" do + expect(hash).to be_a Hash + end + + it "it includes attributes" do + expect(hash.keys).to eq [ + "commit", + "branch", + "author", + "committer", + "message", + "commit_url", + "compare_url" + ] + end + end end \ No newline at end of file
Add tests for attributes_hash method
magnumci_magnum-payload
train
e69ed667c1b1bf41b6ddecf80578b97be6f95638
diff --git a/examples/render_goroutines/render_goroutines.go b/examples/render_goroutines/render_goroutines.go index <HASH>..<HASH> 100644 --- a/examples/render_goroutines/render_goroutines.go +++ b/examples/render_goroutines/render_goroutines.go @@ -7,6 +7,7 @@ import ( "math/rand" "os" "sync" + "time" "github.com/veandco/go-sdl2/sdl" ) @@ -46,6 +47,7 @@ func run() int { r := byte(rand.Int()) g := byte(rand.Int()) b := byte(rand.Int()) + time.Sleep(1 * time.Millisecond) // Call the render function in the 'render' thread synchronously sdl.CallQueue <- func() {
examples: render_goroutines: make the goroutines take a bit more time by sleeping
veandco_go-sdl2
train
acce73ddb756380265dfde038f35e381df693038
diff --git a/tests/jasmine/app/appSpec.js b/tests/jasmine/app/appSpec.js index <HASH>..<HASH> 100644 --- a/tests/jasmine/app/appSpec.js +++ b/tests/jasmine/app/appSpec.js @@ -379,48 +379,4 @@ describe('nitro:app', () => { ]); }); }); - - describe('when including release package', () => { - beforeAll((done) => { - helpers.run(path.join(__dirname, '../../../generators/app')) - .inDir(path.join(os.tmpdir(), './temp-test')) - .withOptions({ 'skip-install': true }) - .withPrompts({ release: true }) - .on('end', done); - }); - - it('package.json contains exporter dependency', () => { - assert.fileContent([ - ['package.json', /nitro-release/], - ]); - }); - - it('config does not contain default exporter properties', () => { - assert.fileContent([ - ['config/default.js', /release:/], - ]); - }); - }); - - describe('when not including release package', () => { - beforeAll((done) => { - helpers.run(path.join(__dirname, '../../../generators/app')) - .inDir(path.join(os.tmpdir(), './temp-test')) - .withOptions({ 'skip-install': true }) - .withPrompts({ release: false }) - .on('end', done); - }); - - it('package.json does not contain exporter dependency', () => { - assert.noFileContent([ - ['package.json', /nitro-release/], - ]); - }); - - it('config does not contain default exporter properties', () => { - assert.noFileContent([ - ['config/default.js', /release:/], - ]); - }); - }); });
generator: remove tests for removed release package
namics_generator-nitro
train
cf6830e1839e52f90eb10404917a032272f2fec6
diff --git a/test/sax/sax_test.rb b/test/sax/sax_test.rb index <HASH>..<HASH> 100755 --- a/test/sax/sax_test.rb +++ b/test/sax/sax_test.rb @@ -13,13 +13,7 @@ $: << File.join(File.dirname(__FILE__), ".") require 'stringio' require 'bigdecimal' - -use_minitest = RUBY_VERSION.start_with?('2.1.') && RUBY_ENGINE != 'rbx' -if use_minitest - require 'minitest/autorun' -else - require 'test/unit' -end +require 'test/unit' require 'optparse' require 'ox' @@ -31,8 +25,6 @@ opts = OptionParser.new opts.on("-h", "--help", "Show this display") { puts opts; Process.exit!(0) } opts.parse(ARGV) -test_case = (use_minitest) ? ::Minitest::Test : ::Test::Unit::TestCase - $ox_sax_options = { :encoding=>nil, :indent=>2, @@ -52,7 +44,7 @@ $ox_sax_options = { :strip_namespace=>false } -class SaxBaseTest < test_case +class SaxBaseTest < ::Test::Unit::TestCase include SaxTestHelpers def test_sax_io_pipe diff --git a/test/sax/smart_test.rb b/test/sax/smart_test.rb index <HASH>..<HASH> 100755 --- a/test/sax/smart_test.rb +++ b/test/sax/smart_test.rb @@ -12,12 +12,7 @@ $: << File.join(File.dirname(__FILE__), "../../ext") $: << File.join(File.dirname(__FILE__), ".") require 'stringio' -use_minitest = RUBY_VERSION.start_with?('2.1.') && RUBY_ENGINE != 'rbx' -if use_minitest - require 'minitest/autorun' -else - require 'test/unit' -end +require 'test/unit' require 'optparse' require 'helpers' require 'ox' @@ -27,9 +22,7 @@ opts = OptionParser.new opts.on("-h", "--help", "Show this display") { puts opts; Process.exit!(0) } opts.parse(ARGV) -test_case = (use_minitest) ? ::Minitest::Test : ::Test::Unit::TestCase - -class SaxSmartTest < test_case +class SaxSmartTest < ::Test::Unit::TestCase include SaxTestHelpers NORMALELEMENTS = { diff --git a/test/tests.rb b/test/tests.rb index <HASH>..<HASH> 100755 --- a/test/tests.rb +++ b/test/tests.rb @@ -11,8 +11,7 @@ $: << File.join(File.dirname(__FILE__), "../lib") $: << File.join(File.dirname(__FILE__), "../ext") require 'rubygems' if RUBY_VERSION.start_with?('1.8.') -require 'minitest' -require 'minitest/autorun' +require 'test/unit' require 'optparse' require 'date' require 'bigdecimal' @@ -70,7 +69,7 @@ $ox_generic_options = { :overlay=>nil, } -class Func < ::Minitest::Test +class Func < ::Test::Unit::TestCase unless respond_to?(:assert_raise) alias assert_raise assert_raises
Use test-unit instead of minitest
ohler55_ox
train
919c0e959a8e64aa0e37448e085c44dae826c514
diff --git a/src/toolchains/cc.py b/src/toolchains/cc.py index <HASH>..<HASH> 100644 --- a/src/toolchains/cc.py +++ b/src/toolchains/cc.py @@ -1,4 +1,4 @@ -import os.path +import os from collections import Iterable from node import Node @@ -21,6 +21,10 @@ def _strlistify(thing): return (str(i) for i in _listify(thing)) class CcCompiler(object): + def __init__(self): + self._cc_name = os.getenv('CC', 'cc') + self._cxx_name = os.getenv('CXX', 'c++') + def command_name(self, lang): if not isinstance(lang, basestring): is_cxx = any(i == 'c++' for i in lang) @@ -28,9 +32,9 @@ class CcCompiler(object): is_cxx = lang == 'c++' if is_cxx: - return ('c++', 'cxx') + return (self._cxx_name, 'cxx') else: - return ('cc', 'cc') + return (self._cc_name, 'cc') def compile_command(self, cmd, input, output, dep=None, prevars=None, postvars=None):
Support setting the compiler name via CC/CXX
jimporter_bfg9000
train
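The bfg9000 change above reads the C and C++ compiler names from the CC and CXX environment variables, with "cc" and "c++" as defaults. The same pattern in a short standalone Python sketch; the class and method names here are illustrative, not bfg9000's real interface.

```python
import os

class CcToolchain:
    """Sketch: pick compiler commands from the environment, falling back to defaults."""

    def __init__(self):
        # os.getenv returns the second argument when the variable is unset.
        self.cc = os.getenv('CC', 'cc')
        self.cxx = os.getenv('CXX', 'c++')

    def command(self, lang):
        return self.cxx if lang == 'c++' else self.cc

if __name__ == '__main__':
    tc = CcToolchain()
    print(tc.command('c'), tc.command('c++'))  # "cc c++" unless CC/CXX are exported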
c1d02e14989a0a5d4f0bac4ec650435037bee2c1
diff --git a/pyocd/__main__.py b/pyocd/__main__.py index <HASH>..<HASH> 100644 --- a/pyocd/__main__.py +++ b/pyocd/__main__.py @@ -699,7 +699,7 @@ class PyOCDTool(object): server_listening_callback=self.server_listening) gdbs.append(gdb) gdb = gdbs[0] - while gdb.isAlive(): + while gdb.is_alive(): gdb.join(timeout=0.5) except (KeyboardInterrupt, Exception): for gdb in gdbs: diff --git a/pyocd/gdbserver/gdbserver.py b/pyocd/gdbserver/gdbserver.py index <HASH>..<HASH> 100644 --- a/pyocd/gdbserver/gdbserver.py +++ b/pyocd/gdbserver/gdbserver.py @@ -233,13 +233,13 @@ class GDBServer(threading.Thread): self.start() def restart(self): - if self.isAlive(): + if self.is_alive(): self.detach_event.set() def stop(self): - if self.isAlive(): + if self.is_alive(): self.shutdown_event.set() - while self.isAlive(): + while self.is_alive(): pass LOG.info("GDB server thread killed") diff --git a/pyocd/tools/gdb_server.py b/pyocd/tools/gdb_server.py index <HASH>..<HASH> 100644 --- a/pyocd/tools/gdb_server.py +++ b/pyocd/tools/gdb_server.py @@ -294,7 +294,7 @@ class GDBServerTool(object): server_listening_callback=self.server_listening) gdbs.append(gdb) gdb = gdbs[0] - while gdb.isAlive(): + while gdb.is_alive(): gdb.join(timeout=0.5) except KeyboardInterrupt: for gdb in gdbs:
Resolve isAlive deprecation warning.
mbedmicro_pyOCD
train
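The pyOCD commit above replaces the deprecated camelCase Thread.isAlive() alias with is_alive(), which is the only spelling that remains in Python 3.9 and later. A tiny standalone sketch of the same join loop:

```python
import threading
import time

def worker():
    time.sleep(1.0)

t = threading.Thread(target=worker)
t.start()
# is_alive() is the supported spelling; isAlive() was removed in Python 3.9.
while t.is_alive():
    t.join(timeout=0.5)
print("thread finished")
```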
c27a0ee4b5f03947f4a9d3e6fe9620fafaf4348d
diff --git a/src/urlMatcherFactory.js b/src/urlMatcherFactory.js index <HASH>..<HASH> 100644 --- a/src/urlMatcherFactory.js +++ b/src/urlMatcherFactory.js @@ -79,7 +79,7 @@ function UrlMatcher(pattern, caseInsensitiveMatch) { function addParameter(id, type) { if (!/^\w+(-+\w+)*$/.test(id)) throw new Error("Invalid parameter name '" + id + "' in pattern '" + pattern + "'"); if (params[id]) throw new Error("Duplicate parameter name '" + id + "' in pattern '" + pattern + "'"); - params[id] = type; + params[id] = angular.isNumber(type) ? new Type() : type; } function quoteRegExp(string) { @@ -238,7 +238,7 @@ UrlMatcher.prototype.validates = function (params) { result = result && self.params[key].is(val); }); return result; -} +}; /** * @ngdoc function diff --git a/test/stateSpec.js b/test/stateSpec.js index <HASH>..<HASH> 100644 --- a/test/stateSpec.js +++ b/test/stateSpec.js @@ -91,6 +91,9 @@ describe('state', function () { } } }) + .state('badParam', { + url: "/bad/{param:int}" + }) .state('first', { url: '^/first/subpath' }) .state('second', { url: '^/second' }) @@ -712,6 +715,7 @@ describe('state', function () { 'about.person.item', 'about.sidebar', 'about.sidebar.item', + 'badParam', 'dynamicController', 'first', 'home', @@ -779,6 +783,29 @@ describe('state', function () { expect($state.current.name).toBe(''); })); + describe("typed parameter handling", function() { + + it('should initialize parameters without a hacky empty test', inject(function ($urlMatcherFactory, $state) { + new UrlMatcher(""); + })); + + it('should ignore bad url parameters', inject(function ($state, $rootScope, $location, $urlMatcherFactory) { + $location.path("/bad/5"); + $rootScope.$broadcast("$locationChangeSuccess"); + $rootScope.$apply(); + expect($state.current.name).toBe("badParam"); + + $state.transitionTo("about"); + $rootScope.$apply(); + expect($state.current.name).toBe('about'); + + $location.path("/bad/foo"); + $rootScope.$broadcast("$locationChangeSuccess"); + $rootScope.$apply(); + expect($state.current.name).toBe("about"); + })); + }); + it('should revert to last known working url on state change failure', inject(function ($state, $rootScope, $location, $q) { $state.transitionTo("about"); $q.flush();
chore($state): validate rejection of bad params
angular-ui_ui-router
train
cdcc4610badd147e12e15e56814a552094f30882
diff --git a/mockserver-netty/src/main/java/org/mockserver/proxy/relay/UpstreamProxyRelayHandler.java b/mockserver-netty/src/main/java/org/mockserver/proxy/relay/UpstreamProxyRelayHandler.java index <HASH>..<HASH> 100644 --- a/mockserver-netty/src/main/java/org/mockserver/proxy/relay/UpstreamProxyRelayHandler.java +++ b/mockserver-netty/src/main/java/org/mockserver/proxy/relay/UpstreamProxyRelayHandler.java @@ -30,7 +30,7 @@ public class UpstreamProxyRelayHandler extends SimpleChannelInboundHandler<FullH if (future.isSuccess()) { ctx.channel().read(); } else { - logger.error("Exception while returning writing " + request, future.cause()); + logger.error("Exception while returning response for request \"" + request.getMethod() + " " + request.getUri() + "\"", future.cause()); future.channel().close(); } }
fixing "io.netty.util.IllegalReferenceCountException" error caused by toString on byte buffer in response
jamesdbloom_mockserver
train
e56de8aacda1038158ed63d755ef52b6030934d1
diff --git a/salt/exceptions.py b/salt/exceptions.py index <HASH>..<HASH> 100644 --- a/salt/exceptions.py +++ b/salt/exceptions.py @@ -2,39 +2,43 @@ This module is a central location for all salt exceptions ''' -class SaltClientError(Exception): +class SaltException(Exception): ''' - Problem reading the master root key + Base exception class; all Salt-specific exceptions should subclass this ''' pass +class SaltClientError(SaltException): + ''' + Problem reading the master root key + ''' + pass -class AuthenticationError(Exception): +class AuthenticationError(SaltException): ''' If sha256 signature fails during decryption ''' pass - -class CommandNotFoundError(Exception): +class CommandNotFoundError(SaltException): ''' Used in modules or grains when a required binary is not available ''' pass -class LoaderError(Exception): +class LoaderError(SaltException): ''' Problems loading the right renderer ''' pass -class MinionError(Exception): +class MinionError(SaltException): ''' Minion problems reading uris such as salt:// or http:// ''' pass -class SaltInvocationError(Exception): +class SaltInvocationError(SaltException): ''' Used when the wrong number of arguments are sent to modules or invalid arguments are specified on the command line
Moved all Salt exceptions to inherit from a common exception class
saltstack_salt
train
daa8a82f1169909d696692b02fb9282e078772ef
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -28,6 +28,7 @@ }, "devDependencies": { "jsdom": "^7.0.0", + "parse5": "^3.0.2", "tap": "^10.3.0" }, "engines": { diff --git a/smartquotes.js b/smartquotes.js index <HASH>..<HASH> 100644 --- a/smartquotes.js +++ b/smartquotes.js @@ -68,9 +68,9 @@ for (i = 0; i < childNodes.length; i++) { node = childNodes[i]; - if (node.nodeType === TEXT_NODE) { + if (node.nodeType === TEXT_NODE || node.nodeName === '#text') { textNodes.push([node, text.length]); - text += node.nodeValue; + text += node.nodeValue || node.value; } else if (node.childNodes && node.childNodes.length) { text += handleElement(node); } @@ -81,6 +81,8 @@ var nodeInfo = textNodes[i]; if (nodeInfo[0].nodeValue) { nodeInfo[0].nodeValue = text.substr(nodeInfo[1], nodeInfo[0].nodeValue.length); + } else if (nodeInfo[0].value) { + nodeInfo[0].value = text.substr(nodeInfo[1], nodeInfo[0].value.length); } } return text; diff --git a/test/smartquotes.js b/test/smartquotes.js index <HASH>..<HASH> 100644 --- a/test/smartquotes.js +++ b/test/smartquotes.js @@ -1,5 +1,6 @@ var jsdom = require('jsdom'); var test = require('tap').test; +var parse5 = require('parse5'); var smartquotes = require('../'); // a list of test strings and expected converted values @@ -75,3 +76,10 @@ test('smartquotes()', function (t) { } }); }); + +test('parse5 support with smartquotes.element', function(t) { + var document = parse5.parse('"test text"'); + smartquotes.element(document); + t.match(parse5.serialize(document), /\u201ctest text\u201d/); + t.end(); +});
Adds support for parse5 parsed documents (#<I>)
kellym_smartquotes.js
train
5275c641023014fe419a74655579dd47b3f8a793
diff --git a/example/userdb/manage.py b/example/userdb/manage.py index <HASH>..<HASH> 100755 --- a/example/userdb/manage.py +++ b/example/userdb/manage.py @@ -1,4 +1,8 @@ #!/usr/bin/env python +import os, sys + +sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')) + from django.core.management import execute_manager import imp try:
added devil to PYTHONPATH
wuher_devil
train
e85830f39c2e65f2f5098f51fc2f7adf50537c59
diff --git a/lib/beaker-pe/install/pe_utils.rb b/lib/beaker-pe/install/pe_utils.rb index <HASH>..<HASH> 100644 --- a/lib/beaker-pe/install/pe_utils.rb +++ b/lib/beaker-pe/install/pe_utils.rb @@ -1515,7 +1515,7 @@ module Beaker #Installs PE with a PE managed external postgres def do_install_pe_with_pe_managed_external_postgres(hosts, opts) - pe_infrastructure = select_hosts({:roles => ['master', 'compile_master', 'dashboard', 'database', 'pe_postgres']}, hosts) + pe_infrastructure = select_hosts({:roles => ['master', 'dashboard', 'database', 'pe_postgres']}, hosts) non_infrastructure = hosts.reject{|host| pe_infrastructure.include? host} is_upgrade = (original_pe_ver(hosts[0]) != hosts[0][:pe_ver])
External postgres to work with compile master. During external postgres installation, the puppet agent is not getting installed on a compile_master node since it is added to the infrastructure node array. Removing compile_master from the infrastructure nodes array will add that node to the agents array so that puppet will be installed.
puppetlabs_beaker-pe
train
d661a10b8d09ed865bf0c9c708c810ac3a1318cd
diff --git a/lib/cacheable.rb b/lib/cacheable.rb index <HASH>..<HASH> 100644 --- a/lib/cacheable.rb +++ b/lib/cacheable.rb @@ -180,7 +180,7 @@ module Cacheable hash_args = sanitized_args[sanitized_args.length] result = ::Cacheable.cas(self, #{symbol.inspect}, #{options[:ttl]}) do |current_hash| - current_hash ||= Hash.new + current_hash = Hash.new unless current_hash.is_a?(Hash) if current_hash.has_key?(hash_args) current_hash[hash_args] else
apparently ||= doesn't always catch nils
seamusabshere_cacheable
train
01b96693ece2941d9fe28b19401aae6f909c590e
diff --git a/juju/model.py b/juju/model.py index <HASH>..<HASH> 100644 --- a/juju/model.py +++ b/juju/model.py @@ -1325,7 +1325,10 @@ class BundleHandler(object): Annotations holds the annotations as key/value pairs. """ entity_id = self.resolve(id_) - entity = self.model.state.get_entity(entity_type, entity_id) + try: + entity = self.model.state.get_entity(entity_type, entity_id) + except KeyError: + entity = await self._wait_for_new(entity_type, entity_id) return await entity.set_annotations(annotations)
Wait for entity if it doesn't exist yet
juju_python-libjuju
train
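The python-libjuju fix above catches the KeyError raised when the entity is not in the local model state yet and waits for it to appear before setting annotations. A minimal asyncio sketch of that try-then-wait pattern; get_entity, wait_for_new, and the state dict are hypothetical stand-ins, not libjuju's real API.

```python
import asyncio

state = {}  # hypothetical local cache of entities

def get_entity(entity_id):
    return state[entity_id]  # raises KeyError when not cached yet

async def wait_for_new(entity_id, poll=0.1):
    # Poll until the entity shows up in the local state.
    while entity_id not in state:
        await asyncio.sleep(poll)
    return state[entity_id]

async def set_annotations(entity_id, annotations):
    try:
        entity = get_entity(entity_id)
    except KeyError:
        entity = await wait_for_new(entity_id)
    entity.update(annotations)
    return entity

async def main():
    async def add_later():
        await asyncio.sleep(0.3)
        state["app-1"] = {}
    await asyncio.gather(add_later(), set_annotations("app-1", {"owner": "admin"}))
    print(state["app-1"])  # {'owner': 'admin'}

asyncio.run(main())
```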
63c1d4ee838504f111e05fcc54dabca6fc31e84d
diff --git a/src/PuliApplicationConfig.php b/src/PuliApplicationConfig.php index <HASH>..<HASH> 100644 --- a/src/PuliApplicationConfig.php +++ b/src/PuliApplicationConfig.php @@ -73,6 +73,9 @@ class PuliApplicationConfig extends DefaultApplicationConfig ->setDisplayName('Puli') ->setVersion(self::VERSION) + // Let Puli plugins extend the CLI + ->setEventDispatcher($puli->getEnvironment()->getEventDispatcher()) + // Enable debug for unreleased versions only. Split the string to // prevent its replacement during release ->setDebug('@pack'.'age_version@' === self::VERSION)
Injected Puli's event dispatcher into the console application
puli_cli
train
f94c41e5b6364b4cc53a135aa4eb930d3247e830
diff --git a/core/src/test/java/com/google/bitcoin/core/PeerGroupTest.java b/core/src/test/java/com/google/bitcoin/core/PeerGroupTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/com/google/bitcoin/core/PeerGroupTest.java +++ b/core/src/test/java/com/google/bitcoin/core/PeerGroupTest.java @@ -102,8 +102,6 @@ public class PeerGroupTest extends TestWithPeerGroup { @Test public void listener() throws Exception { - final SettableFuture<Void> firstDisconnectFuture = SettableFuture.create(); - final SettableFuture<Void> secondDisconnectFuture = SettableFuture.create(); peerGroup.startAndWait(); peerGroup.addEventListener(listener); @@ -452,7 +450,7 @@ public class PeerGroupTest extends TestWithPeerGroup { peerGroup.addEventListener(listener); peerGroup.addPeerDiscovery(new PeerDiscovery() { public InetSocketAddress[] getPeers(long unused, TimeUnit unused2) throws PeerDiscoveryException { - return addresses.toArray(new InetSocketAddress[0]); + return addresses.toArray(new InetSocketAddress[addresses.size()]); } public void shutdown() {
PeerGroupTest: Clear some minor static analysis warnings.
bitcoinj_bitcoinj
train
a344204cc7fd2efd34374510a8a747013841b7f0
diff --git a/builtin/providers/aws/resource_aws_rds_cluster_instance_test.go b/builtin/providers/aws/resource_aws_rds_cluster_instance_test.go index <HASH>..<HASH> 100644 --- a/builtin/providers/aws/resource_aws_rds_cluster_instance_test.go +++ b/builtin/providers/aws/resource_aws_rds_cluster_instance_test.go @@ -2,11 +2,10 @@ package aws import ( "fmt" - "math/rand" "strings" "testing" - "time" + "github.com/hashicorp/terraform/helper/acctest" "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/terraform" @@ -119,16 +118,16 @@ func testAccCheckAWSClusterInstanceExists(n string, v *rds.DBInstance) resource. var testAccAWSClusterInstanceConfig = fmt.Sprintf(` resource "aws_rds_cluster" "default" { cluster_identifier = "tf-aurora-cluster-test-%d" - availability_zones = ["us-west-2a","us-west-2b","us-west-2c"] - database_name = "mydb" - master_username = "foo" - master_password = "mustbeeightcharaters" + availability_zones = ["us-west-2a", "us-west-2b", "us-west-2c"] + database_name = "mydb" + master_username = "foo" + master_password = "mustbeeightcharaters" } resource "aws_rds_cluster_instance" "cluster_instances" { - identifier = "aurora-cluster-test-instance" - cluster_identifier = "${aws_rds_cluster.default.id}" - instance_class = "db.r3.large" + identifier = "tf-cluster-instance-%d" + cluster_identifier = "${aws_rds_cluster.default.id}" + instance_class = "db.r3.large" } -`, rand.New(rand.NewSource(time.Now().UnixNano())).Int()) +`, acctest.RandInt(), acctest.RandInt())
provider/aws: format and randomize RDS Cluster Instance tests
hashicorp_terraform
train
9ccf4d12888ba04fbd408c8c49c90a3914a32965
diff --git a/activerecord/test/cases/associations/nested_through_associations_test.rb b/activerecord/test/cases/associations/nested_through_associations_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/associations/nested_through_associations_test.rb +++ b/activerecord/test/cases/associations/nested_through_associations_test.rb @@ -153,6 +153,7 @@ class NestedThroughAssociationsTest < ActiveRecord::TestCase end def test_has_many_through_has_one_with_has_many_through_source_reflection_preload + ActiveRecord::Base.connection.table_alias_length # preheat cache members = assert_queries(4) { Member.includes(:organization_member_details).to_a.sort_by(&:id) } groucho_details, other_details = member_details(:groucho), member_details(:some_other_guy)
Warm up cache to prevent assertion failure.
rails_rails
train
c9ed741f1c8eb38544680067ec67232932685a2a
diff --git a/src/kit/app/EditorSessionMixin.js b/src/kit/app/EditorSessionMixin.js index <HASH>..<HASH> 100644 --- a/src/kit/app/EditorSessionMixin.js +++ b/src/kit/app/EditorSessionMixin.js @@ -183,20 +183,6 @@ export default function (DocumentSession) { return this._registerObserver('finalize', args) } - on (stage, ...args) { - if (_shouldDisplayDeprecatedWarning()) { - console.error("DEPRECATED: use 'editorState.addObserver(...)' instead.") - } - return this._registerObserver(stage, args) - } - - off (observer) { - if (_shouldDisplayDeprecatedWarning()) { - console.error("DEPRECATED: use 'editorState.off(...)' instead.") - } - this.editorState.removeObserver(observer) - } - _onDocumentChange (change, info) { this.editorState._setUpdate('document', { change, info }) this.editorState.hasUnsavedChanges = true
Don't allow EditorSession.on() for flow events anymore.
substance_texture
train
09875d3b888de531f8b7a604a159bcf02c90f9e0
diff --git a/src/openapi/ActionRouteParser.php b/src/openapi/ActionRouteParser.php index <HASH>..<HASH> 100644 --- a/src/openapi/ActionRouteParser.php +++ b/src/openapi/ActionRouteParser.php @@ -33,7 +33,7 @@ class ActionRouteParser extends BasePathParser public function __construct(Controller $controller, $actionName, $absoluteRoute, $controllerMapRoute) { $this->controllerSpecs = new ControllerSpecs($controller); - $this->actionSpecs = new ControllerActionSpecs($controller, $actionName); + $this->actionSpecs = new ControllerActionSpecs($controller, $actionName, 'get'); $this->absoluteRoute = $absoluteRoute; $this->controllerMapRoute = $controllerMapRoute; } diff --git a/src/openapi/UrlRuleRouteParser.php b/src/openapi/UrlRuleRouteParser.php index <HASH>..<HASH> 100644 --- a/src/openapi/UrlRuleRouteParser.php +++ b/src/openapi/UrlRuleRouteParser.php @@ -129,7 +129,7 @@ class UrlRuleRouteParser extends BasePathParser return false; } - $actionSpecs = new ControllerActionSpecs($this->controller, $this->getActionNameFromRoute($urlRule->route)); + $actionSpecs = new ControllerActionSpecs($this->controller, $this->getActionNameFromRoute($urlRule->route), $verbName); $actionObject = $actionSpecs->getActionObject(); if (!$actionObject) { diff --git a/src/openapi/specs/BaseSpecs.php b/src/openapi/specs/BaseSpecs.php index <HASH>..<HASH> 100644 --- a/src/openapi/specs/BaseSpecs.php +++ b/src/openapi/specs/BaseSpecs.php @@ -36,6 +36,19 @@ abstract class BaseSpecs implements SpecInterface abstract public function getReflection(); /** + * Get the context verbname: + * + * + get + * + post + * + delete + * + put + * + option + * + * @return string + */ + abstract public function getVerbName(); + + /** * @return BaseAction */ abstract public function getActionObject(); @@ -154,6 +167,8 @@ abstract class BaseSpecs implements SpecInterface 429 => new Response(['description' => 'Too many requests. The request was rejected due to rate limiting.']), 500 => new Response(['description' => 'Internal server error. This could be caused by internal program errors.']) ]; + + // @TODO: determine status codes based on $this->getVerbName(); } protected function modelContextToResponse($contextModel, $isArray = false) diff --git a/src/openapi/specs/ControllerActionSpecs.php b/src/openapi/specs/ControllerActionSpecs.php index <HASH>..<HASH> 100644 --- a/src/openapi/specs/ControllerActionSpecs.php +++ b/src/openapi/specs/ControllerActionSpecs.php @@ -19,10 +19,21 @@ class ControllerActionSpecs extends BaseSpecs protected $actioName; - public function __construct(Controller $controller, $actionName) + protected $verbName; + + public function __construct(Controller $controller, $actionName, $verbName) { $this->controller = $controller; $this->actioName = $actionName; + $this->verbName = $verbName; + } + + /** + * {@inheritDoc} + */ + public function getVerbName() + { + return strtolower($this->verbName); } /** diff --git a/src/openapi/specs/ControllerSpecs.php b/src/openapi/specs/ControllerSpecs.php index <HASH>..<HASH> 100644 --- a/src/openapi/specs/ControllerSpecs.php +++ b/src/openapi/specs/ControllerSpecs.php @@ -26,6 +26,14 @@ class ControllerSpecs extends BaseSpecs /** * {@inheritDoc} */ + public function getVerbName() + { + return 'get'; + } + + /** + * {@inheritDoc} + */ public function getControllerObject() { return $this->controller;
add option to find verb in action context for responses
luyadev_luya-module-admin
train
0876aba7458d6258b602c97c2a27a0f4b31034e0
diff --git a/scrapelib.py b/scrapelib.py index <HASH>..<HASH> 100644 --- a/scrapelib.py +++ b/scrapelib.py @@ -654,8 +654,8 @@ class Scraper(object): _default_scraper = Scraper(follow_robots=False, requests_per_minute=0) -def urlopen(url): - return _default_scraper.urlopen(url) +def urlopen(url, method='GET', body=None): + return _default_scraper.urlopen(url, method, body) def scrapeshell():
allow method/body params to scrapelib.urlopen
jamesturk_scrapelib
train
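The scrapelib change above lets the module-level urlopen helper forward an HTTP method and body to the shared default scraper instead of only accepting a URL. A generic sketch of that convenience-wrapper pattern in Python; the Scraper class here is a stand-in, not scrapelib's implementation.

```python
class Scraper:
    """Stand-in for a configurable scraper object."""

    def urlopen(self, url, method='GET', body=None):
        # Real code would issue the request; here we only describe it.
        return f"{method} {url}" + (f" with body {body!r}" if body is not None else "")

_default_scraper = Scraper()

def urlopen(url, method='GET', body=None):
    # Module-level convenience wrapper: forward all parameters, not just the URL.
    return _default_scraper.urlopen(url, method, body)

print(urlopen("http://example.com"))                     # GET http://example.com
print(urlopen("http://example.com", "POST", "a=1&b=2"))  # POST ... with body 'a=1&b=2'
```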
4433ab880d518e985a249ad2c5298cd49d666b23
diff --git a/wayback-core/src/test/java/org/archive/wayback/util/htmllex/ParseContextTest.java b/wayback-core/src/test/java/org/archive/wayback/util/htmllex/ParseContextTest.java index <HASH>..<HASH> 100644 --- a/wayback-core/src/test/java/org/archive/wayback/util/htmllex/ParseContextTest.java +++ b/wayback-core/src/test/java/org/archive/wayback/util/htmllex/ParseContextTest.java @@ -28,6 +28,8 @@ package org.archive.wayback.util.htmllex; import java.net.URI; import java.net.URL; +import org.htmlparser.util.Translate; + import junit.framework.TestCase; /** @@ -37,6 +39,19 @@ import junit.framework.TestCase; public class ParseContextTest extends TestCase { /** + * + */ + public void testTranslate() { + String orig = "http://foo.com/main?arg1=1&lang=2"; + String xlated = Translate.decode(orig); + System.out.format("Orig(%s) xlated(%s)\n",orig,xlated); + String orig2 = "&#32; gaz.cgi?foo=bar&lang=2"; + String xlated2 = Translate.decode(orig2); + System.out.format("Orig2(%s) xlated2(%s)\n",orig2,xlated2); + + } + + /** * Test method for {@link org.archive.wayback.util.htmllex.ParseContext#contextualizeUrl(java.lang.String)}. */ public void testContextualizeUrl() {
simple example of Translate.decode() not doing what I think it should do... no assertions as yet git-svn-id: <URL>
iipc_openwayback
train
fbe5df45a7aef3ac6538ece8f6a309ad19c17d90
diff --git a/lib/billy/proxy_connection.rb b/lib/billy/proxy_connection.rb index <HASH>..<HASH> 100644 --- a/lib/billy/proxy_connection.rb +++ b/lib/billy/proxy_connection.rb @@ -109,13 +109,8 @@ module Billy def handle_unsuccessful_response(url, status) error_level = Billy.config.non_successful_error_level error_message = "puffing-billy: Received response status code #{status} for #{Helpers.format_url(url)}" - case error_level - when :error - close_connection - raise error_message - else - Billy.log(error_level, error_message) - end + Billy.log(error_level, error_message) + close_connection if error_level == :error end def proxy_request diff --git a/spec/lib/proxy_spec.rb b/spec/lib/proxy_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/proxy_spec.rb +++ b/spec/lib/proxy_spec.rb @@ -183,17 +183,15 @@ shared_examples_for 'a cache' do end context 'non_successful_cache_disabled requests' do - before { Billy.config.non_successful_cache_disabled = true } + before do + rack_app_url = URI(http_error.url_prefix) + Billy.config.whitelist = ["#{rack_app_url.host}:#{rack_app_url.port}"] + Billy.config.non_successful_cache_disabled = true + end it 'should not cache non-successful response when enabled' do - # The test server in spec/support/test_server.rb is hard-coded to return a 200 - # Need a way to simulate a non-successful response for this testyy - - # Using this method never creates a file - # proxy.stub("#{url}/foo").and_return(:text => 'GET /foo', :code => 500) - # http.get('/foo') - - # File.exists?(cached_file).should be_false + http_error.get('/foo') + File.exists?(cached_file).should be_false end it 'should cache successful response when enabled' do @@ -202,11 +200,14 @@ shared_examples_for 'a cache' do end context 'non_successful_error_level requests' do - before { Billy.config.non_successful_error_level = :error } + before do + rack_app_url = URI(http_error.url_prefix) + Billy.config.whitelist = ["#{rack_app_url.host}:#{rack_app_url.port}"] + Billy.config.non_successful_error_level = :error + end it 'should raise error for non-successful responses when :error' do - # Need a way to simulate a non-successful response for this testyy - # expect{http.get('/foo')}.to raise_error(PutErrorHere) + expect{http_error.get('/foo')}.to raise_error(Faraday::Error::ConnectionFailed) end end @@ -255,6 +256,10 @@ describe Billy::Proxy do :proxy => { :uri => proxy.url }, :keepalive => false, :timeout => 0.5 + @http_error = Faraday.new @error_url, + :proxy => { :uri => proxy.url }, + :keepalive => false, + :timeout => 0.5 end context 'proxying' do @@ -274,13 +279,13 @@ describe Billy::Proxy do context 'stubbing' do context 'HTTP' do - let!(:url) { @http_url } + let!(:url) { @http_url } let!(:http) { @http } it_should_behave_like 'a request stub' end context 'HTTPS' do - let!(:url) { @https_url } + let!(:url) { @https_url } let!(:http) { @https } it_should_behave_like 'a request stub' end @@ -294,20 +299,23 @@ describe Billy::Proxy do end context 'HTTP' do - let!(:url) { @http_url } - let!(:http) { @http } + let!(:url) { @http_url } + let!(:http) { @http } + let!(:http_error) { @http_error } it_should_behave_like 'a cache' end context 'HTTPS' do - let!(:url) { @https_url } - let!(:http) { @https } + let!(:url) { @https_url } + let!(:http) { @https } + let!(:http_error) { @http_error } it_should_behave_like 'a cache' end context 'with a cache scope' do - let!(:url) { @http_url } - let!(:http) { @http } + let!(:url) { @http_url } + let!(:http) { @http } + let!(:http_error) { @http_error } before do 
proxy.cache.scope_to "my_cache" diff --git a/spec/support/test_server.rb b/spec/support/test_server.rb index <HASH>..<HASH> 100644 --- a/spec/support/test_server.rb +++ b/spec/support/test_server.rb @@ -28,11 +28,16 @@ module Billy https_server = start_server(echo, true) q.push https_server.backend.get_port + + echo_error = echo_app_setup(500) + error_server = start_server(echo_error) + q.push error_server.backend.get_port end end @http_url = "http://localhost:#{q.pop}" @https_url = "https://localhost:#{q.pop}" + @error_url = "http://localhost:#{q.pop}" end def echo_app_setup(response_code = 200)
Add failing response server, fix tests, remove raise
oesmith_puffing-billy
train
2e518b1e1ad546425d97941cc621c7acf501ff22
diff --git a/app/Libs/Elements/Generate/InputGeneration.php b/app/Libs/Elements/Generate/InputGeneration.php index <HASH>..<HASH> 100644 --- a/app/Libs/Elements/Generate/InputGeneration.php +++ b/app/Libs/Elements/Generate/InputGeneration.php @@ -90,20 +90,17 @@ class InputGeneration if( ! is_null( $orig ) ) { $orig = rtrim( $orig, DIRECTORY_SEPARATOR ) . DIRECTORY_SEPARATOR; } - try { if( ! file_exists( $path ) ) { mkdir( $path, 0655 ); - if( ! is_null( $orig ) && $path !== $orig ) { - return $this->createFolder( $orig, null ); - } + return $this->createFolder( $orig, null ); } } catch( Exception $e ) { if( $e->getMessage() === 'mkdir(): No such file or directory' ) { if( ! preg_match( '/^' . preg_quote( app_path(), '/' ) . '/', $path ) ) { die( "This has gone too far!!" ); } - return $this->createFolder( dirname( $path ), $path ); + return $this->createFolder( dirname( $path ), is_null( $orig ) ? $path : $orig ); } } } @@ -116,20 +113,66 @@ class InputGeneration { $elementClassName = self::nameToClassName( $inputName, 'Element' ); if( $this->region->isLocal ) { - $fileName = $this->region->folder . "{$this->region->nameToClassFormat}" . DIRECTORY_SEPARATOR . "Elements" . DIRECTORY_SEPARATOR . "{$elementClassName}.php"; - $namespaceWithoutClassName = $this->region->namespaceWithoutClassName . "\\{$this->region->nameToClassFormat}\Elements"; - $namespaceWithClassName = $namespaceWithoutClassName . "\\{$elementClassName}"; - return (object) [ - 'inputName' => $inputName, - 'nameToClassFormat' => self::nameToClassName( $inputName, '' ), - 'namespaceWithoutClassName' => $namespaceWithoutClassName, - 'namespaceWithClassName' => $namespaceWithClassName, - 'className' => $elementClassName, - 'fileName' => $fileName, - 'folder' => dirname( $fileName ) . DIRECTORY_SEPARATOR, - 'isLocal' => self::isLocalNamespace( $namespaceWithClassName ), - ]; + return $this->createLocalTempElement( $inputName, $elementClassName ); } + return $this->createExternalTempElement( $inputName, $elementClassName ); + } + + /** + * @param $inputName + * @param $elementClassName + * @return object + */ + protected function createLocalTempElement( $inputName, $elementClassName ) + { + $fileName = $this->region->folder . "{$this->region->nameToClassFormat}" . DIRECTORY_SEPARATOR . "Elements" . DIRECTORY_SEPARATOR . "{$elementClassName}.php"; + $namespaceWithoutClassName = $this->region->namespaceWithoutClassName . "\\{$this->region->nameToClassFormat}\Elements"; + $namespaceWithClassName = $namespaceWithoutClassName . "\\{$elementClassName}"; + $nameToClassFormat = self::nameToClassName( $inputName, '' ); + $className = $elementClassName; + $folder = dirname( $fileName ) . DIRECTORY_SEPARATOR; + $isLocal = self::isLocalNamespace( $namespaceWithClassName ); + + return (object) compact( 'inputName', 'nameToClassFormat', 'namespaceWithoutClassName', 'namespaceWithClassName', 'className', 'fileName', 'folder', 'isLocal' ); + } + + + /** + * @param $inputName + * @param $elementClassName + * @return object + */ + protected function createExternalTempElement( $inputName, $elementClassName ) + { + $namespaceWithoutClassName = $this->getGlobalRegionPrefix( $this->region->namespaceWithClassName ) . '\Elements'; + $namespaceWithClassName = $namespaceWithoutClassName . "\\{$elementClassName}"; + $fileName = $this->localNamespaceToFileName( $namespaceWithClassName ); + $folder = dirname( $fileName ) . 
DIRECTORY_SEPARATOR; + $nameToClassFormat = self::nameToClassName( $inputName, '' ); + $isLocal = true; + $className = $elementClassName; + + return (object) compact( 'inputName', 'nameToClassFormat', 'namespaceWithoutClassName', 'namespaceWithClassName', 'className', 'fileName', 'folder', 'isLocal' ); + } + + /** + * @param $namespace + * @return string + */ + protected function getGlobalRegionPrefix( $namespace ) + { + $segments = explode( '\\', $namespace ); + $segments[0] = rtrim( self::appNamespace(), '\\' ); + return preg_replace( '/Region$/', '', implode( '\\', $segments ) ); + } + + /** + * @param $namespace + * @return string + */ + protected function localNamespaceToFileName( $namespace ) + { + return app_path( str_replace( '\\', DIRECTORY_SEPARATOR, preg_replace( '/^' . preg_quote( self::appNamespace(), '\\' ) . '/', '', $namespace ) ) ) . '.php'; } /**
update for creating elements for regions that are not local to the application
lukesnowden_laraview
train
eaf78ffc2db594e7863637d31124363589362851
diff --git a/go/chat/localizer.go b/go/chat/localizer.go index <HASH>..<HASH> 100644 --- a/go/chat/localizer.go +++ b/go/chat/localizer.go @@ -267,11 +267,12 @@ type localizerPipeline struct { offline bool - started bool - stopCh chan struct{} - cancelChs map[string]chan struct{} - suspendWg sync.WaitGroup - jobQueue chan *localizerPipelineJob + started bool + stopCh chan struct{} + cancelChs map[string]chan struct{} + suspendCount int + suspendWaiters []chan struct{} + jobQueue chan *localizerPipelineJob // testing useGateCh bool @@ -350,7 +351,7 @@ func (s *localizerPipeline) suspend(ctx context.Context) bool { if !s.started { return false } - s.suspendWg.Add(1) + s.suspendCount++ if len(s.cancelChs) == 0 { return false } @@ -382,10 +383,32 @@ func (s *localizerPipeline) resume(ctx context.Context) bool { defer s.Trace(ctx, func() error { return nil }, "resume")() s.Lock() defer s.Unlock() - s.suspendWg.Done() + if s.suspendCount == 0 { + s.Debug(ctx, "resume: spurious resume call without suspend") + return false + } + s.suspendCount-- + if s.suspendCount == 0 { + for _, cb := range s.suspendWaiters { + close(cb) + } + s.suspendWaiters = nil + } return false } +func (s *localizerPipeline) registerWaiter() chan struct{} { + s.Lock() + defer s.Unlock() + cb := make(chan struct{}) + if s.suspendCount == 0 { + close(cb) + return cb + } + s.suspendWaiters = append(s.suspendWaiters, cb) + return cb +} + func (s *localizerPipeline) localizeJobPulled(job *localizerPipelineJob, stopCh chan struct{}) { id, cancelCh := s.registerJobPull(job.ctx) defer s.finishJobPull(id) @@ -397,7 +420,7 @@ func (s *localizerPipeline) localizeJobPulled(job *localizerPipelineJob, stopCh } else { s.Debug(job.ctx, "localizeJobPulled: waiting for resume") go func() { - s.suspendWg.Wait() + <-s.registerWaiter() close(waitCh) }() }
use channels instead of waitgroup for localizer suspend/resume (#<I>)
keybase_client
train
fcd1b2b613d3ecb6f9c4c7ccdd7ac9e8f0e80df7
diff --git a/expfactory/cli/build.py b/expfactory/cli/build.py index <HASH>..<HASH> 100644 --- a/expfactory/cli/build.py +++ b/expfactory/cli/build.py @@ -71,7 +71,7 @@ def main(args,parser,subparser): app = "%s%s\n" %(app, commands) # The final installation step - app = "%sWORKDIR /scif/apps\n RUN expfactory install -f %s\n\n" %(app,config['github']) + app = "%sWORKDIR /scif/apps\n RUN expfactory install %s\n\n" %(app,config['github']) apps = "%s%s\n" %(apps,app) else: diff --git a/expfactory/cli/install.py b/expfactory/cli/install.py index <HASH>..<HASH> 100644 --- a/expfactory/cli/install.py +++ b/expfactory/cli/install.py @@ -80,7 +80,7 @@ def main(args,parser,subparser): in_container = True # Running, live container - elif os.environ.get('SINGULARITY_CONTAINER') is not None: + elif os.environ.get('EXPFACTORY_CONTAINER') is not None: in_container = True if in_container is True: @@ -106,9 +106,11 @@ def main(args,parser,subparser): with open(instruct,'w') as filey: filey.writelines(config['instructions']) - if os.path.exists(dest): + if not os.path.exists(dest): + os.system('mv %s %s' %(source, dest)) + else: if args.force is False: bot.error('%s is not empty! Use --force to delete and re-create.' %folder) sys.exit(1) - os.system('cp -R %s/* %s' %(source, dest)) + os.system('cp -R %s/* %s' %(source, dest))
modified: ../../../../cli/build.py modified: ../../../../cli/install.py
expfactory_expfactory
train
e37c448787f98643d0b862d6bc514c2678dab9df
diff --git a/src/de/lmu/ifi/dbs/elki/persistent/DefaultPageHeader.java b/src/de/lmu/ifi/dbs/elki/persistent/DefaultPageHeader.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/persistent/DefaultPageHeader.java +++ b/src/de/lmu/ifi/dbs/elki/persistent/DefaultPageHeader.java @@ -114,4 +114,13 @@ public class DefaultPageHeader implements PageHeader { public int getPageSize() { return pageSize; } + + /** + * Returns the number of pages necessary for the header + * + * @return the number of pages + */ + public int getReservedPages() { + return size()/getPageSize()+1; + } } diff --git a/src/de/lmu/ifi/dbs/elki/persistent/PageHeader.java b/src/de/lmu/ifi/dbs/elki/persistent/PageHeader.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/persistent/PageHeader.java +++ b/src/de/lmu/ifi/dbs/elki/persistent/PageHeader.java @@ -52,4 +52,11 @@ public interface PageHeader { * @return the size of a page in Bytes */ int getPageSize(); + + /** + * Returns the number of pages necessary for the header + * + * @return the number of pages + */ + public int getReservedPages(); } diff --git a/src/de/lmu/ifi/dbs/elki/persistent/PersistentPageFile.java b/src/de/lmu/ifi/dbs/elki/persistent/PersistentPageFile.java index <HASH>..<HASH> 100644 --- a/src/de/lmu/ifi/dbs/elki/persistent/PersistentPageFile.java +++ b/src/de/lmu/ifi/dbs/elki/persistent/PersistentPageFile.java @@ -8,6 +8,7 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.RandomAccessFile; +import de.lmu.ifi.dbs.elki.index.tree.spatial.rstarvariants.rstar.RStarTreeNode; import de.lmu.ifi.dbs.elki.logging.Logging; /** @@ -70,7 +71,7 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { // reading empty nodes in Stack int i = 0; while(file.getFilePointer() + pageSize <= file.length()) { - int offset = header.size() + pageSize * i; + int offset = (header.getReservedPages() + i) * pageSize; byte[] buffer = new byte[pageSize]; file.seek(offset); file.read(buffer); @@ -126,7 +127,7 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { // get from file and put to cache if(page == null) { readAccess++; - int offset = header.size() + pageSize * pageID; + int offset = (header.getReservedPages() + pageID) * pageSize; byte[] buffer = new byte[pageSize]; file.seek(offset); file.read(buffer); @@ -160,7 +161,7 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { // delete from file writeAccess++; byte[] array = pageToByteArray(null); - int offset = header.size() + pageSize * pageID; + int offset = (header.getReservedPages() + pageID) * pageSize; file.seek(offset); file.write(array); } @@ -181,7 +182,7 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { page.setDirty(false); writeAccess++; byte[] array = pageToByteArray(page); - int offset = header.size() + pageSize * page.getID(); + int offset = (header.getReservedPages() + page.getID()) * pageSize; file.seek(offset); file.write(array); } @@ -235,7 +236,9 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { return null; } else if(type == FILLED_PAGE) { - return (P) ois.readObject(); + RStarTreeNode page = new RStarTreeNode(); + page.readExternal(ois); + return (P) page; } else { throw new IllegalArgumentException("Unknown type: " + type); @@ -276,7 +279,7 @@ public class PersistentPageFile<P extends Page<P>> extends PageFile<P> { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(baos); 
oos.writeInt(FILLED_PAGE); - oos.writeObject(page); + page.writeExternal(oos); oos.close(); baos.close(); byte[] array = baos.toByteArray();
Use full pages for header to achieve proper page offsets.
elki-project_elki
train
889127231ce91eeb1851a5707da38332412b0419
diff --git a/targz.go b/targz.go index <HASH>..<HASH> 100644 --- a/targz.go +++ b/targz.go @@ -248,7 +248,7 @@ func extract(filePath string, directory string) error { writer := bufio.NewWriter(file) - buffer := make([]byte, 1024) + buffer := make([]byte, 4096) for { n, err := tarReader.Read(buffer) if err != nil && err != io.EOF {
Change buffer size to <I>
walle_targz
train
3d3eed119fcc9a47cc31fa7c44085e19cf7c4eac
diff --git a/src/UnwrapWritableStream.php b/src/UnwrapWritableStream.php index <HASH>..<HASH> 100644 --- a/src/UnwrapWritableStream.php +++ b/src/UnwrapWritableStream.php @@ -105,7 +105,7 @@ class UnwrapWritableStream extends EventEmitter implements WritableStreamInterfa public function write($data) { if ($this->ending) { - return; + return false; } // forward to inner stream if possible diff --git a/tests/UnwrapWritableTest.php b/tests/UnwrapWritableTest.php index <HASH>..<HASH> 100644 --- a/tests/UnwrapWritableTest.php +++ b/tests/UnwrapWritableTest.php @@ -253,6 +253,55 @@ class UnwrapWritableTest extends TestCase $this->loop->run(); } + public function testWriteReturnsFalseWhenPromiseIsPending() + { + $promise = new \React\Promise\Promise(function () { }); + $stream = Stream\unwrapWritable($promise); + + $ret = $stream->write('nope'); + + $this->assertFalse($ret); + } + + public function testWriteReturnsTrueWhenUnwrappedStreamReturnsTrueForWrite() + { + $input = $this->getMockBuilder('React\Stream\WritableStreamInterface')->getMock(); + $input->expects($this->once())->method('isWritable')->willReturn(true); + $input->expects($this->once())->method('write')->willReturn(true); + + $promise = \React\Promise\resolve($input); + $stream = Stream\unwrapWritable($promise); + + $ret = $stream->write('hello'); + + $this->assertTrue($ret); + } + + public function testWriteReturnsFalseWhenUnwrappedStreamReturnsFalseForWrite() + { + $input = $this->getMockBuilder('React\Stream\WritableStreamInterface')->getMock(); + $input->expects($this->once())->method('isWritable')->willReturn(true); + $input->expects($this->once())->method('write')->willReturn(false); + + $promise = \React\Promise\resolve($input); + $stream = Stream\unwrapWritable($promise); + + $ret = $stream->write('nope'); + + $this->assertFalse($ret); + } + + public function testWriteAfterCloseReturnsFalse() + { + $promise = new \React\Promise\Promise(function () { }); + $stream = Stream\unwrapWritable($promise); + + $stream->close(); + $ret = $stream->write('nope'); + + $this->assertFalse($ret); + } + public function testEmitsErrorAndClosesWhenInputEmitsError() { $input = new ThroughStream(); @@ -267,6 +316,20 @@ class UnwrapWritableTest extends TestCase $this->assertFalse($stream->isWritable()); } + public function testEmitsDrainWhenPromiseResolvesWithStreamWhenForwardingData() + { + $input = $this->getMockBuilder('React\Stream\WritableStreamInterface')->getMock(); + $input->expects($this->once())->method('isWritable')->willReturn(true); + $input->expects($this->once())->method('write')->with('hello')->willReturn(true); + + $deferred = new Deferred(); + $stream = Stream\unwrapWritable($deferred->promise()); + $stream->write('hello'); + + $stream->on('drain', $this->expectCallableOnce()); + $deferred->resolve($input); + } + public function testDoesNotEmitDrainWhenStreamBufferExceededAfterForwardingData() { $input = $this->getMockBuilder('React\Stream\WritableStreamInterface')->getMock();
Writing to closed unwrapped stream should return false (backpressure)
reactphp_promise-stream
train
ecbb279e2dd4b582356adcc876505751b7c970f5
diff --git a/src/loki-indexed-adapter.js b/src/loki-indexed-adapter.js index <HASH>..<HASH> 100644 --- a/src/loki-indexed-adapter.js +++ b/src/loki-indexed-adapter.js @@ -349,7 +349,7 @@ var lokiIndexedAdapter = (function() { return function(e) { var lres = e.target.result; - if (typeof(lres) === 'undefined') { + if (lres === null || typeof(lres) === 'undefined') { lres = { id: 0, success: false
Fixed issue with Safari returning null instead of undefined when a key in appKey is not found
techfort_LokiJS
train
d3650ba6c36836b5ba1067e16240cf67e8b0253b
diff --git a/lib/rules/no-extra-parens.js b/lib/rules/no-extra-parens.js index <HASH>..<HASH> 100644 --- a/lib/rules/no-extra-parens.js +++ b/lib/rules/no-extra-parens.js @@ -112,7 +112,8 @@ module.exports = function(context) { } function dryCallNew(node) { - if (isParenthesised(node.callee) && precedence(node.callee) >= precedence(node)) { + if (isParenthesised(node.callee) && precedence(node.callee) >= precedence(node) && + !(node.type === "CallExpression" && node.callee.type === "FunctionExpression")) { report(node.callee); } if (node.arguments.length === 1) { diff --git a/tests/lib/rules/no-extra-parens.js b/tests/lib/rules/no-extra-parens.js index <HASH>..<HASH> 100644 --- a/tests/lib/rules/no-extra-parens.js +++ b/tests/lib/rules/no-extra-parens.js @@ -107,7 +107,13 @@ eslintTester.addRuleTest("lib/rules/no-extra-parens", { "var foo = (function() { return bar(); }())", "var o = { foo: (function() { return bar(); }()) };", "o.foo = (function(){ return bar(); }());", - "(function(){ return bar(); }()), (function(){ return bar(); }())" + "(function(){ return bar(); }()), (function(){ return bar(); }())", + + // IIFE is allowed to have outer parens (#1004) + "var foo = (function() { return bar(); })()", + "var o = { foo: (function() { return bar(); })() };", + "o.foo = (function(){ return bar(); })();", + "(function(){ return bar(); })(), (function(){ return bar(); })()" ], invalid: [ invalid("(0)", "Literal"), @@ -148,6 +154,7 @@ eslintTester.addRuleTest("lib/rules/no-extra-parens", { invalid("(0)[a]", "Literal"), invalid("(0.0).a", "Literal"), invalid("(0xBEEF).a", "Literal"), - invalid("(1e6).a", "Literal") + invalid("(1e6).a", "Literal"), + invalid("new (function(){})", "FunctionExpression") ] });
Fix: add additional IIFE exception in no-extra-parens (fixes #<I>)
eslint_eslint
train
c9337d6860f881a80ba659fd76bfc1d31ba38b86
diff --git a/holidays.py b/holidays.py index <HASH>..<HASH> 100755 --- a/holidays.py +++ b/holidays.py @@ -2338,7 +2338,7 @@ class Germany(HolidayBase): def __init__(self, **kwargs): self.country = 'DE' - self.prov = kwargs.pop('prov', 'SH') + self.prov = kwargs.pop('prov', None) HolidayBase.__init__(self, **kwargs) def _populate(self, year):
Only return nationwide holidays for Germany when no province is passed explicitly (#<I>)
dr-prodigy_python-holidays
train
cd0ab1806a981afd34ec4239e33d0e2cad0d3f15
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,6 +10,7 @@ setup( 'djorm-ext-expressions>=0.5'], url='https://github.com/jneight/django-earthdistance', description='Add support for PostgreSQL earthdistance extension to Django', + long_description=open("README.rst").read(), packages=find_packages(), include_package_data=True, license='Apache 2.0',
Added README.rst to pypi page
jneight_django-earthdistance
train
932d599c981271846c5e659ca8b2b13729915337
diff --git a/internal/uidriver/glfw/ui.go b/internal/uidriver/glfw/ui.go index <HASH>..<HASH> 100644 --- a/internal/uidriver/glfw/ui.go +++ b/internal/uidriver/glfw/ui.go @@ -1103,7 +1103,7 @@ func currentMonitor(window *glfw.Window) *glfw.Monitor { // Getting a monitor from a window position is not reliable in general (e.g., when a window is put across // multiple monitors, or, before SetWindowPosition is called.). // Get the monitor which the current window belongs to. This requires OS API. - if m := currentMonitorByOS(); m != nil { + if m := currentMonitorByOS(window); m != nil { return m } diff --git a/internal/uidriver/glfw/ui_darwin.go b/internal/uidriver/glfw/ui_darwin.go index <HASH>..<HASH> 100644 --- a/internal/uidriver/glfw/ui_darwin.go +++ b/internal/uidriver/glfw/ui_darwin.go @@ -55,7 +55,7 @@ func (u *UserInterface) adjustWindowPosition(x, y int) (int, int) { return x, y } -func currentMonitorByOS() *glfw.Monitor { +func currentMonitorByOS(w *glfw.Window) *glfw.Monitor { x := C.int(0) y := C.int(0) // Note: [NSApp mainWindow] is nil when it doesn't have its border. Use w here. diff --git a/internal/uidriver/glfw/ui_unix.go b/internal/uidriver/glfw/ui_unix.go index <HASH>..<HASH> 100644 --- a/internal/uidriver/glfw/ui_unix.go +++ b/internal/uidriver/glfw/ui_unix.go @@ -32,7 +32,7 @@ func (u *UserInterface) adjustWindowPosition(x, y int) (int, int) { return x, y } -func currentMonitorByOS() *glfw.Monitor { +func currentMonitorByOS(_ *glfw.Window) *glfw.Monitor { // TODO: Implement this correctly. (#1119). return nil } diff --git a/internal/uidriver/glfw/ui_windows.go b/internal/uidriver/glfw/ui_windows.go index <HASH>..<HASH> 100644 --- a/internal/uidriver/glfw/ui_windows.go +++ b/internal/uidriver/glfw/ui_windows.go @@ -104,7 +104,7 @@ func (u *UserInterface) glfwScale() float64 { } func (u *UserInterface) adjustWindowPosition(x, y int) (int, int) { - mx, my := u.currentMonitor().GetPos() + mx, my := currentMonitor(u.window).GetPos() // As the video width/height might be wrong, // adjust x/y at least to enable to handle the window (#328) if x < mx { @@ -120,7 +120,7 @@ func (u *UserInterface) adjustWindowPosition(x, y int) (int, int) { return x, y } -func currentMonitorByOS() *glfw.Monitor { +func currentMonitorByOS(_ *glfw.Window) *glfw.Monitor { // TODO: Should we return nil here? w, err := getActiveWindow() if err != nil {
uidriver/glfw: Fix compilation failure on macOS and Windows
hajimehoshi_ebiten
train
7e98941de7702106bfbeee8cb27abc0564949028
diff --git a/lib/bench.js b/lib/bench.js index <HASH>..<HASH> 100644 --- a/lib/bench.js +++ b/lib/bench.js @@ -375,7 +375,9 @@ var Benchmark = INHERIT({ rs.forEach(function(i) { i.forEach(function(j) { - objs.push(j); + j.forEach(function(k) { + objs.push(k); + }); }); });
Flatten object, because there was added new `map()` level (`Q.all()`)
bem-archive_bem-tools
train
9793e81eec3c46b117b929b52b3df223516de014
diff --git a/src/main/java/com/semanticcms/news/view/NewsView.java b/src/main/java/com/semanticcms/news/view/NewsView.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/semanticcms/news/view/NewsView.java +++ b/src/main/java/com/semanticcms/news/view/NewsView.java @@ -79,7 +79,7 @@ public class NewsView extends View { HttpServletResponse response, Page page ) { - return "What's New" + TITLE_SEPARATOR + page.getPageRef().getBook().getTitle(); + return "What's New" + TITLE_SEPARATOR + page.getTitle(); } /**
Fixed news title, was based on book instead of page.
aoindustries_semanticcms-news-view
train
93e3e81f806efcf1d6460e3c2e972cfee96955ee
diff --git a/onecodex/models/analysis.py b/onecodex/models/analysis.py index <HASH>..<HASH> 100644 --- a/onecodex/models/analysis.py +++ b/onecodex/models/analysis.py @@ -91,6 +91,10 @@ class Classifications(Analyses): @classmethod def _transform_api_results(cls, results): + # For forward-compatibility with adding the rollups server-side + if all(x.get(Metric.AbundanceWChildren) is not None for x in results["table"]): + return results + results["table"] = cls._append_abundance_rollups(results["table"]) return results diff --git a/tests/test_classifications.py b/tests/test_classifications.py index <HASH>..<HASH> 100644 --- a/tests/test_classifications.py +++ b/tests/test_classifications.py @@ -1,3 +1,4 @@ +import mock import pytest pytest.importorskip("pandas") # noqa @@ -182,3 +183,18 @@ def test_abundance_rollups(sample_tree_old, sample_tree_new): assert table["1"]["abundance_w_children"] == 1.0 assert all_genera_abund == 1.0 assert all_species_abund == 1.0 + + +def test_abundance_forward_compatibility(sample_tree_old): + """Tests that if an `abundance_w_children` field exists in the API, do not run `_append_abundance_rollups`.""" + abundance_fxn = "onecodex.models.analysis.Classifications._append_abundance_rollups" + with mock.patch(abundance_fxn) as calc_rollups: + Classifications._transform_api_results({"table": sample_tree_old}) + assert calc_rollups.call_count == 1 + + # Add cumulative abundance to the first field, which is what we check for API forward compat + for item in sample_tree_old: + item["abundance_w_children"] = 0.0 + with mock.patch(abundance_fxn) as calc_rollups: + Classifications._transform_api_results({"table": sample_tree_old}) + assert calc_rollups.call_count == 0
Make client-side cumulative abundance calculations forward-compatible with field being added to API
onecodex_onecodex
train
8caba09993299fcff6169b877779c93215da8b88
diff --git a/modules/backend/lang/de/lang.php b/modules/backend/lang/de/lang.php index <HASH>..<HASH> 100644 --- a/modules/backend/lang/de/lang.php +++ b/modules/backend/lang/de/lang.php @@ -150,6 +150,7 @@ return [ 'saving' => 'Wird gespeichert...', 'delete' => 'Löschen', 'deleting' => 'Löschen...', + 'confirm_delete' => 'Wollen Sie diesen Eintrag wirklich löschen?', 'undefined_tab' => 'Divers', 'field_off' => 'Aus', 'field_on' => 'An', @@ -157,7 +158,7 @@ return [ 'cancel' => 'Abbrechen', 'close' => 'Schließen', 'ok' => 'OK', - 'or' => 'or', + 'or' => 'oder', 'confirm_tab_close' => 'Wollen Sie den Tab wirklich schließen? Ungespeicherte Änderungen gehen verloren.', 'behavior_not_ready' => 'Formularverhalten kann nicht initialisiert werden, überprüfen Sie den Aufruf von makeLists() in Ihrem Controller.', 'preview_no_files_message' => 'Keine Dateien wurden hochgeladen', @@ -170,6 +171,7 @@ return [ 'concurrency_file_changed_title' => 'Datei wurde geändert', 'concurrency_file_changed_description' => 'Die Datei, welche Sie bearbeiten, wurde auf von einem anderen Benutzer geändert. Sie können die Datei entweder erneut laden, wodurch Ihre Änderungen verloren gehen oder Sie überschreiben die Datei auf dem Server', 'reload' => 'Erneut laden', + 'return_to_list' => 'Zurück zur Liste' ], 'relation' => [ 'missing_definition' => "Verhalten (behaviour) der Verbindung umfasst keine Definition für ':field'.", diff --git a/modules/cms/lang/de/lang.php b/modules/cms/lang/de/lang.php index <HASH>..<HASH> 100644 --- a/modules/cms/lang/de/lang.php +++ b/modules/cms/lang/de/lang.php @@ -122,7 +122,7 @@ return [ 'new' => 'Neues Partial' ], 'content' => [ - 'not_found_name' => "Die Inhaltsdatei ':name' wurde nicht gefundne.", + 'not_found_name' => "Die Inhaltsdatei ':name' wurde nicht gefunden.", 'menu_label' => 'Inhalt', 'no_list_records' => 'Keine Inhaltsdateien gefunden', 'delete_confirm_multiple' => 'Wollen Sie die ausgewählten Inhalte und Verzeichnisse wirklich löschen?', @@ -169,7 +169,7 @@ return [ 'create_directory' => 'Verzeichnis erstellen', 'rename' => 'Umbenennen', 'delete' => 'Löschen', - 'move' => 'Bewegen', + 'move' => 'Verschieben', 'new' => 'Neue Datei', 'rename_popup_title' => 'Umbenennen', 'rename_new_name' => 'Neuer Name', @@ -188,13 +188,13 @@ return [ 'error_uploading_file' => 'Fehler beim Hochladen der Datei ":name": :error', 'move_please_select' => 'Bitte auswählen', 'move_destination' => 'Zielverzeichnis', - 'move_popup_title' => 'Assets bewegen', - 'move_button' => 'Bewegen', + 'move_popup_title' => 'Assets verschieben', + 'move_button' => 'Verschieben', 'selected_files_not_found' => 'Ausgewählte Dateien nicht gefunden', 'select_destination_dir' => 'Bitte wählen Sie ein Zielverzeichnis aus', 'destination_not_found' => 'Zielverzeichnis wurde nicht gefunden', - 'error_moving_file' => 'Fehler beim Bewegen der Datei :file', - 'error_moving_directory' => 'Fehler beim Bewegen des Verzeichnisses :dir', + 'error_moving_file' => 'Fehler beim Verschieben der Datei :file', + 'error_moving_directory' => 'Fehler beim Verschieben des Verzeichnisses :dir', 'error_deleting_directory' => 'Fehler beim Löschen des Originalverzeichnisses :dir', 'path' => 'Pfad' ], @@ -284,4 +284,4 @@ return [ 'image_size' => 'Dimensionen:', 'selected_size' => 'Ausgewählt:' ] -]; \ No newline at end of file +];
Fixed german translation in backend module (#<I>) * Fixed german translation in backend module * Added confirm_delete translation * Added return_to_list translation * Fixed typo * Use 'Verschieben' as german translation
octobercms_october
train
2dfaaa8201c664efb99e54bc7a0b3b3ea07028bd
diff --git a/src/lib/modules/ens/ENSFunctions.js b/src/lib/modules/ens/ENSFunctions.js index <HASH>..<HASH> 100644 --- a/src/lib/modules/ens/ENSFunctions.js +++ b/src/lib/modules/ens/ENSFunctions.js @@ -1,10 +1,10 @@ +import { ZERO_ADDRESS } from '../../utils/addressUtils'; /*global web3*/ const namehash = require('eth-ens-namehash'); // Price of ENS registration contract functions const ENS_GAS_PRICE = 700000; const reverseAddressSuffix = '.addr.reverse'; -const voidAddr = '0x0000000000000000000000000000000000000000'; const NoDecodeAddrErr = 'Error: Couldn\'t decode address from ABI: 0x'; const NoDecodeStringErr = 'ERROR: The returned value is not a convertible string: 0x0'; @@ -66,7 +66,7 @@ function lookupAddress(address, ens, utils, createResolverContract, callback) { if (err) { return cb(err); } - if (resolverAddress === voidAddr) { + if (resolverAddress === ZERO_ADDRESS) { return cb('Address not associated to a resolver'); } createResolverContract(resolverAddress, (_, resolverContract) => { @@ -90,7 +90,7 @@ function resolveName(name, ens, createResolverContract, callback) { if (err) { return cb(err); } - if (resolverAddress === voidAddr) { + if (resolverAddress === ZERO_ADDRESS) { return cb('Name not yet registered'); } createResolverContract(resolverAddress, (_, resolverContract) => { diff --git a/src/lib/modules/ens/index.js b/src/lib/modules/ens/index.js index <HASH>..<HASH> 100644 --- a/src/lib/modules/ens/index.js +++ b/src/lib/modules/ens/index.js @@ -5,6 +5,7 @@ const async = require('async'); const embarkJsUtils = require('embarkjs').Utils; const reverseAddrSuffix = '.addr.reverse'; const ENSFunctions = require('./ENSFunctions'); +import { ZERO_ADDRESS } from '../../utils/addressUtils'; const MAINNET_ID = '1'; const ROPSTEN_ID = '3'; @@ -177,7 +178,7 @@ class ENS { if (err) { return cb(err); } - if (resolverAddress === '0x0000000000000000000000000000000000000000') { + if (resolverAddress === ZERO_ADDRESS) { return cb('Name not yet registered'); } next(null, resolverAddress); @@ -455,7 +456,7 @@ class ENS { if (err) { return next(err); } - if(resolverAddress === '0x0000000000000000000000000000000000000000') { + if(resolverAddress === ZERO_ADDRESS) { return next('Name not yet registered'); } next(null, resolverAddress);
refactor(@embark/ens): use ZERO_ADDRESS constant
embark-framework_embark
train
f8afb1712a2292d722d50acf99fff627e54d430f
diff --git a/policy.go b/policy.go index <HASH>..<HASH> 100644 --- a/policy.go +++ b/policy.go @@ -69,6 +69,6 @@ func whitelistPolicy(allowed []string, def Policy) PolicyFunc { // WithPolicy adds given policy to a connection when passed as option to NewConn() func WithPolicy(p Policy) func(*Conn) { return func(c *Conn) { - c.proxyHeaderPolicy = p + c.ProxyHeaderPolicy = p } } diff --git a/protocol.go b/protocol.go index <HASH>..<HASH> 100644 --- a/protocol.go +++ b/protocol.go @@ -25,7 +25,7 @@ type Conn struct { conn net.Conn header *Header once sync.Once - proxyHeaderPolicy Policy + ProxyHeaderPolicy Policy Validate Validator readErr error } @@ -168,7 +168,7 @@ func (p *Conn) readHeader() error { // let's act as if there was no error when PROXY protocol is not present. if err == ErrNoProxyProtocol { // but not if it is required that the connection has one - if p.proxyHeaderPolicy == REQUIRE { + if p.ProxyHeaderPolicy == REQUIRE { return err } @@ -177,7 +177,7 @@ func (p *Conn) readHeader() error { // proxy protocol header was found if err == nil && header != nil { - switch p.proxyHeaderPolicy { + switch p.ProxyHeaderPolicy { case REJECT: // this connection is not allowed to send one return ErrSuperfluousProxyHeader
make ProxyHeaderPolicy public to allow setting it via user-defined options
pires_go-proxyproto
train
86d8cbe8c813f7a101c744a1e856d23ac0900ab3
diff --git a/src/Tobiassjosten/Silex/ResponsibleServiceProvider.php b/src/Tobiassjosten/Silex/ResponsibleServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/Tobiassjosten/Silex/ResponsibleServiceProvider.php +++ b/src/Tobiassjosten/Silex/ResponsibleServiceProvider.php @@ -22,13 +22,12 @@ class ResponsibleServiceProvider implements ServiceProviderInterface if (empty($app['serializer'])) { $app->register(new SerializerServiceProvider()); } - - $app['dispatcher']->addSubscriber( - new ResponsibleListener($app['serializer']) - ); } public function boot(Application $app) { + $app['dispatcher']->addSubscriber( + new ResponsibleListener($app['serializer']) + ); } }
Register the event subscriber in the boot method The method register must never use a service.
tobiassjosten_ResponsibleServiceProvider
train
e0a2406ac8810b99edbc5f0985cce9cf9ae3fb50
diff --git a/lib/representative/abstract_xml.rb b/lib/representative/abstract_xml.rb index <HASH>..<HASH> 100644 --- a/lib/representative/abstract_xml.rb +++ b/lib/representative/abstract_xml.rb @@ -4,6 +4,11 @@ module Representative class AbstractXml < Base + def initialize(subject = nil, options = {}) + super(subject, options) + @naming_strategy = options[:naming_strategy] || :dasherize + end + # Generate an element. # # With two arguments, it generates an element with the specified text content. @@ -64,7 +69,7 @@ module Representative end end - generate_element(name.to_s.dasherize, resolved_attributes, content_string, &content_block) + generate_element(format_name(name), resolved_attributes, content_string, &content_block) end @@ -114,12 +119,28 @@ module Representative end private + + attr_reader :naming_strategy + + def format_name(name) + name = name.to_s + case naming_strategy + when :camelcase + name.camelcase(:lower) + when :dasherize + name.dasherize + when Symbol + name.send(naming_strategy) + else + naming_strategy.to_proc.call(name) + end + end def resolve_attributes(attributes) if attributes attributes.inject({}) do |resolved, (name, value_generator)| resolved_value = resolve_value(value_generator) - resolved[name.to_s.dasherize] = resolved_value unless resolved_value.nil? + resolved[format_name(name)] = resolved_value unless resolved_value.nil? resolved end end diff --git a/lib/representative/base.rb b/lib/representative/base.rb index <HASH>..<HASH> 100644 --- a/lib/representative/base.rb +++ b/lib/representative/base.rb @@ -7,6 +7,7 @@ module Representative def initialize(subject = nil, options = {}) @subjects = [subject] @inspector = options[:inspector] || ObjectInspector.new + @naming_strategy = options[:naming_strategy] end # Return the current "subject" of representation. 
diff --git a/spec/representative/nokogiri_spec.rb b/spec/representative/nokogiri_spec.rb index <HASH>..<HASH> 100644 --- a/spec/representative/nokogiri_spec.rb +++ b/spec/representative/nokogiri_spec.rb @@ -6,8 +6,8 @@ require "representative/xml_behaviour" describe Representative::Nokogiri do - def r - @representative ||= Representative::Nokogiri.new(@subject) + def r(options = {}) + @representative ||= Representative::Nokogiri.new(@subject, options) end def resulting_xml diff --git a/spec/representative/xml_behaviour.rb b/spec/representative/xml_behaviour.rb index <HASH>..<HASH> 100644 --- a/spec/representative/xml_behaviour.rb +++ b/spec/representative/xml_behaviour.rb @@ -23,6 +23,16 @@ shared_examples_for "an XML Representative" do resulting_xml.should == %(<full-name>Fredrick</full-name>) end + context "with an explicit element-naming strategy" do + + it "applies the naming strategy to produce element names" do + @subject.full_name = "Fredrick" + r(:naming_strategy => :camelcase).element :full_name + resulting_xml.should == %(<fullName>Fredrick</fullName>) + end + + end + describe "with attributes" do it "generates attributes on the element" do diff --git a/spec/representative/xml_spec.rb b/spec/representative/xml_spec.rb index <HASH>..<HASH> 100644 --- a/spec/representative/xml_spec.rb +++ b/spec/representative/xml_spec.rb @@ -10,8 +10,8 @@ describe Representative::Xml do @xml = Builder::XmlMarkup.new end - def r - @representative ||= Representative::Xml.new(@xml, @subject) + def r(options = {}) + @representative ||= Representative::Xml.new(@xml, @subject, options) end def resulting_xml diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,7 +1,5 @@ require 'rspec' -require "rubygems" - def undent(raw) if raw =~ /\A( +)/ indent = $1
Add the ability to control generation of XML element/attribute names.
mdub_representative
train
ba2bb14e2d75817fb046ec3a64044afdc1dbe73b
diff --git a/tofu/geom/_plot_optics.py b/tofu/geom/_plot_optics.py index <HASH>..<HASH> 100644 --- a/tofu/geom/_plot_optics.py +++ b/tofu/geom/_plot_optics.py @@ -987,9 +987,9 @@ def CrystalBragg_plot_line_tracing_on_det( l0, = dax.plot(xi[l, :], xj[l, :], ls='-', lw=1., label=lab) if plot_err: dax.plot( - xi_err[l, ...], xj_err[l, ...], + xi_err[l, ...], xj_err[l, ...], 'o-', ls='None', lw=1., c='g',# c=l0.get_color(), - marker='x', ms=1, + ms=1, #marker='x' ) if dleg is not False: @@ -1032,6 +1032,11 @@ def CrystalBragg_plot_johannerror( else: err_lamb_units = 'm' err_phi_units = angunits + if err == 'rel2': + err_lamb = 100.*err_lamb / (np.mean(lamb)) + err_phi = 100.*err_phi / (np.mean(phi)) + err_lamb_units = '%' + err_phi_units = '%' if wintit is None: wintit = _WINTIT
[#<I>] Updated plot_line_tracing_on_det and plot_johannerror methods
ToFuProject_tofu
train
b5cd46e74d07431aa73b9232a4f47a91108d6826
diff --git a/tests/SchemaTest.php b/tests/SchemaTest.php index <HASH>..<HASH> 100644 --- a/tests/SchemaTest.php +++ b/tests/SchemaTest.php @@ -110,6 +110,23 @@ class SchemaTest extends TestCase $this->assertCount(2, $tester->getFormat()); } + public function testDropSpace() + { + $mapper = $this->createMapper(); + $this->clean($mapper); + + $tester = $mapper->getSchema()->createSpace('tester', [ + 'id' => 'unsigned', + 'name' => 'string', + ])->addIndex('id'); + + $this->assertTrue($mapper->getSchema()->hasSpace($tester->getName())); + + $mapper->getSchema()->dropSpace($tester->getName()); + + $this->assertFalse($mapper->getSchema()->hasSpace($tester->getName())); + } + public function testDuplicateProperty() { $mapper = $this->createMapper();
Added test case to drop space function
tarantool-php_mapper
train
acd04953736dad346511d96e0f443adab408539d
diff --git a/source/awesome_tool/mvc/controllers/gap/segment.py b/source/awesome_tool/mvc/controllers/gap/segment.py index <HASH>..<HASH> 100644 --- a/source/awesome_tool/mvc/controllers/gap/segment.py +++ b/source/awesome_tool/mvc/controllers/gap/segment.py @@ -9,6 +9,8 @@ from gaphas.aspect import ItemHandleFinder, ItemHandleSelection, ItemPaintFocuse from awesome_tool.mvc.views.gap.connection import ConnectionView, DataFlowView +from math import pow + @Segment.when_type(ConnectionView) class TransitionSegment(LineSegment): @@ -28,7 +30,7 @@ class TransitionSegment(LineSegment): continue xp = (h1.pos.x + h2.pos.x) / 2 yp = (h1.pos.y + h2.pos.y) / 2 - if distance_point_point_fast((x, y), (xp, yp)) <= 2. / item.hierarchy_level: + if distance_point_point_fast((x, y), (xp, yp)) <= 1. / pow(2, item.hierarchy_level): segment = handles.index(h1) handles, ports = self.split_segment(segment) return handles and handles[0] diff --git a/source/awesome_tool/mvc/views/gap/state.py b/source/awesome_tool/mvc/views/gap/state.py index <HASH>..<HASH> 100644 --- a/source/awesome_tool/mvc/views/gap/state.py +++ b/source/awesome_tool/mvc/views/gap/state.py @@ -310,8 +310,8 @@ class StateView(Element): def set_font_description(): layout.set_markup('<span font_desc="%s %s">&#x%s;</span>' % (font_name, - font_size, - symbol)) + font_size, + symbol)) set_font_description() @@ -549,7 +549,8 @@ class StateView(Element): port_limitation_counter = 0 while not position_found and port_limitation_counter < 100: - position_found, new_pos_x, new_pos_y, side = self._check_pos(all_ports, port_size, side, new_port, new_pos, logic, in_port) + position_found, new_pos_x, new_pos_y, side = self._check_pos(all_ports, port_size, side, new_port, new_pos, + logic, in_port) new_pos = (new_pos_x, new_pos_y) port_limitation_counter += 1
Fix bug that caused waypoints to be added instead of moving one
DLR-RM_RAFCON
train
a90fc673faa48d97a086a75d65957dd52e081230
diff --git a/Model/JWTRequest.php b/Model/JWTRequest.php index <HASH>..<HASH> 100644 --- a/Model/JWTRequest.php +++ b/Model/JWTRequest.php @@ -51,6 +51,19 @@ class JWTRequest return $response->json(); } + + public function post($restUrl, $json) + { + $url = $this->buildURL($restUrl); + $options = ['headers' => $this->buildAuthHeader('POST', $restUrl)]; + $options['headers']['Content-Type'] = 'application/json'; + + $options['json'] = $json; + + $response = $this->client->post($url, $options); + + return $response->json(); + } public function get($restUrl) {
Update JWTRequest.php: added post method
thecatontheflat_atlassian-connect-bundle
train
9177a6bca8ca598a79166691a33e2359449d932c
diff --git a/package-command.php b/package-command.php index <HASH>..<HASH> 100644 --- a/package-command.php +++ b/package-command.php @@ -5,7 +5,7 @@ if ( ! class_exists( 'WP_CLI' ) ) { } $autoload = dirname( __FILE__ ) . '/vendor/autoload.php'; -if ( file_exists( $autoload ) ) { +if ( file_exists( $autoload ) && ! class_exists( 'Package_Command' ) ) { require_once $autoload; } WP_CLI::add_command( 'package', 'Package_Command' );
If `Package_Command` already exists, we don't need the autoloader
wp-cli_package-command
train
e7e88cbb5766eb5a50e6873f8bd6511580f29e27
diff --git a/libraries/lithium/console/command/create/Controller.php b/libraries/lithium/console/command/create/Controller.php index <HASH>..<HASH> 100644 --- a/libraries/lithium/console/command/create/Controller.php +++ b/libraries/lithium/console/command/create/Controller.php @@ -67,7 +67,7 @@ class Controller extends \lithium\console\command\Create { * @return string */ protected function _model($request) { - return Inflector::pluralize($request->action); + return Inflector::camelize(Inflector::pluralize($request->action)); } /** diff --git a/libraries/lithium/console/command/create/Model.php b/libraries/lithium/console/command/create/Model.php index <HASH>..<HASH> 100644 --- a/libraries/lithium/console/command/create/Model.php +++ b/libraries/lithium/console/command/create/Model.php @@ -26,7 +26,7 @@ class Model extends \lithium\console\command\Create { * @return string */ protected function _class($request) { - return Inflector::pluralize($request->action); + return Inflector::camelize(Inflector::pluralize($request->action)); } } diff --git a/libraries/lithium/tests/cases/console/command/create/ControllerTest.php b/libraries/lithium/tests/cases/console/command/create/ControllerTest.php index <HASH>..<HASH> 100644 --- a/libraries/lithium/tests/cases/console/command/create/ControllerTest.php +++ b/libraries/lithium/tests/cases/console/command/create/ControllerTest.php @@ -127,7 +127,7 @@ class PostsController extends \lithium\action\Controller { $msg = "Posts::delete can only be called with http:post or http:delete."; throw new DispatchException($msg); } - Post::find($this->request->id)->delete(); + Posts::find($this->request->id)->delete(); $this->redirect('Posts::index'); } }
fixing test case and uppercasing li3 create statements so that the correct models and controllers are created - even when called like _li3 create post_. fixes #<I>
UnionOfRAD_framework
train
63cc6bf34efabbff0cf40d8bfe44989a42dc0b0b
diff --git a/src/Aura/Di/ForgeInterface.php b/src/Aura/Di/ForgeInterface.php index <HASH>..<HASH> 100644 --- a/src/Aura/Di/ForgeInterface.php +++ b/src/Aura/Di/ForgeInterface.php @@ -1,50 +1,50 @@ <?php /** - * + * * This file is part of the Aura Project for PHP. - * + * * @package Aura.Di - * + * * @license http://opensource.org/licenses/bsd-license.php BSD - * + * */ namespace Aura\Di; /** - * + * * Defines the interface for Forge dependencies. - * + * * @package Aura.Di - * + * */ interface ForgeInterface { /** - * + * * Gets the injected Config object. - * + * * @return ConfigInterface - * + * */ public function getConfig(); /** - * - * Creates and returns a new instance of a class using reflection and - * the configuration parameters, optionally with overriding params. - * + * + * Creates and returns a new instance of a class using + * the configuration parameters, optionally with overriding params and setters. + * * @param string $class The class to instantiate. - * + * * @param array $params An associative array of override parameters where * the key is the name of the constructor parameter and the value is the * parameter value to use. - * + * * @param array $setters An associative array of override setters where * the key is the name of the setter method to call and the value is the * value to be passed to the setter method. - * + * * @return object - * + * */ public function newInstance($class, array $params = [], array $setters = []); }
ForgeInterface doc block corrected. Mention of reflection removed.
auraphp_Aura.Di
train
0eaf27a584addf4e99c5ccd6c7968824e0062b54
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -175,12 +175,20 @@ Type: Object A hash of options that are passed to watchify during instantiation. [Watchify Github README](https://github.com/substack/watchify#var-w--watchifyb-opts) +#### configure +Type: `Function (b)` + +An optional callback function that is invoked once before the bundle runs. This can be used for programatically configuring browserify using it's API. +`b` is the `browserify` instance for the bundle. + #### preBundleCB Type: `Function (b)` An optional callback function, that will be called before bundle completion. `b` is the `browerify` instance that will output the bundle. +__NB:__ This callback will be invoked every time the bundle is built so when used with the `watch` option set to true it will be called multiple times. Do not register transforms in this callback or they will end up being registered multiple times. + #### postBundleCB Type: `Function (err, src, next)` @@ -189,6 +197,8 @@ before writing of the bundle. The `err` and `src` arguments are provided directly from browserify. The `next` callback should be called with `(err, modifiedSrc)`; the `modifiedSrc` is what will be written to the output file. +__NB:__ This callback will be invoked every time the bundle is built so when used with the `watch` option set to true it will be called multiple times. + ## Contributing In lieu of a formal styleguide, take care to maintain the existing coding style. Add unit tests for any new or changed functionality. Lint and test your code. diff --git a/lib/runner.js b/lib/runner.js index <HASH>..<HASH> 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -123,6 +123,10 @@ GruntBrowserifyRunner.prototype = _.create(GruntBrowserifyRunner.prototype, { }); } + if (options.configure) { + options.configure(b); + } + doBundle(b, options, bundleComplete); }, diff --git a/test/browserify.test.js b/test/browserify.test.js index <HASH>..<HASH> 100644 --- a/test/browserify.test.js +++ b/test/browserify.test.js @@ -264,6 +264,19 @@ describe('grunt-browserify-runner', function () { }); }); + describe('when passing option of configure', function () { + it('calls the provided callback before bundling', function (done) { + var cb = Sinon.stub(); + var b = stubBrowserify('bundle'); + var runner = createRunner(b); + runner.run([], dest, {configure: cb}, function () { + assert.ok(cb.calledOnce); + assert.ok(cb.calledBefore(b().bundle)); + done(); + }); + }); + }); + describe('when passing option of preBundleCB', function () { it('calls the provided callback before bundling', function (done) { var cb = Sinon.stub();
Add configure option * Add configure option to runner * Update docs
jmreidy_grunt-browserify
train
6adaecc6cec58e53487cf85f26ac72859f45d201
diff --git a/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/main/java/org/xwiki/blame/internal/DefaultAnnotatedContent.java b/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/main/java/org/xwiki/blame/internal/DefaultAnnotatedContent.java index <HASH>..<HASH> 100644 --- a/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/main/java/org/xwiki/blame/internal/DefaultAnnotatedContent.java +++ b/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/main/java/org/xwiki/blame/internal/DefaultAnnotatedContent.java @@ -23,6 +23,7 @@ package org.xwiki.blame.internal; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.NoSuchElementException; import org.xwiki.blame.AnnotatedContent; import org.xwiki.blame.AnnotatedElement; @@ -75,6 +76,10 @@ public class DefaultAnnotatedContent<R, E> implements AnnotatedContent<R, E> @Override public AnnotatedElement<R, E> next() { + if (!hasNext()) { + throw new NoSuchElementException("No more annotated content"); + } + index += 1; return new DefaultAnnotatedElement<R, E>(sourceRevisions.get(index), initialContent.get(index)); } diff --git a/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/test/java/org/xwiki/blame/internal/DefaultBlameManagerTest.java b/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/test/java/org/xwiki/blame/internal/DefaultBlameManagerTest.java index <HASH>..<HASH> 100644 --- a/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/test/java/org/xwiki/blame/internal/DefaultBlameManagerTest.java +++ b/xwiki-commons-core/xwiki-commons-blame/xwiki-commons-blame-api/src/test/java/org/xwiki/blame/internal/DefaultBlameManagerTest.java @@ -23,6 +23,7 @@ package org.xwiki.blame.internal; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; +import java.util.NoSuchElementException; import org.junit.jupiter.api.Test; import org.xwiki.blame.AnnotatedContent; @@ -34,6 +35,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; @ComponentTest public class DefaultBlameManagerTest @@ -133,5 +136,12 @@ public class DefaultBlameManagerTest annotatedElement = iter.next(); assertThat(annotatedElement.getElement(), is("the lazy dog")); assertThat(annotatedElement.getRevision(), sameInstance(rev3)); + + assertThat(iter.hasNext(), is(false)); + Throwable exception = assertThrows(NoSuchElementException.class, () -> { + iter.next(); + }); + assertEquals("No more annotated content", exception.getMessage()); + } }
[Misc] Fix small bug reported by SonarQube
xwiki_xwiki-commons
train
b3e6b1d7c894216c4b26d1a9bbdd3dece2836c63
diff --git a/test/test-script-only.js b/test/test-script-only.js index <HASH>..<HASH> 100644 --- a/test/test-script-only.js +++ b/test/test-script-only.js @@ -7,7 +7,7 @@ require('./build-example')(['--scripts'], function(er) { var info = require(path.resolve('package.json')); assert.equal(info.name, 'loopback-example-app'); - var gitLsOutput = shell.exec('git ls-tree -r --name-only deploy').output; + var gitLsOutput = shell.exec('git ls-tree -r --long deploy').output; var paths = gitLsOutput.split('\n'); var bundled = paths.filter(function(path) { return path.match(/node_modules/);
test: improve debuggability of test-script-only I added this in an effort to track down why it was failing, despite a manual invocation afterwards clearly showing the syslog paths that the test is failing to find. For some reason, however, I can't reproduce the error after adding this.. so.. ¯\_(ツ)_/¯
strongloop_strong-build
train
406fc80b16f1e56d30be921002cfef14c4a6b1b0
diff --git a/upload/catalog/controller/account/address.php b/upload/catalog/controller/account/address.php index <HASH>..<HASH> 100644 --- a/upload/catalog/controller/account/address.php +++ b/upload/catalog/controller/account/address.php @@ -519,7 +519,7 @@ class ControllerAccountAddress extends Controller { foreach ($custom_fields as $custom_field) { if (($custom_field['location'] == 'address') && $custom_field['required'] && empty($this->request->post['custom_field'][$custom_field['custom_field_id']])) { $this->error['custom_field'][$custom_field['custom_field_id']] = sprintf($this->language->get('error_custom_field'), $custom_field['name']); - } elseif (($custom_field['type'] == 'text' && !empty($custom_field['validation'] && $custom_field['location'] == 'address')) && !filter_var($this->request->post['custom_field'][$custom_field['custom_field_id']], FILTER_VALIDATE_REGEXP, array('options' => array('regexp' => $custom_field['validation'])))) { + } elseif (($custom_field['type'] == 'text' && !empty($custom_field['validation']) && $custom_field['location'] == 'address') && !filter_var($this->request->post['custom_field'][$custom_field['custom_field_id']], FILTER_VALIDATE_REGEXP, array('options' => array('regexp' => $custom_field['validation'])))) { $this->error['custom_field'][$custom_field['custom_field_id']] = sprintf($this->language->get('error_custom_field_validate'), $custom_field['name']); } }
Update address.php Fixed syntax error.
opencart_opencart
train
22d10a73296fcdb6c91d6255029364661ff178ae
diff --git a/tweepy/api.py b/tweepy/api.py index <HASH>..<HASH> 100644 --- a/tweepy/api.py +++ b/tweepy/api.py @@ -77,6 +77,14 @@ class API(object): allowed_param = ['id', 'count', 'page'], require_auth = True ) + + """/related_results/show/:id.format""" + related_results = bind_api( + path = '/related_results/show/{id}.json', + payload_type = 'relation', payload_list = True, + allowed_param = ['id'], + require_auth = False + ) """/statuses/:id/retweeted_by/ids.format""" retweeted_by_ids = bind_api( diff --git a/tweepy/models.py b/tweepy/models.py index <HASH>..<HASH> 100644 --- a/tweepy/models.py +++ b/tweepy/models.py @@ -276,6 +276,19 @@ class List(Model): def is_subscribed(self, id): return self._api.is_subscribed_list(self.user.screen_name, self.slug, id) +class Relation(Model): + @classmethod + def parse(cls, api, json): + result = cls(api) + for k,v in json.items(): + if k == 'value' and json['kind'] in ['Tweet', 'LookedupStatus']: + setattr(result, k, Status.parse(api, v)) + elif k == 'results': + setattr(result, k, Relation.parse_list(api, v)) + else: + setattr(result, k, v) + return result + class JSONModel(Model): @@ -308,6 +321,7 @@ class ModelFactory(object): saved_search = SavedSearch search_result = SearchResult list = List + relation = Relation json = JSONModel ids = IDModel
Add support for related_result
tweepy_tweepy
train
56bafe2a112a6cae8a4509f77b3c90307ef00d98
diff --git a/packages/victory-pie/src/helper-methods.js b/packages/victory-pie/src/helper-methods.js index <HASH>..<HASH> 100644 --- a/packages/victory-pie/src/helper-methods.js +++ b/packages/victory-pie/src/helper-methods.js @@ -137,11 +137,15 @@ const getVerticalAnchor = (orientation) => { const getLabelProps = (text, dataProps, calculatedValues) => { const { index, datum, data, slice } = dataProps; - const { style, defaultRadius, origin, width, height, labelPosition } = calculatedValues; + const { style, defaultRadius, origin, width, height } = calculatedValues; const labelRadius = Helpers.evaluateProp( calculatedValues.labelRadius, assign({ text }, dataProps) ); + const labelPosition = Helpers.evaluateProp( + calculatedValues.labelPosition, + assign({ text }, dataProps) + ); const labelStyle = assign({ padding: 0 }, style.labels); const evaluatedStyle = Helpers.evaluateStyle( labelStyle, diff --git a/packages/victory-pie/src/victory-pie.js b/packages/victory-pie/src/victory-pie.js index <HASH>..<HASH> 100644 --- a/packages/victory-pie/src/victory-pie.js +++ b/packages/victory-pie/src/victory-pie.js @@ -123,7 +123,10 @@ class VictoryPie extends React.Component { height: CustomPropTypes.nonNegative, innerRadius: PropTypes.oneOfType([CustomPropTypes.nonNegative, PropTypes.func]), labelComponent: PropTypes.element, - labelPosition: PropTypes.oneOf(["startAngle", "centroid", "endAngle"]), + labelPosition: PropTypes.oneOfType([ + PropTypes.func, + PropTypes.oneOf(["startAngle", "centroid", "endAngle"]) + ]), labelRadius: PropTypes.oneOfType([CustomPropTypes.nonNegative, PropTypes.func]), labels: PropTypes.oneOfType([PropTypes.func, PropTypes.array]), name: PropTypes.string, diff --git a/stories/victory-pie.js b/stories/victory-pie.js index <HASH>..<HASH> 100644 --- a/stories/victory-pie.js +++ b/stories/victory-pie.js @@ -45,9 +45,18 @@ storiesOf("VictoryPie", module) )) .add("with a radius prop", () => <VictoryPie radius={100} />) .add("with an origin prop", () => <VictoryPie radius={100} origin={{ x: 150, y: 150 }} />) - .add("with a label position different than centroid", () => ( + .add("with a labelPosition different than centroid", () => ( <VictoryPie labelPosition={"startAngle"} /> )) + .add("with a labelPosition as a function", () => ( + <VictoryPie + startAngle={-90} + endAngle={90} + innerRadius={90} + data={[{ x: "Cat", y: 62 }, { x: "Bird", y: 25 }]} + labelPosition={({ index }) => index === 0 ? "endAngle" : "startAngle"} + /> + )) .add("with custom data and colors", () => ( <VictoryPie style={{
allow labelPosition as a function in VictoryPie
FormidableLabs_victory
train
22ba3f08095ee6d7a382dfe912f0a867b7f52e08
diff --git a/lib/navigationlib.php b/lib/navigationlib.php index <HASH>..<HASH> 100644 --- a/lib/navigationlib.php +++ b/lib/navigationlib.php @@ -1431,6 +1431,7 @@ class global_navigation extends navigation_node { $function($this); } else if (function_exists($oldfunction)) { // We continue to support the old function name to ensure backwards compatability + debugging("Deprecated local plugin navigation callback: Please rename '{$oldfunction}' to '{$function}'. Support for the old callback will be dropped after the release of 2.4", DEBUG_DEVELOPER); $oldfunction($this); } }
MDL-<I> navigation: Added debug notice to old (deprecated) local plugin callback
moodle_moodle
train
45d72d80b2dd1daf22ad47b6105a26c788d65881
diff --git a/runcommands/run.py b/runcommands/run.py index <HASH>..<HASH> 100644 --- a/runcommands/run.py +++ b/runcommands/run.py @@ -65,7 +65,7 @@ def run(_, echo = True all_command_run_args = {} - config_parser, file_name = make_run_args_config_parser() + config_parser = make_run_args_config_parser() for section in config_parser: match = re.search(r'^runcommands:(?P<name>.+)$', section) @@ -117,7 +117,7 @@ def run(_, def read_run_args(section, parser=None): """Read run args from file and environment.""" if parser is None: - parser, file_name = make_run_args_config_parser() + parser = make_run_args_config_parser() if isinstance(section, Command): name = section.name @@ -194,7 +194,7 @@ def read_run_args(section, parser=None): if remaining: raise RunCommandsError( 'Unknown args read from {file_name}: {remaining}' - .format(file_name=file_name, remaining=' '.join(remaining))) + .format(file_name=parser.file_name, remaining=' '.join(remaining))) args = vars(args) else: args = {} @@ -227,7 +227,8 @@ def make_run_args_config_parser(): else: file_name = None - return config_parser, file_name + config_parser.file_name = file_name + return config_parser def partition_argv(argv=None):
Attach file name to config parser In run.make_run_args_config_parser() This is used only when an existing config parser is passed in and there's an error reading run args from the file.
wylee_runcommands
train
fcd0c6b0a3848f123c23de0091f3c56103b7012d
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,11 +12,11 @@ __version__ = "1.2.6" packages = find_packages(exclude=("GPyOpt.testing",)) setup(name = 'GPyOpt', version = __version__, - author = read('AUTHORS.txt'), + author = read('AUTHORS.txt').replace('\n', ', ').replace('-', ''), author_email = "[email protected]", description = "The Bayesian Optimization Toolbox", - long_description=read('README.md'), - long_description_content_type='text/markdown', + long_description = read('README.md'), + long_description_content_type = 'text/markdown', license = "BSD 3-clause", keywords = "machine-learning gaussian-processes kernels optimization", url = "http://sheffieldml.github.io/GPyOpt/",
Fix the author names in setup.py
SheffieldML_GPyOpt
train
e03a3caf86144246347de5df8ee2087021aa3862
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/progress.js b/bundles/org.eclipse.orion.client.ui/web/orion/progress.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.ui/web/orion/progress.js +++ b/bundles/org.eclipse.orion.client.ui/web/orion/progress.js @@ -26,7 +26,11 @@ function(messages, lib, mOperationsDialog) { }); this._progressPane.addEventListener("click", function(evt) { //$NON-NLS-0$ - that._operationsDialog.show(); + if (that._operationsDialog.isShowing()) { + that._operationsDialog.hide(); + } else { + that._operationsDialog.show(); + } }); this._operationsDialog.setOperations(null, null); // initialize diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/webui/popupdialog.js b/bundles/org.eclipse.orion.client.ui/web/orion/webui/popupdialog.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.ui/web/orion/webui/popupdialog.js +++ b/bundles/org.eclipse.orion.client.ui/web/orion/webui/popupdialog.js @@ -119,6 +119,13 @@ define(['i18n!orion/widgets/nls/messages', 'orion/webui/littlelib', 'orion/webui */ show: function() { this._tooltip.show(); + }, + + /** + * @return True if this dialog is visible, false otherwise + */ + isShowing: function() { + return this._tooltip.isShowing(); //$NON-NLS-0$ } }; diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/webui/tooltip.js b/bundles/org.eclipse.orion.client.ui/web/orion/webui/tooltip.js index <HASH>..<HASH> 100644 --- a/bundles/org.eclipse.orion.client.ui/web/orion/webui/tooltip.js +++ b/bundles/org.eclipse.orion.client.ui/web/orion/webui/tooltip.js @@ -239,10 +239,17 @@ define(['orion/webui/littlelib'], function(lib) { }, /** + * @return True if this tooltip is visible, false otherwise + */ + isShowing: function() { + return this._tip && this._tip.classList.contains("tooltipShowing"); //$NON-NLS-0$ + }, + + /** * Show the tooltip. */ show: function() { - if (this._tip && this._tip.classList.contains("tooltipShowing")) { //$NON-NLS-0$ + if (this.isShowing()) { //$NON-NLS-0$ return; } if (this._timeout) { @@ -280,7 +287,7 @@ define(['orion/webui/littlelib'], function(lib) { window.clearTimeout(this._timeout); this._timeout = null; } - if (!this._tip || !this._tip.classList.contains("tooltipShowing")) { //$NON-NLS-0$ + if (!this.isShowing()) { //$NON-NLS-0$ return; } if (hideDelay === undefined) {
Bug <I> - Clicking on the progress stopwatch should toggle the visibility of the operations tooltip rather than just displaying it
eclipse_orion.client
train
63d0b16e9f30249bf88c053c41161c0265803107
diff --git a/lib/protobuf/rpc/service_directory.rb b/lib/protobuf/rpc/service_directory.rb index <HASH>..<HASH> 100644 --- a/lib/protobuf/rpc/service_directory.rb +++ b/lib/protobuf/rpc/service_directory.rb @@ -12,7 +12,7 @@ module Protobuf include ::Singleton include ::Protobuf::Logger::LogMethods - DEFAULT_ADDRESS = "255.255.255.255" + DEFAULT_ADDRESS = "0.0.0.0" DEFAULT_PORT = 53000 DEFAULT_TIMEOUT = 1
Fix default service directory address Using <I> does not work on osx.
ruby-protobuf_protobuf
train
242e4500444cce6530ad8ab2a3ab3e8520bc1c06
diff --git a/ethereum.gemspec b/ethereum.gemspec index <HASH>..<HASH> 100644 --- a/ethereum.gemspec +++ b/ethereum.gemspec @@ -30,5 +30,5 @@ Gem::Specification.new do |spec| spec.add_development_dependency "rspec" spec.add_development_dependency "pry" spec.add_dependency "activesupport" - spec.add_dependency "sha3-pure-ruby" + spec.add_dependency "sha3-pure-ruby", "0.1.1" end diff --git a/lib/ethereum/http_client.rb b/lib/ethereum/http_client.rb index <HASH>..<HASH> 100644 --- a/lib/ethereum/http_client.rb +++ b/lib/ethereum/http_client.rb @@ -1,13 +1,17 @@ require 'net/http' module Ethereum class HttpClient < Client - attr_accessor :command, :id, :host, :port, :batch, :converted_transactions, :uri, :ssl + attr_accessor :command, :id, :host, :port, :batch, :converted_transactions, :uri, :ssl, :logger, :log - def initialize(host, port, ssl = false) + def initialize(host, port, ssl = false, log = true) @host = host @port = port @id = 1 @ssl = ssl + @log = log + if @log == true + @logger = Logger.new("/tmp/ethereum_ruby_http.log") + end if ssl @uri = URI("https://#{@host}:#{@port}") else @@ -27,6 +31,9 @@ module Ethereum end header = {'Content-Type' => 'application/json'} request = ::Net::HTTP::Post.new(uri, header) + if @log == true + @logger.info("Sending #{payload.to_json}") + end request.body = payload.to_json response = http.request(request) return JSON.parse(response.body) diff --git a/lib/ethereum/ipc_client.rb b/lib/ethereum/ipc_client.rb index <HASH>..<HASH> 100644 --- a/lib/ethereum/ipc_client.rb +++ b/lib/ethereum/ipc_client.rb @@ -1,12 +1,16 @@ require 'socket' module Ethereum class IpcClient < Client - attr_accessor :command, :id, :ipcpath, :batch, :converted_transactions + attr_accessor :command, :id, :ipcpath, :batch, :converted_transactions, :log, :logger - def initialize(ipcpath = "#{ENV['HOME']}/.ethereum/geth.ipc") + def initialize(ipcpath = "#{ENV['HOME']}/.ethereum/geth.ipc", log = true) @ipcpath = ipcpath @id = 1 @batch = [] + @log = log + if @log == true + @logger = Logger.new("/tmp/ethereum_ruby_ipc.log") + end end RPC_COMMANDS.each do |rpc_command| @@ -16,6 +20,9 @@ module Ethereum payload = {jsonrpc: "2.0", method: command, params: args, id: get_id} socket = UNIXSocket.new(@ipcpath) socket.write(payload.to_json) + if @log == true + @logger.info("Sending #{payload.to_json}") + end socket.close_write read = socket.read socket.close_read diff --git a/lib/ethereum/version.rb b/lib/ethereum/version.rb index <HASH>..<HASH> 100644 --- a/lib/ethereum/version.rb +++ b/lib/ethereum/version.rb @@ -1,3 +1,3 @@ module Ethereum - VERSION = "0.4.99" + VERSION = "0.5.0" end
add logging and revert sha3-pure-ruby library
EthWorks_ethereum.rb
train
88006e89c4d948bb047a43a7097b54cb5b9e2178
diff --git a/src/Field/Select.php b/src/Field/Select.php index <HASH>..<HASH> 100644 --- a/src/Field/Select.php +++ b/src/Field/Select.php @@ -218,7 +218,7 @@ class Select extends \FormHandler\Field\Field . (isset($this->tab_index) ? ' tabindex="' . $this->tab_index . '" ' : '') . (isset($this->extra) ? ' ' . $this->extra : '' ) . ($this->getDisabled() && !$this->getDisabledInExtra() ? 'disabled="disabled" ' : '') - . 'data-option-count="'.count($sOptions).'" ', + . 'data-option-count="'.count($options).'" ', $sOptions, (isset($this->extra_after) ? $this->extra_after : '') );
- Add option count to select field
FormHandler_FormHandler
train
0694dfd8dc419c3dffa6bb5236643ef10c6abaef
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -168,4 +168,4 @@ epub_title = project # epub_uid = '' # A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] \ No newline at end of file +epub_exclude_files = ['search.html'] diff --git a/moban_handlebars/engine.py b/moban_handlebars/engine.py index <HASH>..<HASH> 100644 --- a/moban_handlebars/engine.py +++ b/moban_handlebars/engine.py @@ -1,5 +1,5 @@ -import codecs import sys +import codecs import moban.utils as utils from pybars import Compiler
:hammer: code refactoring. running make format
moremoban_moban-handlebars
train
ff551248a197fd363cdaaa3695a6ba8d8046fae8
diff --git a/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java b/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java +++ b/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java @@ -207,6 +207,12 @@ final class S3ProxyHandler extends AbstractHandler { } headerIdentity = values[0]; headerSignature = values[1]; + } else if (headerAuthorization != null && + headerAuthorization.startsWith("AWS4-HMAC-SHA256 ")) { + // Fail V4 signature requests to allow clients to retry with V2. + sendSimpleErrorResponse(response, S3ErrorCode.INVALID_ARGUMENT); + baseRequest.setHandled(true); + return; } String parameterIdentity = request.getParameter("AWSAccessKeyId"); String parameterSignature = request.getParameter("Signature");
Fail V4 signature requests with InvalidArgument This allows V4 clients like s3cmd to retry with V2 signatures. References #<I>.
gaul_s3proxy
train
72c3e32eb02ad5016b5a0ddb5165b7ce45fd2ec4
diff --git a/ncclient/transport/session.py b/ncclient/transport/session.py index <HASH>..<HASH> 100644 --- a/ncclient/transport/session.py +++ b/ncclient/transport/session.py @@ -91,8 +91,10 @@ class Session(Thread): self.send(HelloHandler.build(self._client_capabilities, self._device_handler)) logger.debug('starting main loop') self.start() - # we expect server's hello message - init_event.wait() + # we expect server's hello message, if server doesn't responds in 60 seconds raise exception + init_event.wait(60) + if not init_event.is_set(): + raise SSHError("Capability exchange timed out") # received hello message or an error happened self.remove_listener(listener) if error[0]:
Update session.py timeout if the capability exchange doesn't happen within a minute
ncclient_ncclient
train
e45d648ce94bc7f5fd594bcfbcdc3ff2fca13f71
diff --git a/cmd/admin-subnet-health.go b/cmd/admin-subnet-health.go index <HASH>..<HASH> 100644 --- a/cmd/admin-subnet-health.go +++ b/cmd/admin-subnet-health.go @@ -98,7 +98,7 @@ EXAMPLES: 3. Periodically upload health diagnostics for the MinIO cluster with alias 'play' (https://play.min.io by default) to SUBNET every 2 days {{.Prompt}} {{.HelpName}} play --schedule 2 4. Generate health diagnostics report for the MinIO cluster with alias 'play' (https://play.min.io by default) to and output them to the current working directory - {{.Prompt}} {{.HelpName}} play --offline + {{.Prompt}} {{.HelpName}} play --airgap `, } @@ -219,7 +219,7 @@ func fetchSubnetUploadFlags(ctx *cli.Context) (string, int, string, bool) { // If set, the health report file will not be uploaded // to subnet and will only be saved locally. - offline := ctx.Bool("offline") + offline := ctx.Bool("airgap") || ctx.Bool("offline") return license, schedule, name, offline } @@ -233,7 +233,7 @@ func validateFlags(uploadToSubnet bool, uploadPeriodically bool, name string) er } if globalDevMode { - return errors.New("--dev is not applicable in offline mode") + return errors.New("--dev is not applicable in airgap mode") } if uploadPeriodically { diff --git a/cmd/admin-subnet-register.go b/cmd/admin-subnet-register.go index <HASH>..<HASH> 100644 --- a/cmd/admin-subnet-register.go +++ b/cmd/admin-subnet-register.go @@ -105,7 +105,7 @@ type SubnetMFAReq struct { func mainAdminRegister(ctx *cli.Context) error { checkAdminRegisterSyntax(ctx) - offlineMode := ctx.Bool(("offline")) + offlineMode := ctx.Bool("airgap") || ctx.Bool("offline") if !offlineMode && !subnetReachable() { console.Fatalln(subnetNotReachableMsg()) } diff --git a/cmd/subnet-utils.go b/cmd/subnet-utils.go index <HASH>..<HASH> 100644 --- a/cmd/subnet-utils.go +++ b/cmd/subnet-utils.go @@ -53,7 +53,7 @@ var subnetCommonFlags = []cli.Flag{ Usage: "Specify the HTTP(S) proxy URL to use for connecting to SUBNET", }, cli.BoolFlag{ - Name: "offline", + Name: "airgap", Usage: "Use in environments without network access to SUBNET (e.g. airgapped, firewalled, etc.)", }, cli.BoolFlag{ @@ -61,6 +61,12 @@ var subnetCommonFlags = []cli.Flag{ Usage: "Development mode - talks to local SUBNET", Hidden: true, }, + cli.BoolFlag{ + // Deprecated Oct 2021. Same as airgap, retaining as hidden for backward compatibility + Name: "offline", + Usage: "Use in environments without network access to SUBNET (e.g. airgapped, firewalled, etc.)", + Hidden: true, + }, } func subnetBaseURL() string {
Rename --offline flag to --airgap (#<I>) Retain `--offline` as hidden for backward compatibility (won't show up in help)
minio_mc
train
6fb58a7e0b989419f159da16ec63f01ea1a0fc7d
diff --git a/js/huobipro.js b/js/huobipro.js index <HASH>..<HASH> 100644 --- a/js/huobipro.js +++ b/js/huobipro.js @@ -337,8 +337,7 @@ module.exports = class huobipro extends Exchange { side = typeParts[0]; type = typeParts[1]; } - let amount = this.safeFloat (trade, 'amount'); - amount = this.safeFloat (trade, 'filled-amount', amount); + let amount = this.safeFloat2 (trade, 'filled-amount', 'amount'); return { 'info': trade, 'id': this.safeString (trade, 'id'),
minor edit to filled amount in huobipro parseTrade
ccxt_ccxt
train
32b6b5dbd8037f619e39565e8f193c5d25043b42
diff --git a/config/swagger-lume.php b/config/swagger-lume.php index <HASH>..<HASH> 100644 --- a/config/swagger-lume.php +++ b/config/swagger-lume.php @@ -117,6 +117,16 @@ return [ */ 'proxy' => false, + /* + |-------------------------------------------------------------------------- + | Uncomment to pass the validatorUrl parameter to SwaggerUi init on the JS + | side. A null value here disables validation. A string will override + | the default url. If not specified, behavior is default and validation + | is enabled. + |-------------------------------------------------------------------------- + */ + // 'validatorUrl' => null, + 'headers' => [ /* |-------------------------------------------------------------------------- diff --git a/resources/views/index.blade.php b/resources/views/index.blade.php index <HASH>..<HASH> 100644 --- a/resources/views/index.blade.php +++ b/resources/views/index.blade.php @@ -45,6 +45,10 @@ window.swaggerUi = new SwaggerUi({ url: url, dom_id: "swagger-ui-container", + @if(array_key_exists('validatorUrl', get_defined_vars())) + // This differentiates between a null value and an undefined variable + validatorUrl: {!! isset($validatorUrl) ? '"' . $validatorUrl . '"' : 'null' !!}, + @endif supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch'], onComplete: function(swaggerApi, swaggerUi){ @if(isset($requestHeaders)) diff --git a/src/routes.php b/src/routes.php index <HASH>..<HASH> 100644 --- a/src/routes.php +++ b/src/routes.php @@ -34,6 +34,15 @@ $app->get(config('swagger-lume.routes.api'), function () { (new Request)->setTrustedProxies([$proxy]); } + $extras = []; + $conf = config('swagger-lume'); + if (array_key_exists('validatorUrl', $conf)) { + // This allows for a null value, since this has potentially + // desirable side effects for swagger. See the view for more + // details. + $extras['validatorUrl'] = $conf['validatorUrl']; + } + //need the / at the end to avoid CORS errors on Homestead systems. $response = new Response( view('swagger-lume::index', [ @@ -43,7 +52,7 @@ $app->get(config('swagger-lume.routes.api'), function () { 'secure' => (new Request)->secure(), 'urlToDocs' => url(config('swagger-lume.routes.docs')), 'requestHeaders' => config('swagger-lume.headers.request'), - ]), + ],$extras), 200 );
Adding support for validatorUrl option for swagger
DarkaOnLine_SwaggerLume
train
66d1ac07c81a8965f0ba2cf36386ec1831ca5d25
diff --git a/lib/ar_mysql_column_charset.rb b/lib/ar_mysql_column_charset.rb index <HASH>..<HASH> 100644 --- a/lib/ar_mysql_column_charset.rb +++ b/lib/ar_mysql_column_charset.rb @@ -38,6 +38,31 @@ module ActiveRecord::ConnectionAdapters def migration_keys super + [:charset, :collation] end + + def utf8mb4_supported? + if @utf8mb4_supported.nil? + @utf8mb4_supported = !select("show character set like 'utf8mb4'").empty? + else + @utf8mb4_supported + end + end + + def charset_collation(charset, collation) + [charset, collation].map { |name| + case name + when nil + nil + when /\A(utf8mb4(_\w*)?)\z/ + if utf8mb4_supported? + $1 + else + "utf8#{$2}" + end + else + name.to_s + end + } + end end prepend CharsetSupport @@ -52,12 +77,14 @@ module ActiveRecord::ConnectionAdapters end def add_column_options!(sql, options) - if options[:charset] - sql << " CHARACTER SET #{options[:charset]}" + charset, collation = @conn.charset_collation(options[:charset], options[:collation]) + + if charset + sql << " CHARACTER SET #{charset}" end - if options[:collation] - sql << " COLLATE #{options[:collation]}" + if collation + sql << " COLLATE #{collation}" end super
Fall back utf8mb4 to utf8 if the server does not support it.
huginn_huginn
train
ba7826fe13f19e5bccfba901e12b0f01c50a10bf
diff --git a/src/model/fees.js b/src/model/fees.js index <HASH>..<HASH> 100644 --- a/src/model/fees.js +++ b/src/model/fees.js @@ -34,7 +34,7 @@ const mosaicDefinitionTransaction = 10 * 1000000; * * @type {number} */ -const namespaceAndMosaicCommon = 20 * 1000000; +const namespaceAndMosaicCommon = 3 * 1000000; /** * The cosignature transaction fee
include namespace and mosaics base fee
QuantumMechanics_NEM-sdk
train
3420973512ca4dbd46731871849b46392802979b
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup( version='0.1.13', description='Custom management command that compares the MD5 sum and ' 'etag from S3 and if the two are the same skips file copy.', - long_description=open('README.md').read(), +# long_description=open('README.md').read(), author='Anton Agestam', author_email='[email protected]', packages=find_packages(),
Update setup.py: remove long_description for Heroku Python buildpack support
antonagestam_collectfast
train
fc8ff20d1f1eed6ada323481d57ac54ae27ec6ab
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,6 +13,8 @@ install: - pip install -r requirements.txt - travis_retry python setup.py install - pip install coveralls + - pip install --upgrade pip six + - pip install --upgrade setuptools script: coverage run --source=betfairlightweight setup.py test diff --git a/betfairlightweight/metadata.py b/betfairlightweight/metadata.py index <HASH>..<HASH> 100644 --- a/betfairlightweight/metadata.py +++ b/betfairlightweight/metadata.py @@ -60,6 +60,7 @@ list_market_profit_and_loss = { """ Currency Parameters + http://docs.developer.betfair.com/docs/pages/viewpage.action?pageId=4391789 """ currency_parameters = { @@ -112,5 +113,10 @@ currency_parameters = { 'min_bet_size': 6, 'min_bet_payout': 30, 'min_bsp_liability': 30 - } + }, + 'RON': { + 'min_bet_size': 10, + 'min_bet_payout': 50, + 'min_bsp_liability': (10, 50) # (back, lay) + }, } diff --git a/betfairlightweight/resources/baseresource.py b/betfairlightweight/resources/baseresource.py index <HASH>..<HASH> 100644 --- a/betfairlightweight/resources/baseresource.py +++ b/betfairlightweight/resources/baseresource.py @@ -1,6 +1,6 @@ import datetime import ciso8601 -import json +import ujson as json from ..compat import ( basestring, diff --git a/betfairlightweight/resources/bettingresources.py b/betfairlightweight/resources/bettingresources.py index <HASH>..<HASH> 100644 --- a/betfairlightweight/resources/bettingresources.py +++ b/betfairlightweight/resources/bettingresources.py @@ -223,9 +223,11 @@ class MarketCatalogue(BaseResource): """ -__slots__ is a terrible hack with nasty, hard-to-fathom side effects that should only be used by programmers at -grandmaster and wizard levels. Unfortunately it has gained an enormous undeserved popularity amongst the novices and -apprentices, who should know better than to use this magic incantation casually. +__slots__ is a terrible hack with nasty, hard-to-fathom side +effects that should only be used by programmers at grandmaster and +wizard levels. Unfortunately it has gained an enormous undeserved +popularity amongst the novices and apprentices, who should know +better than to use this magic incantation casually. 
""" @@ -235,7 +237,9 @@ class PriceSize(object): :type size: float """ - __slots__ = ['price', 'size'] + __slots__ = [ + 'price', 'size' + ] def __init__(self, price, size): self.price = price @@ -266,7 +270,9 @@ class RunnerBookEX(object): :type traded_volume: list[PriceSize] """ - __slots__ = ['available_to_back', 'available_to_lay', 'traded_volume'] + __slots__ = [ + 'available_to_back', 'available_to_lay', 'traded_volume' + ] def __init__(self, availableToBack=None, availableToLay=None, tradedVolume=None): self.available_to_back = [PriceSize(**i) for i in availableToBack] @@ -346,8 +352,10 @@ class RunnerBook(object): :type total_matched: float """ - __slots__ = ['selection_id', 'status', 'total_matched', 'adjustment_factor', 'handicap', 'last_price_traded', - 'removal_date', 'sp', 'ex', 'orders', 'matches'] + __slots__ = [ + 'selection_id', 'status', 'total_matched', 'adjustment_factor', 'handicap', 'last_price_traded', 'removal_date', + 'sp', 'ex', 'orders', 'matches' + ] def __init__(self, selectionId, status, adjustmentFactor, handicap, lastPriceTraded=None, totalMatched=None, removalDate=None, sp=None, ex=None, orders=None, matches=None): diff --git a/tests/test_baseresource.py b/tests/test_baseresource.py index <HASH>..<HASH> 100644 --- a/tests/test_baseresource.py +++ b/tests/test_baseresource.py @@ -1,6 +1,6 @@ import unittest import datetime -import json +import ujson as json from betfairlightweight.resources.baseresource import BaseResource from tests.tools import create_mock_json diff --git a/tests/test_metadata.py b/tests/test_metadata.py index <HASH>..<HASH> 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -92,5 +92,10 @@ class MetadataTest(unittest.TestCase): 'min_bet_size': 6, 'min_bet_payout': 30, 'min_bsp_liability': 30 - } + }, + 'RON': { + 'min_bet_size': 10, + 'min_bet_payout': 50, + 'min_bsp_liability': (10, 50) # (back, lay) + }, }
ujson added to baseresource; RON currency added; travis updated as requests has broken the internet
liampauling_betfair
train
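A minimal sketch (illustrative only, not part of the commit above) of reading the new RON entry added to currency_parameters; the module path and values follow the diff:

from betfairlightweight.metadata import currency_parameters

# RON minimum-stake rules introduced above; min_bsp_liability is a (back, lay)
# tuple, unlike the single values used for most other currencies.
ron = currency_parameters["RON"]
assert ron["min_bet_size"] == 10
assert ron["min_bet_payout"] == 50
assert ron["min_bsp_liability"] == (10, 50)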
5da9e750cf8887d32a848a5c0925bb09e3d1b98d
diff --git a/admin/settings/language.php b/admin/settings/language.php index <HASH>..<HASH> 100644 --- a/admin/settings/language.php +++ b/admin/settings/language.php @@ -7,6 +7,7 @@ if ($hassiteconfig) { // speedup for non-admins, add all caps used on this page // "languageandlocation" settingpage $temp = new admin_settingpage('langsettings', get_string('languagesettings', 'admin')); +$temp->add(new admin_setting_configcheckbox('autolang', get_string('autolang', 'admin'), get_string('configautolang', 'admin'), 1)); $temp->add(new admin_setting_configselect('lang', get_string('lang', 'admin'), get_string('configlang', 'admin'), current_language(), get_list_of_languages())); // $CFG->lang might be set in installer already, default en or en_utf8 is in setup.php $temp->add(new admin_setting_configcheckbox('langmenu', get_string('langmenu', 'admin'), get_string('configlangmenu', 'admin'), 1)); $temp->add(new admin_setting_langlist());
MDL-<I> option to disable lang autodetect from browser; merged from MOODLE_<I>_STABLE
moodle_moodle
train
71b59cc37de471024e9b8e920230f38a77cde21f
diff --git a/autofit/graphical/expectation_propagation.py b/autofit/graphical/expectation_propagation.py index <HASH>..<HASH> 100644 --- a/autofit/graphical/expectation_propagation.py +++ b/autofit/graphical/expectation_propagation.py @@ -357,7 +357,13 @@ class EPOptimiser: ) -> EPMeanField: for _ in range(max_steps): for factor, optimiser in self.factor_optimisers.items(): - model_approx, status = optimiser.optimise(factor, model_approx) + try: + model_approx, status = optimiser.optimise(factor, model_approx) + except (ValueError, ArithmeticError, RuntimeError) as e: + status = Status( + False, + f"Factor: {factor} experienced error {e}") + if self.callback(factor, model_approx, status): break # callback controls convergence else: # If no break do next iteration
Making expectation propagation more stable for breaking factor optimisations
rhayes777_PyAutoFit
train
0c895b85d3a995b32e4823c121024f8aed8a3f28
diff --git a/core-bundle/contao/drivers/DC_Folder.php b/core-bundle/contao/drivers/DC_Folder.php index <HASH>..<HASH> 100644 --- a/core-bundle/contao/drivers/DC_Folder.php +++ b/core-bundle/contao/drivers/DC_Folder.php @@ -1800,24 +1800,24 @@ class DC_Folder extends \DataContainer implements \listable, \editable switch ($type) { case 'Added'; - $arrMessages[] = '<p class="tl_new">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncAdded'], $file) . '</p>'; + $arrMessages[] = '<p class="tl_new">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncAdded'], specialchars($file)) . '</p>'; break; case 'Changed'; - $arrMessages[] = '<p class="tl_info">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncChanged'], $file) . '</p>'; + $arrMessages[] = '<p class="tl_info">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncChanged'], specialchars($file)) . '</p>'; break; case 'Unchanged'; - $arrMessages[] = '<p class="tl_confirm hidden">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncUnchanged'], $file) . '</p>'; + $arrMessages[] = '<p class="tl_confirm hidden">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncUnchanged'], specialchars($file)) . '</p>'; break; case 'Moved'; list($source, $target) = explode(' to ', $file, 2); - $arrMessages[] = '<p class="tl_info">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncMoved'], $source, $target) . '</p>'; + $arrMessages[] = '<p class="tl_info">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncMoved'], specialchars($source), specialchars($target)) . '</p>'; break; case 'Deleted'; - $arrMessages[] = '<p class="tl_error">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncDeleted'], $file) . '</p>'; + $arrMessages[] = '<p class="tl_error">' . sprintf($GLOBALS['TL_LANG']['tl_files']['syncDeleted'], specialchars($file)) . '</p>'; break; }
[Core] Encode file names when showing the file sync log (see #<I>)
contao_contao
train
1af2d62be0011b1a5de659de280b811bfbcf0ff7
diff --git a/Auth/OpenID/Consumer.php b/Auth/OpenID/Consumer.php index <HASH>..<HASH> 100644 --- a/Auth/OpenID/Consumer.php +++ b/Auth/OpenID/Consumer.php @@ -518,7 +518,7 @@ class Auth_OpenID_DiffieHellmanSHA256ConsumerSession extends */ class Auth_OpenID_PlainTextConsumerSession { var $session_type = 'no-encryption'; - var $allowed_assoc_types = array('HMAC-SHA1'); + var $allowed_assoc_types = array('HMAC-SHA1', 'HMAC-SHA256'); function getRequest() { diff --git a/Tests/Auth/OpenID/Consumer.php b/Tests/Auth/OpenID/Consumer.php index <HASH>..<HASH> 100644 --- a/Tests/Auth/OpenID/Consumer.php +++ b/Tests/Auth/OpenID/Consumer.php @@ -2218,6 +2218,43 @@ class TestCreateAssociationRequest extends PHPUnit_TestCase { $args->toPostArgs()); } + function test_noEncryptionSendsTypeHMACSHA256() + { + $session_type = 'no-encryption'; + $this->assoc_type = 'HMAC-SHA256'; + + list($session, $args) = $this->consumer->_createAssociateRequest( + $this->endpoint, $this->assoc_type, $session_type); + + $this->assertTrue(is_a($session, 'Auth_OpenID_PlainTextConsumerSession')); + + $expected = Auth_OpenID_Message::fromOpenIDArgs( + array('ns' => Auth_OpenID_OPENID2_NS, + 'session_type'=>$session_type, + 'mode'=>'associate', + 'assoc_type'=>$this->assoc_type)); + + $this->assertEquals($expected->toPostArgs(), + $args->toPostArgs()); + + $response = Auth_OpenID_Message::fromOpenIDArgs( + array('ns' => Auth_OpenID_OPENID2_NS, + 'session_type'=>$session_type, + 'assoc_type'=>$this->assoc_type, + 'expires_in' => '10000000000', + 'mac_key' => 'ZM9v', + 'assoc_handle' => 'turnme' + ) + ); + + $assoc = $this->consumer->_extractAssociation($response, $session); + + $this->assertTrue($assoc !== null); + $this->assertTrue(is_a($assoc, 'Auth_OpenID_Association')); + $this->assertTrue($assoc->assoc_type = $this->assoc_type); + $this->assertTrue($assoc->session_type = $session_type); + } + function test_noEncryptionCompatibility() { $this->endpoint->use_compatibility = true;
[project @ Add HMAC-SHA<I> to plaintext consumer session assoc types]
openid_php-openid
train
ff273b68ff1b016e4369370599abae82ad0c004a
diff --git a/grade/edit/tree/calculation.php b/grade/edit/tree/calculation.php index <HASH>..<HASH> 100644 --- a/grade/edit/tree/calculation.php +++ b/grade/edit/tree/calculation.php @@ -30,7 +30,7 @@ require_once 'calculation_form.php'; $courseid = required_param('courseid', PARAM_INT); $id = required_param('id', PARAM_INT); $section = optional_param('section', 'calculation', PARAM_ALPHA); -$idnumbers = optional_param('idnumbers', null, PARAM_RAW); +$idnumbers = optional_param_array('idnumbers', null, PARAM_RAW); $url = new moodle_url('/grade/edit/tree/calculation.php', array('id'=>$id, 'courseid'=>$courseid)); if ($section !== 'calculation') { diff --git a/grade/import/xml/grade_import_form.php b/grade/import/xml/grade_import_form.php index <HASH>..<HASH> 100644 --- a/grade/import/xml/grade_import_form.php +++ b/grade/import/xml/grade_import_form.php @@ -41,6 +41,7 @@ class grade_import_form extends moodleform { $mform->disabledIf('userfile', 'url', 'noteq', ''); $mform->addElement('text', 'url', get_string('fileurl', 'gradeimport_xml'), 'size="80"'); + $mform->setType('url', PARAM_URL); $mform->disabledIf('url', 'userfile', 'noteq', ''); if (!empty($CFG->gradepublishing)) {
MDL-<I> grade: Notice during import and calculation
moodle_moodle
train
76e39ebcf584042fab4f224a6bd2c903bb0c8aff
diff --git a/doc/source/whatsnew/v1.0.0.rst b/doc/source/whatsnew/v1.0.0.rst index <HASH>..<HASH> 100644 --- a/doc/source/whatsnew/v1.0.0.rst +++ b/doc/source/whatsnew/v1.0.0.rst @@ -604,6 +604,7 @@ Reshaping - Bug :meth:`Series.pct_change` where supplying an anchored frequency would throw a ValueError (:issue:`28664`) - Bug where :meth:`DataFrame.equals` returned True incorrectly in some cases when two DataFrames had the same columns in different orders (:issue:`28839`) - Bug in :meth:`DataFrame.replace` that caused non-numeric replacer's dtype not respected (:issue:`26632`) +- Bug in :func:`melt` where supplying mixed strings and numeric values for ``id_vars`` or ``value_vars`` would incorrectly raise a ``ValueError`` (:issue:`29718`) Sparse diff --git a/pandas/core/reshape/melt.py b/pandas/core/reshape/melt.py index <HASH>..<HASH> 100644 --- a/pandas/core/reshape/melt.py +++ b/pandas/core/reshape/melt.py @@ -11,6 +11,7 @@ from pandas.core.dtypes.generic import ABCMultiIndex from pandas.core.dtypes.missing import notna from pandas.core.arrays import Categorical +import pandas.core.common as com from pandas.core.frame import DataFrame, _shared_docs from pandas.core.indexes.base import Index from pandas.core.reshape.concat import concat @@ -47,7 +48,7 @@ def melt( else: # Check that `id_vars` are in frame id_vars = list(id_vars) - missing = Index(np.ravel(id_vars)).difference(cols) + missing = Index(com.flatten(id_vars)).difference(cols) if not missing.empty: raise KeyError( "The following 'id_vars' are not present" @@ -69,7 +70,7 @@ def melt( else: value_vars = list(value_vars) # Check that `value_vars` are in frame - missing = Index(np.ravel(value_vars)).difference(cols) + missing = Index(com.flatten(value_vars)).difference(cols) if not missing.empty: raise KeyError( "The following 'value_vars' are not present in" diff --git a/pandas/tests/reshape/test_melt.py b/pandas/tests/reshape/test_melt.py index <HASH>..<HASH> 100644 --- a/pandas/tests/reshape/test_melt.py +++ b/pandas/tests/reshape/test_melt.py @@ -317,6 +317,22 @@ class TestMelt: ): multi.melt(["A"], ["F"], col_level=0) + def test_melt_mixed_int_str_id_vars(self): + # GH 29718 + df = DataFrame({0: ["foo"], "a": ["bar"], "b": [1], "d": [2]}) + result = melt(df, id_vars=[0, "a"], value_vars=["b", "d"]) + expected = DataFrame( + {0: ["foo"] * 2, "a": ["bar"] * 2, "variable": list("bd"), "value": [1, 2]} + ) + tm.assert_frame_equal(result, expected) + + def test_melt_mixed_int_str_value_vars(self): + # GH 29718 + df = DataFrame({0: ["foo"], "a": ["bar"]}) + result = melt(df, value_vars=[0, "a"]) + expected = DataFrame({"variable": [0, "a"], "value": ["foo", "bar"]}) + tm.assert_frame_equal(result, expected) + class TestLreshape: def test_pairs(self):
BUG: Fix melt with mixed int/str columns (#<I>)
pandas-dev_pandas
train
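A small, hedged illustration of the melt fix above, mirroring the new tests (assumes a pandas build that includes this change):

import pandas as pd

# Mixed integer and string column labels in id_vars/value_vars; before the fix,
# flattening the mixed labels with np.ravel made the membership check raise.
df = pd.DataFrame({0: ["foo"], "a": ["bar"], "b": [1], "d": [2]})
result = pd.melt(df, id_vars=[0, "a"], value_vars=["b", "d"])
print(result)  # two rows: variable 'b' -> 1 and 'd' -> 2, keeping id columns 0 and 'a'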
e25283602816af9614c5f8594f45bb09defa09de
diff --git a/benchmarks/src/main/java/org/tinylog/benchmarks/logging/log4j2__/LifeCycle.java b/benchmarks/src/main/java/org/tinylog/benchmarks/logging/log4j2__/LifeCycle.java index <HASH>..<HASH> 100644 --- a/benchmarks/src/main/java/org/tinylog/benchmarks/logging/log4j2__/LifeCycle.java +++ b/benchmarks/src/main/java/org/tinylog/benchmarks/logging/log4j2__/LifeCycle.java @@ -58,7 +58,7 @@ public class LifeCycle extends AbstractLifeCycle { builder.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); builder.append("<Configuration>"); builder.append("<Appenders>"); - builder.append("<File name=\"file\" fileName=\"" + file + "\""); + builder.append("<File name=\"file\" fileName=\"" + file + "\" immediateFlush=\"" + !async + "\""); builder.append(" bufferedIO=\"" + async + "\" bufferSize=\"" + BUFFER_SIZE + "\">"); builder.append("<PatternLayout><Pattern>");
Deactivate immediate flush for async Log4j2 benchmarks
pmwmedia_tinylog
train
c7a2800e095693bb008c7c99445b99286cfd2ff5
diff --git a/test/index.test.js b/test/index.test.js index <HASH>..<HASH> 100644 --- a/test/index.test.js +++ b/test/index.test.js @@ -210,9 +210,10 @@ describe('Feathers Objection Service', () => { }) }) - it('allows eager queries', () => { + it('allows eager queries with pick', () => { return companies.find({ query: { $eager: 'ceos', $pick: ['ceos'] } }).then(data => { expect(data[0].ceos).to.be.ok + expect(data[0].ceo).to.be.undefined }) })
Added test assert and fixed test name
feathersjs-ecosystem_feathers-objection
train
355624380d9428e21e73b29232e74dda2020ac2e
diff --git a/lib/runner.js b/lib/runner.js index <HASH>..<HASH> 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -305,6 +305,7 @@ Runner.prototype.run = function(){ // uncaught exception process.on('uncaughtException', function(err){ self.fail(self.test, err); + self.emit('test end', self.test); self.emit('end'); });
Fixed "test end" event for uncaughtExceptions. Closes #<I>
mochajs_mocha
train
b4304f8e79f8227fc7841ecc7cb80b94cbbd493d
diff --git a/staging/src/k8s.io/client-go/rest/request.go b/staging/src/k8s.io/client-go/rest/request.go index <HASH>..<HASH> 100644 --- a/staging/src/k8s.io/client-go/rest/request.go +++ b/staging/src/k8s.io/client-go/rest/request.go @@ -823,6 +823,23 @@ func (r *Request) transformResponse(resp *http.Response, req *http.Request) Resu } } +// truncateBody decides if the body should be truncated, based on the glog Verbosity. +func truncateBody(body string) string { + max := 0 + switch { + case bool(glog.V(9)): + max = 10240 + case bool(glog.V(8)): + max = 1024 + } + + if len(body) <= max { + return body + } + + return body[:max] + fmt.Sprintf(" [truncated %d chars]", len(body)-max) +} + // glogBody logs a body output that could be either JSON or protobuf. It explicitly guards against // allocating a new string for the body output unless necessary. Uses a simple heuristic to determine // whether the body is printable. @@ -831,9 +848,9 @@ func glogBody(prefix string, body []byte) { if bytes.IndexFunc(body, func(r rune) bool { return r < 0x0a }) != -1 { - glog.Infof("%s:\n%s", prefix, hex.Dump(body)) + glog.Infof("%s:\n%s", prefix, truncateBody(hex.Dump(body))) } else { - glog.Infof("%s: %s", prefix, string(body)) + glog.Infof("%s: %s", prefix, truncateBody(string(body))) } } }
client-go: Truncate body based on Verbosity level
kubernetes_kubernetes
train
55393a556be814b99f1c249163ba68c20c5c3b59
diff --git a/lib/mongo_ha/retryable.rb b/lib/mongo_ha/retryable.rb index <HASH>..<HASH> 100644 --- a/lib/mongo_ha/retryable.rb +++ b/lib/mongo_ha/retryable.rb @@ -1,4 +1,5 @@ require 'mongo/retryable' +require 'timeout' module Mongo module Retryable @@ -7,8 +8,10 @@ module Mongo begin attempt += 1 yield(server || cluster.next_primary) - rescue Error::SocketError, Error::SocketTimeoutError => e + rescue Error::SocketError, Error::SocketTimeoutError, Timeout::Error => e server = nil + # Mongo also raises the generic Timeout, so check the backtrace to make sure it was from mongo. + raise(e) if e.is_a?(::Timeout::Error) && e.backtrace && !e.backtrace.first.include?("/mongo/") raise(e) if attempt > cluster.max_read_retries || (session && session.in_transaction?) log_retry(e) cluster.scan! diff --git a/lib/mongo_ha/version.rb b/lib/mongo_ha/version.rb index <HASH>..<HASH> 100644 --- a/lib/mongo_ha/version.rb +++ b/lib/mongo_ha/version.rb @@ -1,3 +1,3 @@ module MongoHA - VERSION = '2.6.0'.freeze + VERSION = '2.6.1'.freeze end
Also retry on generic Timeout when it comes from Mongo
reidmorrison_mongo_ha
train
b796a0ba32f560ae7e2e0d7b030ea1c6d57a0ee2
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -38,11 +38,12 @@ module.exports = function(options){ var rethrow = jade.runtime.rethrow.toString(); var merge = jade.runtime.merge.toString(); var runtime = (escape + attrs + rethrow + merge).replace(/exports\./g, ''); + var jadeObj = "var jade = {attrs:attrs, escape:escape, interp:null};"; // needed to make things work... js = 'if(window.' + options.templatesArray + ' === undefined){window.' + options.templatesArray + '={};}\n' + js; js = js.replace('function anonymous', options.templatesArray + '[\'' + templateName + '\'] = function'); - js = js.replace(/buf = \[\];/, 'buf = [];' + runtime + '\n'); // make the runtime functions available + js = js.replace(/buf = \[\];/, 'buf = [];' + runtime + jadeObj + '\n'); // make the runtime functions available js = js.replace(/^attrs.*$/gm, ''); // attrs line references "jade" object that doesn't exist return js;
hacky-hack way of fixing for latest version of jade. please use asset-rack instead
troygoode_connect-assets-jade
train
95bc903315c3c23b288005e6cbc47fbe60c955d9
diff --git a/src/lib/connectors/jquery.js b/src/lib/connectors/jquery.js index <HASH>..<HASH> 100644 --- a/src/lib/connectors/jquery.js +++ b/src/lib/connectors/jquery.js @@ -20,7 +20,7 @@ JqueryConnector.prototype.request = function (params, cb) { var ajax = { url: this.host.makeUrl(params), data: params.body, - method: params.method, + type: params.method, dataType: 'text', headers: this.host.getHeaders(params.headers), done: cb
Issue<I> - jQuery.ajax HTTP method set as "type:", not "method:".
elastic_elasticsearch-js
train
4f58a739b55a09fd9eb5923b6f6bc90cebaf11ee
diff --git a/components/global/variables_dark.js b/components/global/variables_dark.js index <HASH>..<HASH> 100644 --- a/components/global/variables_dark.js +++ b/components/global/variables_dark.js @@ -35,7 +35,7 @@ export default { '--ring-tag-background-color': '#3e4d59', '--ring-removed-background-color': '#8f5247', '--ring-warning-background-color': '#593d01', - '--ring-added-background-color': '#294436', + '--ring-added-background-color': '#365947', /* Code */ '--ring-code-background-color': '#2b2b2b',
RG-<I> update "added text" color in dark theme
JetBrains_ring-ui
train
3e789ec12fbc570674ffe6582f949cdbbbe403c8
diff --git a/scripts/buildPackages.js b/scripts/buildPackages.js index <HASH>..<HASH> 100644 --- a/scripts/buildPackages.js +++ b/scripts/buildPackages.js @@ -17,6 +17,17 @@ const packages = [ 'Shadows', 'ThemeManager' ] + }, + { + filename: 'style.js', + styleComponents: [ + 'Colors', + 'Typography', + 'BorderRadiuses', + 'Shadows', + 'Spacings', + 'ThemeManager' + ] } ];
Create a style package (#<I>)
wix_react-native-ui-lib
train
0a7b1af1e84e93c5a9d894614c433dc1c5030126
diff --git a/opal/corelib/number.rb b/opal/corelib/number.rb index <HASH>..<HASH> 100644 --- a/opal/corelib/number.rb +++ b/opal/corelib/number.rb @@ -807,11 +807,7 @@ class Number < Numeric end def positive? - if self == 0 - false - else - `self == Infinity || 1 / self > 0` - end + `self != 0 && (self == Infinity || 1 / self > 0)` end def negative?
Optimize zero check in Number#positive?
opal_opal
train
11ea058cba00d40a0fb34fb94ac71f39be7a6f68
diff --git a/readme/markdown.py b/readme/markdown.py index <HASH>..<HASH> 100644 --- a/readme/markdown.py +++ b/readme/markdown.py @@ -21,5 +21,8 @@ from .clean import clean def render(raw): rendered = markdown.markdown( raw, - extensions=['markdown.extensions.fenced_code']) + extensions=[ + 'markdown.extensions.fenced_code', + 'markdown.extensions.smart_strong', + ]) return clean(rendered or raw), bool(rendered) diff --git a/tests/test_markdown.py b/tests/test_markdown.py index <HASH>..<HASH> 100755 --- a/tests/test_markdown.py +++ b/tests/test_markdown.py @@ -81,6 +81,14 @@ def read(fn): assert out == expected_html +def test_smart_strong(): + markdown_markup = 'Text with double__underscore__words.' + out, rendered = render(markdown_markup) + expected_html = '<p>Text with double__underscore__words.</p>' + assert rendered + assert out == expected_html + + def test_headings_and_paragraphs(): _do_test_with_files('headings_and_paragraphs')
Add support for markdown.extensions.smart_strong. Allows markup like: Text with double__underscore__words. The double underscores within words are rendered literally instead of being treated as strong formatting.
pypa_readme_renderer
train
1859d02e14a6dde3591e64a26fed14f792491978
diff --git a/lib/sambal.rb b/lib/sambal.rb index <HASH>..<HASH> 100644 --- a/lib/sambal.rb +++ b/lib/sambal.rb @@ -74,6 +74,14 @@ module Sambal end end + def logger + @logger ||= Logger.new(STDOUT) + end + + def logger=(l) + @logger = l + end + def file_context(path) if (path_parts = path.split('/')).length>1 file = path_parts.pop @@ -146,6 +154,30 @@ module Sambal ensure t.close end + + def rmdir(dir) + cd dir + begin + ls.each do |name, meta| + if meta[:type]==:file + response = del name + elsif meta[:type]==:directory && !(name =~ /^\.+$/) + response = rmdir(name) + end + raise InternalError.new response.message if response && response.failure? + end + cd '..' + response = ask_wrapped 'rmdir', dir + next_line = response.split("\n")[1] + if next_line =~ /^smb:.*\\>/ + Response.new(response, true) + else + Response.new(response, false) + end + rescue InternalError => e + Response.new(e.message, false) + end + end def del(file) begin diff --git a/spec/sambal/client_spec.rb b/spec/sambal/client_spec.rb index <HASH>..<HASH> 100644 --- a/spec/sambal/client_spec.rb +++ b/spec/sambal/client_spec.rb @@ -157,6 +157,21 @@ describe Sambal::Client do @sambal_client.ls.should have_key("#{testfile}") end + it "should recursively delete a directory" do + @sambal_client.cd('/') + @sambal_client.cd(test_directory) + @sambal_client.put_content("some content", "file_to_delete").should be_successful + @sambal_client.cd('/') + @sambal_client.rmdir("#{test_directory}").should be_successful + @sambal_client.cd('/') + @sambal_client.ls.should_not have_key("#{test_directory}") + end + + it "should not be successful when recursively deleting a nonexistant directory" do + @sambal_client.cd('/') + @sambal_client.rmdir("this_doesnt_exist").should_not be_successful + end + it "should not be successful when command fails" do result = @sambal_client.put("jhfahsf iasifasifh", "jsfijsf ijidjag") result.should_not be_successful
Add rmdir which can also recursively delete a directory. Unfortunately it seems as if smbclient cannot do this on its own, so the implementation rests on ruby code.
johnae_sambal
train
7e9b55bda5d018c7b9aa5f30b481dac0ccd2173a
diff --git a/src/Leevel/Throttler/RateLimiter.php b/src/Leevel/Throttler/RateLimiter.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Throttler/RateLimiter.php +++ b/src/Leevel/Throttler/RateLimiter.php @@ -126,7 +126,7 @@ class RateLimiter */ public function getRemaining(): int { - return $this->getRemainingReal() > 0 ?: 0; + return ($remainingReal = $this->getRemainingReal()) > 0 ? $remainingReal : 0; } /** diff --git a/src/Leevel/Throttler/Throttler.php b/src/Leevel/Throttler/Throttler.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Throttler/Throttler.php +++ b/src/Leevel/Throttler/Throttler.php @@ -78,8 +78,8 @@ class Throttler implements IThrottler $key = $this->getRequestKey($key); if (isset($this->rateLimiter[$key])) { return $this->rateLimiter[$key] - ->limit($limit) - ->time($time); + ->setLimit($limit) + ->setTime($time); } return $this->rateLimiter[$key] = new RateLimiter(
fix(throttler): fix RateLimiter::getRemaining return value and call setLimit/setTime in Throttler
hunzhiwange_framework
train
aaf320462433e91e2b8538c8a0517f84371d47bb
diff --git a/canvasapi/account.py b/canvasapi/account.py index <HASH>..<HASH> 100644 --- a/canvasapi/account.py +++ b/canvasapi/account.py @@ -135,6 +135,40 @@ class Account(CanvasObject): return AccountNotification(self._requester, response.json()) + def update_global_notification(self, account_notification, notification_id, **kwargs): + """ + Updates a global notification. + + :calls: `PUT /api/v1/accounts/:account_id/account_notifications/:id \ + <https://canvas.instructure.com/doc/api/account_notifications.html#method.account_notifications.update>`_ + + :param notification_id: The notification ID of the desired notification. + :type: int + + :rtype: :class:`canvasapi.account.AccountNotification` + """ + required_key_list = ["subject", "message", "start_at", "end_at"] + required_keys_present = all( + (x in account_notification for x in required_key_list) + ) + + if isinstance(account_notification, dict) and required_keys_present: + kwargs["account_notification"] = account_notification + else: + raise RequiredFieldMissing( + ( + "account_notification must be a dictionary with keys " + "'subject', 'message', 'start_at', and 'end_at'." + ) + ) + + response = self._requester.request( + "PUT", + "accounts/{}/account_notifications/{}".format(self.id, notification_id), + _kwargs=combine_kwargs(**kwargs), + ) + return AccountNotification(self._requester, response.json()) + def close_notification_for_user(self, user, notification): """ If the user no long wants to see a notification, it can be diff --git a/tests/fixtures/account.json b/tests/fixtures/account.json index <HASH>..<HASH> 100644 --- a/tests/fixtures/account.json +++ b/tests/fixtures/account.json @@ -665,6 +665,17 @@ "data": {}, "status_code": 200 }, + "update_notification": { + "method": "PUT", + "endpoint": "accounts/1/account_notifications/1", + "data": { + "subject": "subject", + "message": "Message", + "start_at": "2015-04-01T00:00:00Z", + "end_at": "2018-04-01T00:00:00Z" + }, + "status_code": 200 + }, "create_group_category": { "method": "POST", "endpoint": "accounts/1/group_categories", diff --git a/tests/test_account.py b/tests/test_account.py index <HASH>..<HASH> 100644 --- a/tests/test_account.py +++ b/tests/test_account.py @@ -77,6 +77,21 @@ class TestAccount(unittest.TestCase): self.assertIsInstance(closed_notif, AccountNotification) self.assertTrue(hasattr(closed_notif, "subject")) + # update_global_notification() + def test_update_global_notification(self, m): + register_uris({"account": ["update_notification"]}, m) + + notif_dict = { + "subject": "subject", + "message": "Message", + "start_at": "2015-04-01T00:00:00Z", + "end_at": "2018-04-01T00:00:00Z", + } + + updated_notif = self.account.update_global_notification(notif_dict, 1) + + self.assertIsInstance(updated_notif, AccountNotification) + def test_close_notification_for_user_obj(self, m): register_uris({"account": ["close_notification"]}, m)
wrote update global notification function, test function, and fixture
ucfopen_canvasapi
train
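A hypothetical usage sketch for the Account.update_global_notification method added above; the base URL, API key, account id and notification id are placeholders:

from canvasapi import Canvas

canvas = Canvas("https://canvas.example.edu", "API_KEY")  # placeholder credentials
account = canvas.get_account(1)

notification = {
    "subject": "Maintenance window",
    "message": "Canvas will be unavailable on Saturday.",
    "start_at": "2015-04-01T00:00:00Z",
    "end_at": "2018-04-01T00:00:00Z",
}
# All four keys are required; a missing key raises RequiredFieldMissing.
updated = account.update_global_notification(notification, 1)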
37e19b9248bfaaab7aa3411b89bdc056d08db8c3
diff --git a/tests/test_client.py b/tests/test_client.py index <HASH>..<HASH> 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -294,8 +294,9 @@ class TestCacheStoreTransportUsage: assert cached_external_element is external_element @pytest.mark.parametrize("external_reference_tag", ("import", "include")) + @pytest.mark.parametrize("main_WSDL_cached", (False, True)) def test_using_cached_XSD_schema_should_avoid_store_and_transport(self, - external_reference_tag): + external_reference_tag, main_WSDL_cached): """ When an imported or included XSD schema is located in the client's cache, it should be read from there instead of fetching its data from @@ -331,35 +332,46 @@ class TestCacheStoreTransportUsage: c1 = suds.client.Client("suds://wsdl", cachingpolicy=0, cache=cache, documentStore=store1, transport=MockTransport()) assert [x for x, y in cache.mock_operation_log] == ["get", "put"] * 2 - id1 = cache.mock_operation_log[0][1][0] - assert id1 == cache.mock_operation_log[1][1][0] - id2 = cache.mock_operation_log[2][1][0] - assert id2 == cache.mock_operation_log[3][1][0] + id_wsdl = cache.mock_operation_log[0][1][0] + assert id_wsdl == cache.mock_operation_log[1][1][0] + id_xsd = cache.mock_operation_log[2][1][0] + assert id_xsd == cache.mock_operation_log[3][1][0] assert len(cache.mock_data) == 2 - wsdl_document = cache.mock_data[id1] + wsdl_document = cache.mock_data[id_wsdl] assert c1.wsdl.root is wsdl_document.root() - # Making sure id2 refers to the actual external XSD is a bit tricky due - # to the fact that the WSDL object merged in the external XSD content - # and lost the reference to the external XSD object itself. As a - # workaround we make sure that the XSD schema XML element read from the - # XSD object cached as id2 matches the one read from the WSDL object's - # XSD schema. - cached_external_element = cache.mock_data[id2].root().children[0] + # Making sure id_xsd refers to the actual external XSD is a bit tricky + # due to the fact that the WSDL object merged in the external XSD + # content and lost the reference to the external XSD object itself. As + # a workaround we make sure that the XSD schema XML element read from + # the XSD object cached as id_xsd matches the one read from the WSDL + # object's XSD schema. + xsd_imported_document = cache.mock_data[id_xsd] + cached_external_element = xsd_imported_document.root().children[0] external_element = c1.wsdl.schema.elements[external_element_id].root assert cached_external_element is external_element # Make certain the same external XSD document is fetched from the cache # and not using the document store or the transport. 
- del cache.mock_data[id1] - assert len(cache.mock_data) == 1 cache.mock_operation_log = [] - store2 = MockDocumentStore(wsdl=wsdl) + if main_WSDL_cached: + cache.mock_put_config = MockCache.FAIL + store2 = MockDocumentStore(mock_fail=True) + else: + del cache.mock_data[id_wsdl] + assert len(cache.mock_data) == 1 + store2 = MockDocumentStore(wsdl=wsdl) c2 = suds.client.Client("suds://wsdl", cachingpolicy=0, cache=cache, documentStore=store2, transport=MockTransport()) - assert [(x, y[0]) for x, y in cache.mock_operation_log] == [ - ("get", id1), ("put", id1), ("get", id2)] + expected_cache_operations = [("get", id_wsdl)] + if not main_WSDL_cached: + expected_cache_operations.append(("put", id_wsdl)) + expected_cache_operations.append(("get", id_xsd)) + cache_operations = [(x, y[0]) for x, y in cache.mock_operation_log] + assert cache_operations == expected_cache_operations + if not main_WSDL_cached: + assert store2.mock_log == ["suds://wsdl"] assert len(cache.mock_data) == 2 - assert store2.mock_log == ["suds://wsdl"] + assert cache.mock_data[id_xsd] is xsd_imported_document external_element = c2.wsdl.schema.elements[external_element_id].root assert cached_external_element is external_element
test using a cached XSD schema with an already cached main WSDL. Updated the test_using_cached_XSD_schema_should_avoid_store_and_transport() test to be able to run with and without first removing the main WSDL schema from the cache before checking that the cached XSD schema gets used. Minor stylistic changes.
suds-community_suds
train
d896acf12654c0eb9d4b1b3fcd308a3e4514682c
diff --git a/src/Kunstmaan/NodeBundle/Entity/NodeIterator.php b/src/Kunstmaan/NodeBundle/Entity/NodeIterator.php index <HASH>..<HASH> 100644 --- a/src/Kunstmaan/NodeBundle/Entity/NodeIterator.php +++ b/src/Kunstmaan/NodeBundle/Entity/NodeIterator.php @@ -23,7 +23,7 @@ class NodeIterator implements \RecursiveIterator } /** - * @return \?RecursiveIterator + * @return \RecursiveIterator */ #[\ReturnTypeWillChange] public function getChildren() @@ -31,26 +31,41 @@ class NodeIterator implements \RecursiveIterator return new NodeIterator($this->_data->current()->getChildren()); } + /** + * @return Node + */ public function current() { return $this->_data->current(); } + /** + * @return void + */ public function next() { $this->_data->next(); } + /** + * @return int + */ public function key() { return $this->_data->key(); } + /** + * @return bool + */ public function valid() { return $this->_data->current() instanceof Node; } + /** + * @return void + */ public function rewind() { $this->_data->first();
[NodeBundle] Add docblock return types for node iterator class
Kunstmaan_KunstmaanBundlesCMS
train
e3724939d2d10d41fc5f36bbfcac70e9deb50e30
diff --git a/eemeter/derivatives.py b/eemeter/derivatives.py index <HASH>..<HASH> 100644 --- a/eemeter/derivatives.py +++ b/eemeter/derivatives.py @@ -68,7 +68,10 @@ def _compute_error_bands_metered_savings( num_parameters = float(totals_metrics.num_parameters) base_obs = float(totals_metrics.observed_length) - post_obs = float(results["reporting_observed"].dropna().shape[0]) + if interval.startswith("billing") & len(results.dropna().index) > 0: + post_obs = float(round((results.index[-1] - results.index[0]).days / 30.0)) + else: + post_obs = float(results["reporting_observed"].dropna().shape[0]) degrees_of_freedom = float(base_obs - num_parameters) single_tailed_confidence_level = 1 - ((1 - confidence_level) / 2) @@ -258,8 +261,16 @@ def _compute_error_bands_modeled_savings( base_obs_baseline = float(totals_metrics_baseline.observed_length) base_obs_reporting = float(totals_metrics_reporting.observed_length) - post_obs_baseline = float(results["modeled_baseline_usage"].dropna().shape[0]) - post_obs_reporting = float(results["modeled_reporting_usage"].dropna().shape[0]) + + if interval_baseline.startswith("billing") & len(results.dropna().index) > 0: + post_obs_baseline = float(round((results.index[-1] - results.index[0]).days / 30.0)) + else: + post_obs_baseline = float(results["modeled_baseline_usage"].dropna().shape[0]) + + if interval_reporting.startswith("billing") & len(results.dropna().index) > 0: + post_obs_reporting = float(round((results.index[-1] - results.index[0]).days / 30.0)) + else: + post_obs_reporting = float(results["modeled_reporting_usage"].dropna().shape[0]) degrees_of_freedom_baseline = float(base_obs_baseline - num_parameters_baseline) degrees_of_freedom_reporting = float(base_obs_reporting - num_parameters_reporting) diff --git a/tests/test_derivatives.py b/tests/test_derivatives.py index <HASH>..<HASH> 100644 --- a/tests/test_derivatives.py +++ b/tests/test_derivatives.py @@ -127,6 +127,24 @@ def baseline_model_billing(il_electricity_cdd_hdd_billing_monthly): @pytest.fixture +def reporting_model_billing(il_electricity_cdd_hdd_billing_monthly): + meter_data = il_electricity_cdd_hdd_billing_monthly["meter_data"] + meter_data.value = meter_data.value - 50 + temperature_data = il_electricity_cdd_hdd_billing_monthly["temperature_data"] + blackout_start_date = il_electricity_cdd_hdd_billing_monthly["blackout_start_date"] + baseline_meter_data, warnings = get_baseline_data( + meter_data, end=blackout_start_date + ) + baseline_data = create_caltrack_billing_design_matrix( + baseline_meter_data, temperature_data + ) + model_results = fit_caltrack_usage_per_day_model( + baseline_data, use_billing_presets=True, weights_col="n_days_kept" + ) + return model_results + + [email protected] def reporting_meter_data_billing(): index = pd.date_range("2011-01-01", freq="MS", periods=13, tz="UTC") return pd.DataFrame({"value": 1}, index=index) @@ -506,6 +524,38 @@ def test_modeled_savings_cdd_hdd_hourly( @pytest.fixture +def normal_year_temperature_data(): + index = pd.date_range("2015-01-01", freq="D", periods=365, tz="UTC") + np.random.seed(0) + return pd.Series(np.random.rand(365) * 30 + 45, index=index).asfreq("H").ffill() + + +def test_modeled_savings_cdd_hdd_billing( + baseline_model_billing, + reporting_model_billing, + normal_year_temperature_data +): + + results, error_bands = modeled_savings( + baseline_model_billing, + reporting_model_billing, + pd.date_range("2015-01-01", freq="D", periods=365, tz="UTC"), + normal_year_temperature_data + ) + assert list(results.columns) == 
[ + "modeled_baseline_usage", + "modeled_reporting_usage", + "modeled_savings", + ] + assert round(results.modeled_savings.sum(), 2) == 587.44 + assert sorted(error_bands.keys()) == [ + "FSU Error Band", + "FSU Error Band: Baseline", + "FSU Error Band: Reporting", + ] + + [email protected] def reporting_meter_data_billing_not_aligned(): index = pd.date_range("2001-01-01", freq="MS", periods=13, tz="UTC") return pd.DataFrame({"value": None}, index=index)
Fix FSU error band calculation when using billing data
openeemeter_eemeter
train
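A standalone sketch of the billing-interval observation count introduced above: for billing data, the post-period observation count is approximated from the span of the reporting index in roughly 30-day periods rather than from the row count:

import pandas as pd

index = pd.date_range("2011-01-01", freq="MS", periods=13, tz="UTC")
results = pd.DataFrame({"reporting_observed": 1.0}, index=index)

# Same arithmetic as the billing branch added to _compute_error_bands_metered_savings.
post_obs = float(round((results.index[-1] - results.index[0]).days / 30.0))
print(post_obs)  # 12.0 for twelve monthly billing periods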
cb75fef7c156e1a845086ebcfd79ac1eb38661af
diff --git a/inverse_covariance/tests/model_average_test.py b/inverse_covariance/tests/model_average_test.py index <HASH>..<HASH> 100644 --- a/inverse_covariance/tests/model_average_test.py +++ b/inverse_covariance/tests/model_average_test.py @@ -65,12 +65,19 @@ class TestModelAverage(object): assert len(ma.lams_) == 0 assert len(ma.subsets_) == 0 - for e in ma.estimators_: + for eidx, e in enumerate(ma.estimators_): assert isinstance(e, params_in['estimator'].__class__) + # sklearn doesnt have this but ours do if hasattr(e, 'is_fitted'): assert e.is_fitted == True + # check that all lambdas used where different + if not ma.use_scalar_penalty and eidx > 0: + if hasattr(e, 'lam'): + prev_e = ma.estimators_[eidx - 1] + assert np.linalg.norm((prev_e.lam - e.lam).flat) > 0 + if ma.normalize == True: assert np.max(ma.proportion_) <= 1.0 else:
Add another model average test to ensure all lambdas used in non-scalar mode were different
skggm_skggm
train
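A tiny numpy sketch of the invariant asserted by the added test: in non-scalar penalty mode, consecutive estimators should have been fit with distinct lam matrices (the matrices here are stand-ins, not skggm output):

import numpy as np

lams = [np.full((3, 3), 0.1 * (i + 1)) for i in range(4)]  # stand-in penalty matrices
for prev_lam, lam in zip(lams, lams[1:]):
    # Mirrors the norm-of-difference check used in the test above.
    assert np.linalg.norm((prev_lam - lam).flat) > 0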