hash: stringlengths, min 40, max 40
diff: stringlengths, min 131, max 114k
message: stringlengths, min 7, max 980
project: stringlengths, min 5, max 67
split: stringclasses, 1 value
f60013c7c16741b2d969c7dcd274191574600398
diff --git a/src/commands/test/run.js b/src/commands/test/run.js index <HASH>..<HASH> 100644 --- a/src/commands/test/run.js +++ b/src/commands/test/run.js @@ -6,10 +6,6 @@ var browserTestsRunner = require('./util/browser-tests-runner'); var path = require('path'); module.exports = function run(options, devTools) { - if(options.load) { - require(path.resolve(process.cwd(), options.load)); - } - return loadTests(devTools.cwd, options.patterns, devTools) .then((tests) => { var promise = Promise.resolve(); diff --git a/src/commands/test/util/server-tests-runner/index.js b/src/commands/test/util/server-tests-runner/index.js index <HASH>..<HASH> 100644 --- a/src/commands/test/util/server-tests-runner/index.js +++ b/src/commands/test/util/server-tests-runner/index.js @@ -14,11 +14,16 @@ exports.run = function(allTests, options, devTools) { var testsJSON = JSON.stringify(filteredTests); var mochaBin = require.resolve('mocha/bin/mocha'); var mochaTestsServer = require.resolve('./mocha-tests-server.js'); + var args = [mochaTestsServer]; var env = Object.assign({}, process.env); env.MARKO_DEVTOOLS_TESTS = testsJSON; env.MARKO_DEVTOOLS_ROOT = devTools.__dirname; - return spawn(mochaBin, [mochaTestsServer], { + if(options.load) { + args.unshift('--require', options.load); + } + + return spawn(mochaBin, args, { cwd: devTools.cwd, env, stdio: 'inherit'
load --load option within the mocha subprocess; this needs to be made to work for the browser as well
marko-js_cli
train
de637b8cfb1023d5f3b7d5ccf8c27812744c31d8
diff --git a/src/package/dependencies.js b/src/package/dependencies.js index <HASH>..<HASH> 100644 --- a/src/package/dependencies.js +++ b/src/package/dependencies.js @@ -3,7 +3,7 @@ import {info} from '@travi/cli-messages'; import install from './install'; export default async function ({contributors}) { - info('Installing devDependencies', {level: 'secondary'}); + info('Installing dependencies'); await install(uniq([ 'npm-run-all', diff --git a/src/package/install.js b/src/package/install.js index <HASH>..<HASH> 100644 --- a/src/package/install.js +++ b/src/package/install.js @@ -1,7 +1,12 @@ -import exec from '../../third-party-wrappers/exec-as-promised'; +import {info} from '@travi/cli-messages'; +import execa from 'execa'; export default async function (dependencies) { if (dependencies.length) { - await exec(`. ~/.nvm/nvm.sh && nvm use && npm install ${dependencies.join(' ')} --save-dev`, {silent: false}); + const dependenciesType = 'dev'; + + info(`Installing ${dependenciesType} dependencies`); + + await execa.shell(`. ~/.nvm/nvm.sh && nvm use && npm install ${dependencies.join(' ')} --save-${dependenciesType}`); } } diff --git a/test/integration/features/step_definitions/nvm-steps.js b/test/integration/features/step_definitions/nvm-steps.js index <HASH>..<HASH> 100644 --- a/test/integration/features/step_definitions/nvm-steps.js +++ b/test/integration/features/step_definitions/nvm-steps.js @@ -1,8 +1,10 @@ +import execa from 'execa'; import {Before, Given} from 'cucumber'; import any from '@travi/any'; import * as exec from '../../../../third-party-wrappers/exec-as-promised'; Before(function () { + this.sinonSandbox.stub(execa, 'shell'); this.sinonSandbox.stub(exec, 'default'); }); diff --git a/test/unit/package/install-test.js b/test/unit/package/install-test.js index <HASH>..<HASH> 100644 --- a/test/unit/package/install-test.js +++ b/test/unit/package/install-test.js @@ -1,7 +1,7 @@ import {assert} from 'chai'; import sinon from 'sinon'; import any from '@travi/any'; -import * as exec from '../../../third-party-wrappers/exec-as-promised'; +import execa from 'execa'; import npmInstall from '../../../src/package/install'; suite('npm install', () => { @@ -10,7 +10,7 @@ suite('npm install', () => { setup(() => { sandbox = sinon.createSandbox(); - sandbox.stub(exec, 'default'); + sandbox.stub(execa, 'shell'); }); teardown(() => sandbox.restore()); @@ -18,7 +18,7 @@ suite('npm install', () => { test('that `npm install` is not run when no dependencies need to be installed', async () => { await npmInstall([]); - assert.notCalled(exec.default); + assert.notCalled(execa.shell); }); suite('devDependencies', () => { @@ -28,9 +28,8 @@ suite('npm install', () => { await npmInstall(devDependencies); assert.calledWith( - exec.default, - `. ~/.nvm/nvm.sh && nvm use && npm install ${devDependencies.join(' ')} --save-dev`, - {silent: false} + execa.shell, + `. ~/.nvm/nvm.sh && nvm use && npm install ${devDependencies.join(' ')} --save-dev` ); }); });
feat(execa): migrated the dependency installation to use execa for #<I>
travi_javascript-scaffolder
train
05f4fad6690ddb904271123eb9629f5e8b3b0c93
diff --git a/tests/Statistics/EffectSizeTest.php b/tests/Statistics/EffectSizeTest.php index <HASH>..<HASH> 100644 --- a/tests/Statistics/EffectSizeTest.php +++ b/tests/Statistics/EffectSizeTest.php @@ -10,7 +10,7 @@ class EffectSizeTest extends \PHPUnit_Framework_TestCase { $η² = EffectSize::etaSquared($SSB, $SST); - $this->assertEquals($expected, $η², 0.0000000001); + $this->assertEquals($expected, $η², '', 0.0000000001); } public function dataProviderForEtaSquared() @@ -25,6 +25,34 @@ class EffectSizeTest extends \PHPUnit_Framework_TestCase [26.196, 301.70, 0.08682797480941], [0.090, 301.70, 0.00029830957905], [271.2, 301.70, 0.89890619821014], + // Test data: http://www.uccs.edu/lbecker/glm_effectsize.html + [24, 610, 0.03934426229508], + [112, 610, 0.18360655737705], + [144, 610, 0.23606557377049], + ]; + } + + /** + * @dataProvider dataProviderForPartialEtaSquared + */ + public function testPartialEtaSquared($SSB, $SSE, $expected) + { + $η²p = EffectSize::partialEtaSquared($SSB, $SSE); + + $this->assertEquals($expected, $η²p, '', 0.000000001); + } + + public function dataProviderForPartialEtaSquared() + { + return [ + // Test data: http://jalt.org/test/bro_28.htm + [158.372, 3068.553, 0.049078302], + [0.344, 3003.548, 0.000114518], + [137.572, 3003.548, 0.043797116], + // Test data: http://www.uccs.edu/lbecker/glm_effectsize.html + [24, 330, 0.06779661016949], + [112, 330, 0.25339366515837], + [144, 330, 0.30379746835443], ]; } }
Add unit tests for partial eta-squared effect size.
markrogoyski_math-php
train
20c401dfb925c5b9c95510587dcbf27e561e3a07
diff --git a/afns/apps/markdown/templatetags/markdown.py b/afns/apps/markdown/templatetags/markdown.py index <HASH>..<HASH> 100644 --- a/afns/apps/markdown/templatetags/markdown.py +++ b/afns/apps/markdown/templatetags/markdown.py @@ -1,5 +1,3 @@ -import markdown - from django import template from django.conf import settings from django.utils.encoding import smart_str, force_unicode @@ -16,4 +14,4 @@ def markdown(value, arg=''): raise template.TemplateSyntaxError("Error in 'markdown' filter: The Python markdown library isn't installed.") return force_unicode(value) else: - return mark_safe(markdown.markdown(force_unicode(value), ['smartypants','onion'], safe_mode=False)) \ No newline at end of file + return mark_safe(markdown.markdown(force_unicode(value), ['smartypants','onion']), safe_mode=False) \ No newline at end of file
I trust that I'll have the right markdown lib installed
theonion_django-bulbs
train
623226bb1d75c83cebfa61cc3c8f652c7449bfd4
diff --git a/build.js b/build.js index <HASH>..<HASH> 100644 --- a/build.js +++ b/build.js @@ -167,7 +167,7 @@ const runBuildTest_e2e = async (template = "treemap") => { await bundle.write(outputOptions); }; -const runBuildTest_gh59 = async () => { +const runBuildTest_gh59 = async (template) => { const input = { index: "test/gh59/src/index", "components/index": "test/gh59/src/components/index", @@ -181,7 +181,7 @@ const runBuildTest_gh59 = async () => { require("./")({ title: "test gh59", filename: `stats.gh59${fileExt}`, - template: "treemap", + template, ...simpleOptions, }), ], @@ -198,7 +198,7 @@ const runBuildTest_gh59 = async () => { await bundle.write(outputOptions); }; -const runBuildTest_gh69 = async () => { +const runBuildTest_gh69 = async (template) => { const input = "test/gh69/main.js"; const inputOptions = { @@ -207,7 +207,7 @@ const runBuildTest_gh69 = async () => { require("./")({ title: "test gh69", filename: `stats.gh69${fileExt}`, - template: "treemap", + template, ...simpleOptions, }), ], @@ -224,17 +224,20 @@ const runBuildTest_gh69 = async () => { await bundle.write(outputOptions); }; +const buildAll = (action) => + Promise.all(templatesToBuild.map((t) => action(t))); + const run = async () => { await Promise.all(TEMPLATE.map((t) => runBuild(t))); if (argv.dev) { - await Promise.all(templatesToBuild.map((t) => runBuildDev(t))); + await buildAll(runBuildDev); } if (argv.e2e) { - await Promise.all(templatesToBuild.map((t) => runBuildTest_e2e(t))); + await buildAll(runBuildTest_e2e); } if (argv.test) { - await runBuildTest_gh59(); - await runBuildTest_gh69(); + await buildAll(runBuildTest_gh59); + await buildAll(runBuildTest_gh69); } }; diff --git a/plugin/index.js b/plugin/index.js index <HASH>..<HASH> 100644 --- a/plugin/index.js +++ b/plugin/index.js @@ -103,7 +103,6 @@ module.exports = function (opts) { tree = buildTree(id, Object.entries(modules), mapper); } else { const modules = Object.entries(bundle.modules); - if (modules.length === 0) continue; //TODO this is not exactly right tree = buildTree(id, modules, mapper); } diff --git a/src/script-sunburst.js b/src/script-sunburst.js index <HASH>..<HASH> 100644 --- a/src/script-sunburst.js +++ b/src/script-sunburst.js @@ -263,9 +263,9 @@ const Main = ({ width, height, data: { tree, nodes, options = {} } }) => { for (const prop of availableSizeProperties) { value[prop] = 0; } - const children = node.children; - if (children != null) { - let i = children.length; + if (node.data.children != null) { + const children = node.children; + let i = node.data.children.length; while (--i >= 0) { for (const prop of availableSizeProperties) { value[prop] += children[i].originalValue[prop]; diff --git a/src/script-treemap.js b/src/script-treemap.js index <HASH>..<HASH> 100644 --- a/src/script-treemap.js +++ b/src/script-treemap.js @@ -398,9 +398,13 @@ const Main = ({ for (const prop of availableSizeProperties) { value[prop] = 0; } - const children = node.children; - if (children != null) { - let i = children.length; + + // use node.data.children because if it is empty d3 will skip this node + // and it will look like it is actually a leaf - which technically it is but not exactly + // it is just a chunk without deps - usually just with imports + if (node.data.children != null) { + const children = node.children; + let i = node.data.children.length; while (--i >= 0) { for (const prop of availableSizeProperties) { value[prop] += children[i].originalValue[prop];
More accurate fix for #<I>
btd_rollup-plugin-visualizer
train
d87b5b7f19f774523c9048f07a255fbc9569ccba
diff --git a/core/src/main/java/org/infinispan/distribution/ch/AbstractConsistentHash.java b/core/src/main/java/org/infinispan/distribution/ch/AbstractConsistentHash.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/infinispan/distribution/ch/AbstractConsistentHash.java +++ b/core/src/main/java/org/infinispan/distribution/ch/AbstractConsistentHash.java @@ -91,7 +91,7 @@ public abstract class AbstractConsistentHash implements ConsistentHash { * @param key the key to get the grouping for * @return the group, or if no group is applicable, the key */ - protected Object getGrouping(Object key) { + protected Object getGrouping(final Object key) { String group = groupManager != null ? groupManager.getGroup(key) : null; return group != null ? group : key; } diff --git a/core/src/main/java/org/infinispan/distribution/ch/AbstractWheelConsistentHash.java b/core/src/main/java/org/infinispan/distribution/ch/AbstractWheelConsistentHash.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/infinispan/distribution/ch/AbstractWheelConsistentHash.java +++ b/core/src/main/java/org/infinispan/distribution/ch/AbstractWheelConsistentHash.java @@ -229,7 +229,7 @@ public abstract class AbstractWheelConsistentHash extends AbstractConsistentHash return hashIds; } - public int getNormalizedHash(Object key) { + public int getNormalizedHash(final Object key) { return Util.getNormalizedHash(key, hashFunction); } diff --git a/core/src/main/java/org/infinispan/distribution/ch/DefaultConsistentHash.java b/core/src/main/java/org/infinispan/distribution/ch/DefaultConsistentHash.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/infinispan/distribution/ch/DefaultConsistentHash.java +++ b/core/src/main/java/org/infinispan/distribution/ch/DefaultConsistentHash.java @@ -41,12 +41,12 @@ public class DefaultConsistentHash extends AbstractWheelConsistentHash { setHashFunction(hash); } - public List<Address> locate(Object key, int replCount) { + public List<Address> locate(final Object key, final int replCount) { return locateInternal(key, replCount, null); } @Override - public boolean isKeyLocalToAddress(Address target, Object key, int replCount) { + public boolean isKeyLocalToAddress(final Address target, final Object key, final int replCount) { return locateInternal(key, replCount, target) == null; } @@ -54,17 +54,16 @@ public class DefaultConsistentHash extends AbstractWheelConsistentHash { * Locate <code>replCount</code> owners for key <code>key</code> and return the list. * If one of the owners is identical to <code>target</code>, return <code>null</code> instead. */ - private List<Address> locateInternal(Object key, int replCount, Address target) { - int actualReplCount = Math.min(replCount, caches.size()); - int normalizedHash; - normalizedHash = getNormalizedHash(getGrouping(key)); - - List<Address> owners = new ArrayList<Address>(replCount); + private List<Address> locateInternal(final Object key,final int replCount, final Address target) { + final int actualReplCount = Math.min(replCount, caches.size()); + final int normalizedHash = getNormalizedHash(getGrouping(key)); + final List<Address> owners = new ArrayList<Address>(actualReplCount); + final boolean virtualNodesEnabled = isVirtualNodesEnabled(); for (Iterator<Address> it = getPositionsIterator(normalizedHash); it.hasNext();) { Address a = it.next(); // if virtual nodes are enabled we have to avoid duplicate addresses - if (!(isVirtualNodesEnabled() && owners.contains(a))) { + if (!(virtualNodesEnabled && owners.contains(a))) { if (target != null && target.equals(a)) return null; diff --git a/core/src/main/java/org/infinispan/statetransfer/DistributedStateTransferManagerImpl.java b/core/src/main/java/org/infinispan/statetransfer/DistributedStateTransferManagerImpl.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/infinispan/statetransfer/DistributedStateTransferManagerImpl.java +++ b/core/src/main/java/org/infinispan/statetransfer/DistributedStateTransferManagerImpl.java @@ -97,8 +97,8 @@ public class DistributedStateTransferManagerImpl extends BaseStateTransferManage public boolean isLocationInDoubt(Object key) { - return isStateTransferInProgress() && !chOld.locate(key, configuration.getNumOwners()).contains(getAddress()) - && chNew.locate(key, configuration.getNumOwners()).contains(getAddress()); + return isStateTransferInProgress() && !chOld.isKeyLocalToAddress(getAddress(), key, configuration.getNumOwners()) && chNew.isKeyLocalToAddress(getAddress(), key, configuration.getNumOwners()); } }
ISPN-<I> Improve performance of DefaultConsistentHash.locateInternal
infinispan_infinispan
train
e7a3142f1ab636caca7fd82c2785a79cda6a075e
diff --git a/salt/fileserver/gitfs.py b/salt/fileserver/gitfs.py index <HASH>..<HASH> 100644 --- a/salt/fileserver/gitfs.py +++ b/salt/fileserver/gitfs.py @@ -137,16 +137,21 @@ def _verify_gitpython(quiet=False): ''' Check if GitPython is available and at a compatible version (>= 0.3.0) ''' - if not HAS_GITPYTHON: - log.error( - 'Git fileserver backend is enabled in master config file, but ' - 'could not be loaded, is GitPython installed?' - ) - if HAS_PYGIT2 and not quiet: + def _recommend(): + if HAS_PYGIT2: log.error(_RECOMMEND_PYGIT2) - if HAS_DULWICH and not quiet: + if HAS_DULWICH: log.error(_RECOMMEND_DULWICH) + + if not HAS_GITPYTHON: + if not quiet: + log.error( + 'Git fileserver backend is enabled in master config file, but ' + 'could not be loaded, is GitPython installed?' + ) + _recommend() return False + gitver = distutils.version.LooseVersion(git.__version__) minver_str = '0.3.0' minver = distutils.version.LooseVersion(minver_str) @@ -162,15 +167,15 @@ def _verify_gitpython(quiet=False): 'The git command line utility is required by the Git fileserver ' 'backend when using the \'gitpython\' provider.' ) + if errors: - if HAS_PYGIT2 and not quiet: - errors.append(_RECOMMEND_PYGIT2) - if HAS_DULWICH and not quiet: - errors.append(_RECOMMEND_DULWICH) for error in errors: log.error(error) + if not quiet: + _recommend() return False - log.info('gitpython gitfs_provider enabled') + + log.debug('gitpython gitfs_provider enabled') __opts__['verified_gitfs_provider'] = 'gitpython' return True @@ -180,15 +185,19 @@ def _verify_pygit2(quiet=False): Check if pygit2/libgit2 are available and at a compatible version. Pygit2 must be at least 0.20.3 and libgit2 must be at least 0.20.0. ''' - if not HAS_PYGIT2: - log.error( - 'Git fileserver backend is enabled in master config file, but ' - 'could not be loaded, are pygit2 and libgit2 installed?' - ) - if HAS_GITPYTHON and not quiet: + def _recommend(): + if HAS_GITPYTHON: log.error(_RECOMMEND_GITPYTHON) - if HAS_DULWICH and not quiet: + if HAS_DULWICH: log.error(_RECOMMEND_DULWICH) + + if not HAS_PYGIT2: + if not quiet: + log.error( + 'Git fileserver backend is enabled in master config file, but ' + 'could not be loaded, are pygit2 and libgit2 installed?' + ) + _recommend() return False pygit2ver = distutils.version.LooseVersion(pygit2.__version__) @@ -217,15 +226,15 @@ def _verify_pygit2(quiet=False): 'The git command line utility is required by the Git fileserver ' 'backend when using the \'pygit2\' provider.' ) + if errors: - if HAS_GITPYTHON and not quiet: - errors.append(_RECOMMEND_GITPYTHON) - if HAS_DULWICH and not quiet: - errors.append(_RECOMMEND_DULWICH) for error in errors: log.error(error) + if not quiet: + _recommend() return False - log.info('pygit2 gitfs_provider enabled') + + log.debug('pygit2 gitfs_provider enabled') __opts__['verified_gitfs_provider'] = 'pygit2' return True @@ -234,15 +243,19 @@ def _verify_dulwich(quiet=False): ''' Check if dulwich is available. ''' - if not HAS_DULWICH: - log.error( - 'Git fileserver backend is enabled in the master config file, but ' - 'could not be loaded. Is Dulwich installed?' - ) - if HAS_GITPYTHON and not quiet: + def _recommend(): + if HAS_GITPYTHON: log.error(_RECOMMEND_GITPYTHON) - if HAS_PYGIT2 and not quiet: + if HAS_PYGIT2: log.error(_RECOMMEND_PYGIT2) + + if not HAS_DULWICH: + if not quiet: + log.error( + 'Git fileserver backend is enabled in the master config file, but ' + 'could not be loaded. Is Dulwich installed?' + ) + _recommend() return False dulwich_version = dulwich.__version__ dulwich_min_version = (0, 9, 4) errors = [] if dulwich_version < dulwich_min_version: errors.append( 'Git fileserver backend is enabled in the master config file, but ' 'the installed version of Dulwich is earlier than {0}. Version {1} ' 'detected.'.format(dulwich_min_version, dulwich_version) ) - if HAS_PYGIT2 and not quiet: - errors.append(_RECOMMEND_PYGIT2) - if HAS_GITPYTHON and not quiet: - errors.append(_RECOMMEND_GITPYTHON) - + if errors: for error in errors: log.error(error) + if not quiet: + _recommend() return False - log.info('dulwich gitfs_provider enabled') + log.debug('dulwich gitfs_provider enabled') __opts__['verified_gitfs_provider'] = 'dulwich' return True
Fix spurious error in master log. When only GitPython is installed, and no gitfs_provider is set, an error about pygit2 not being available is logged. This should not happen, as gitfs should just select the first available out of pygit2, gitpython, and dulwich. This fixes #<I> by suppressing the error.
saltstack_salt
train
30d1824b14a5006834ff35a28c16741626614df9
diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -309,6 +309,18 @@ function describeTestsWithOptions(options, postText) { assert.equal(md.render(replaceDelimiters(src, options)), replaceDelimiters(expected, options)); }); + it(replaceDelimiters('should not apply inside item lists with trailing `code{.red}`', options), () => { + src = '- item with trailing `code = {.red}`'; + expected = '<ul>\n<li>item with trailing <code>code = {.red}</code></li>\n</ul>\n'; + assert.equal(md.render(replaceDelimiters(src, options)), replaceDelimiters(expected, options)); + }); + + it(replaceDelimiters('should not apply inside item lists with trailing non-text, eg *{.red}*', options), () => { + src = '- item with trailing *{.red}*'; + expected = '<ul>\n<li>item with trailing <em>{.red}</em></li>\n</ul>\n'; + assert.equal(md.render(replaceDelimiters(src, options)), replaceDelimiters(expected, options)); + }); + it(replaceDelimiters('should work with multiple inline code blocks in same paragraph', options), () => { src = 'bla `click()`{.c} blah `release()`{.cpp}'; expected = '<p>bla <code class="c">click()</code> blah <code class="cpp">release()</code></p>\n';
add tests for correct handling of trailing non-text 'attr-like' strings within items
arve0_markdown-it-attrs
train
54c9230b5a21ced92946192e90234fbbd3288613
diff --git a/mock.py b/mock.py index <HASH>..<HASH> 100644 --- a/mock.py +++ b/mock.py @@ -36,6 +36,11 @@ except ImportError: inPy3k = sys.version_info[0] == 3 +if inPy3k: + class_types = (type,) +else: + class_types = (type, types.ClassType) + # getsignature and mocksignature heavily "inspired" by # the decorator module: http://pypi.python.org/pypi/decorator/ @@ -299,8 +304,7 @@ class _patch(object): def __call__(self, func): - if isinstance(func, type) or (hasattr(types, "ClassType") and - isinstance(func, types.ClassType)): + if isinstance(func, class_types): return self.decorate_class(func) else: return self.decorate_callable(func)
Minor change to class decorator code.
testing-cabal_mock
train
5ad726e607d04f249e724a93361fd8c1d703e005
diff --git a/examples/1_basic/counter.js b/examples/1_basic/counter.js index <HASH>..<HASH> 100644 --- a/examples/1_basic/counter.js +++ b/examples/1_basic/counter.js @@ -1,6 +1,5 @@ import React from 'react' -import {on, TanokDispatcher} from '../../lib/tanok.js'; -import tanokComponent from '../../lib/component.js'; +import {on, TanokDispatcher, tanokComponent} from '../../lib/tanok.js'; /* Model diff --git a/examples/3_subcomponents/subcomponents.js b/examples/3_subcomponents/subcomponents.js index <HASH>..<HASH> 100644 --- a/examples/3_subcomponents/subcomponents.js +++ b/examples/3_subcomponents/subcomponents.js @@ -1,6 +1,5 @@ import React from 'react'; -import tanokComponent from '../../lib/component.js'; -import {on, TanokDispatcher, effectWrapper, subcomponentFx} from '../../lib/tanok.js'; +import {on, TanokDispatcher, effectWrapper, subcomponentFx, tanokComponent} from '../../lib/tanok.js'; import {init as counterInit, CounterDispatcher, Counter} from '../2_effects/counter-effects.js'; diff --git a/examples/4_subcomponent_collection/counter-collection.js b/examples/4_subcomponent_collection/counter-collection.js index <HASH>..<HASH> 100644 --- a/examples/4_subcomponent_collection/counter-collection.js +++ b/examples/4_subcomponent_collection/counter-collection.js @@ -1,6 +1,5 @@ import React from 'react' -import tanokComponent from '../../lib/component.js'; -import {on, TanokDispatcher} from '../../lib/tanok.js'; +import {on, TanokDispatcher, tanokComponent} from '../../lib/tanok.js'; /* Model diff --git a/examples/4_subcomponent_collection/subcomponents.js b/examples/4_subcomponent_collection/subcomponents.js index <HASH>..<HASH> 100644 --- a/examples/4_subcomponent_collection/subcomponents.js +++ b/examples/4_subcomponent_collection/subcomponents.js @@ -1,6 +1,5 @@ import React from 'react'; -import tanokComponent from '../../lib/component.js'; -import {on, TanokDispatcher, effectWrapper} from '../../lib/tanok.js'; +import {on, TanokDispatcher, effectWrapper, tanokComponent} from '../../lib/tanok.js'; import {init as counterInit, CounterDispatcher, Counter} from './counter-collection.js'; diff --git a/src/component.js b/src/component.js index <HASH>..<HASH> 100644 --- a/src/component.js +++ b/src/component.js @@ -14,7 +14,7 @@ import { StreamWrapper } from './streamWrapper.js'; * } * * */ -export default function tanokComponent(target) { +export function tanokComponent(target) { target.propTypes = target.propTypes || {}; target.propTypes.eventStream = React.PropTypes.instanceOf(StreamWrapper);
Export tanokComponent by name, not as default
brabadu_tanok
train
5bf8e70f26309b0a5ceda49ce0a7e97e89eee764
diff --git a/jwt/__init__.py b/jwt/__init__.py index <HASH>..<HASH> 100644 --- a/jwt/__init__.py +++ b/jwt/__init__.py @@ -23,7 +23,7 @@ from .exceptions import ( ) from .jwks_client import PyJWKClient -__version__ = "2.0.0a1" +__version__ = "2.0.0a2" __title__ = "PyJWT" __description__ = "JSON Web Token implementation in Python"
Bump up version to <I>a2
jpadilla_pyjwt
train
70d4b5c8d52d9a5615e5d0f5c7f147e15573c566
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ setup( platforms=['OS Independent'], keywords='django, app, reusable, review, rating, voting', author='Daniel Kaufhold', - author_email='daniel.kaufhold.com', + author_email='[email protected]', url="https://github.com/bitmazk/django-review", packages=find_packages(), include_package_data=True,
Fixes author email in setup.py
bitlabstudio_django-review
train
6d574be07a26120ef38b45d027c138eb1f108205
diff --git a/platform/core/registry.js b/platform/core/registry.js index <HASH>..<HASH> 100644 --- a/platform/core/registry.js +++ b/platform/core/registry.js @@ -147,12 +147,13 @@ class Registry extends Subscribable { get registrants() { let all = Object.assign({}, this._paths); + for (let alias of Object.keys(this._aliases)) { let resolved = this.resolve(alias); if (resolved in this._paths) { all[alias] = {}; - for(let method in this._paths) { + for(let method in this._paths[resolved]) { let _obj = Object.assign({}, this._paths[resolved][method]); _obj.signature = Object.assign({}, _obj.signature, { path: alias, @@ -162,7 +163,7 @@ class Registry extends Subscribable { } } } - + return all; } }
Fixed an issue where aliases were wrongfully generated as part of the registrants
CONNECT-platform_connect-platform
train
3cfbe6aff8a90420249d00f634af2d6ef66cb86d
diff --git a/test/e2e/federation-deployment.go b/test/e2e/federation-deployment.go index <HASH>..<HASH> 100644 --- a/test/e2e/federation-deployment.go +++ b/test/e2e/federation-deployment.go @@ -179,7 +179,7 @@ func verifyCascadingDeletionForDeployment(clientset *fedclientset.Clientset, clu _, err := clusterClientset.Extensions().Deployments(nsName).Get(deploymentName) if shouldExist && errors.IsNotFound(err) { errMessages = append(errMessages, fmt.Sprintf("unexpected NotFound error for deployment %s in cluster %s, expected deployment to exist", deploymentName, clusterName)) - } else if shouldExist && !errors.IsNotFound(err) { + } else if !shouldExist && !errors.IsNotFound(err) { errMessages = append(errMessages, fmt.Sprintf("expected NotFound error for deployment %s in cluster %s, got error: %v", deploymentName, clusterName, err)) } }
Fixing a typo in deployment e2e
kubernetes_kubernetes
train
98db201174773c8af61458f225299db07f8551e1
diff --git a/versions_tests/tests/test_models.py b/versions_tests/tests/test_models.py index <HASH>..<HASH> 100644 --- a/versions_tests/tests/test_models.py +++ b/versions_tests/tests/test_models.py @@ -19,6 +19,7 @@ from unittest import skip, skipUnless import re import uuid +from django import get_version from django.core.exceptions import SuspiciousOperation, ObjectDoesNotExist, ValidationError from django.db import connection, IntegrityError, transaction from django.db.models import Q, Count, Sum @@ -1996,7 +1997,7 @@ class SpecifiedUUIDTest(TestCase): # Postgresql will provide protection here, since util.postgresql.create_current_version_unique_identity_indexes # has been invoked in the post migration handler. - if connection.vendor == 'postgresql': + if connection.vendor == 'postgresql' and get_version() >= '1.7': with self.assertRaises(IntegrityError): with transaction.atomic(): Person.objects.create(forced_identity=p.identity, name="Alexis")
ref: #<I> only execute post-migration dependent assert for postgresql and django <I>+
swisscom_cleanerversion
train
cfe7f0da2d1f203ae855f375a8b6a4ed5a210d18
diff --git a/src/de/unihd/dbs/uima/annotator/intervaltagger/IntervalTagger.java b/src/de/unihd/dbs/uima/annotator/intervaltagger/IntervalTagger.java index <HASH>..<HASH> 100644 --- a/src/de/unihd/dbs/uima/annotator/intervaltagger/IntervalTagger.java +++ b/src/de/unihd/dbs/uima/annotator/intervaltagger/IntervalTagger.java @@ -416,7 +416,7 @@ public class IntervalTagger extends JCasAnnotator_ImplBase { Timex3 timex3 = (Timex3) iterTimex3.next(); //DATE Pattern - Pattern pDate = Pattern.compile("(?:BC)?(\\d+)(-(\\d+))?(-(\\d+))?(T(\\d+))?(:(\\d+))?(:(\\d+))?"); + Pattern pDate = Pattern.compile("(?:BC)?(\\d\\d\\d\\d)(-(\\d+))?(-(\\d+))?(T(\\d+))?(:(\\d+))?(:(\\d+))?"); Pattern pCentury = Pattern.compile("(\\d\\d)"); Pattern pDecate = Pattern.compile("(\\d\\d\\d)"); Pattern pQuarter = Pattern.compile("(\\d+)-Q([1-4])"); @@ -578,11 +578,16 @@ public class IntervalTagger extends JCasAnnotator_ImplBase { beginDay=endDay=mTimeOfDay.group(3); } if(!beginYear.equals("UNDEF") && !endYear.equals("UNDEF")){ +// annotation.setTimexValueEB(beginYear+"-"+beginMonth+"-"+beginDay+"T"+beginHour+":"+beginMinute+":"+beginSecond); +// annotation.setTimexValueEE(endYear+"-"+endMonth+"-"+endDay+"T"+endHour+":"+endMinute+":"+endSecond); +// annotation.setTimexValueLB(beginYear+"-"+beginMonth+"-"+beginDay+"T"+beginHour+":"+beginMinute+":"+beginSecond); +// annotation.setTimexValueLE(endYear+"-"+endMonth+"-"+endDay+"T"+endHour+":"+endMinute+":"+endSecond); + annotation.setTimexValueEB(beginYear+"-"+beginMonth+"-"+beginDay+"T"+beginHour+":"+beginMinute+":"+beginSecond); - annotation.setTimexValueEE(endYear+"-"+endMonth+"-"+endDay+"T"+endHour+":"+endMinute+":"+endSecond); - annotation.setTimexValueLB(beginYear+"-"+beginMonth+"-"+beginDay+"T"+beginHour+":"+beginMinute+":"+beginSecond); + annotation.setTimexValueLB(endYear+"-"+endMonth+"-"+endDay+"T"+endHour+":"+endMinute+":"+endSecond); + annotation.setTimexValueEE(beginYear+"-"+beginMonth+"-"+beginDay+"T"+beginHour+":"+beginMinute+":"+beginSecond); annotation.setTimexValueLE(endYear+"-"+endMonth+"-"+endDay+"T"+endHour+":"+endMinute+":"+endSecond); - + //Copy Values from the Timex3 Annotation annotation.setTimexFreq(timex3.getTimexFreq()); annotation.setTimexId(timex3.getTimexId());
Bug fixes in the interval tagger (decades and centuries; latest begin and earliest end were swapped)
HeidelTime_heideltime
train
c787650ec2853aa86c88aafd6e2dc131808b7819
diff --git a/examples/simple.js b/examples/simple.js index <HASH>..<HASH> 100644 --- a/examples/simple.js +++ b/examples/simple.js @@ -33,14 +33,14 @@ function e2() { } function e3() { - throw new UnexpectedAPIResponse('/api/test'); + throw (new UnexpectedAPIResponse('/api/test')).addSeverity(bugsy.SYSLOG_NOTICE).addMeta({ acceptLanguage: 'fr-CH' }); } function handle(fn) { try { fn(); } catch (err) { - console.log(bugsy.toString(err)); + console.log(bugsy.toString(err), err.meta); } } diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -46,6 +46,7 @@ export function toString(err: { export class ExtendableError extends Error { code: string; severity: Severity; + meta: any; constructor(code: string, message: string, { severity = SYSLOG_ERROR }: { severity?: Severity; } = {}) { super(); @@ -53,6 +54,7 @@ export class ExtendableError extends Error { this.name = `${transform(code)}Error`; this.code = code; this.severity = severity; + this.meta = {}; const error = new Error(message); error.name = this.name; const stack = error.stack.split('\n'); @@ -65,6 +67,14 @@ export class ExtendableError extends Error { return this; } + addMeta(value: any): this { + this.meta = { + ...this.meta, + ...value + }; + return this; + } + // eslint-disable-next-line class-methods-use-this marshal(): any { return {};
Add ability to attach metadata to an error
njakob_bugsy
train
636539eb9452c415bbd53094186ee45d56473422
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ author = 'Jose Padilla' author_email = '[email protected]' license = 'MIT' install_requires = [ - 'PyJWT>=1.5.1,<2.0.0', + 'PyJWT>=1.5.2,<2.0.0', ]
Bump up PyJWT to <I>
GetBlimp_django-rest-framework-jwt
train
603027b0572d2b8199133eb03eb19e25e715fd49
diff --git a/tests/test_orbit.py b/tests/test_orbit.py index <HASH>..<HASH> 100644 --- a/tests/test_orbit.py +++ b/tests/test_orbit.py @@ -1708,7 +1708,7 @@ def test_fixedstepsize(): # Somewhat long time times= numpy.linspace(0.,100.,30001) # Test the following multiples - mults= [10.] + mults= [1.,10.] # Just do this for LogarithmicHaloPotential pot= LogarithmicHaloPotential(normalize=1.) planarpot= pot.toPlanar() @@ -1727,11 +1727,13 @@ def test_fixedstepsize(): runtimes= numpy.empty(len(mults)) for ii,mult in enumerate(mults): start= time.time() - o.integrate(times,pot,dt=(times[1]-times[0])/mult) + o.integrate(times,pot,dt=(times[1]-times[0])/mult, + method=integrator) runtimes[ii]= time.time()-start for ii,mult in enumerate(mults): if ii == 0: continue - assert numpy.fabs(runtimes[ii]/runtimes[0]/mults[ii]*mults[0]-1.) < 0.4, 'Runtime of integration with fixed stepsize for integrator %s, type or orbit %s, stepsize reduction %i is not %i times less (residual is %g, times %g and %g)' % (integrator,type,mults[ii],mults[ii], + # Pretty loose test, because hard to get exactly right with overhead + assert numpy.fabs(runtimes[ii]/runtimes[0]/mults[ii]*mults[0]-1.) < 0.7, 'Runtime of integration with fixed stepsize for integrator %s, type or orbit %s, stepsize reduction %i is not %i times less (residual is %g, times %g and %g)' % (integrator,type,mults[ii],mults[ii], numpy.fabs(runtimes[ii]/runtimes[0]/mults[ii]*mults[0]-1.),mults[ii]/mults[0],runtimes[ii]/runtimes[0]) return None
Fix stepsize test for orbit integration (which didn't work at all before)
jobovy_galpy
train
bebc6f5e379b2d655b4e834f6566af22bf23fb2c
diff --git a/src/org/opencms/search/galleries/CmsGallerySearchResult.java b/src/org/opencms/search/galleries/CmsGallerySearchResult.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/search/galleries/CmsGallerySearchResult.java +++ b/src/org/opencms/search/galleries/CmsGallerySearchResult.java @@ -195,7 +195,7 @@ public class CmsGallerySearchResult implements Comparable<CmsGallerySearchResult m_title = doc.getFieldValueAsString(effFieldName); if (CmsStringUtil.isEmptyOrWhitespaceOnly(m_title)) { m_title = doc.getFieldValueAsString( - CmsPropertyDefinition.PROPERTY_TITLE + CmsSearchField.FIELD_DYNAMIC_PROPERTIES); + CmsPropertyDefinition.PROPERTY_TITLE + CmsSearchField.FIELD_DYNAMIC_PROPERTIES_DIRECT); } effFieldName = CmsSearchFieldConfiguration.getLocaleExtendedName( @@ -204,7 +204,7 @@ public class CmsGallerySearchResult implements Comparable<CmsGallerySearchResult m_description = doc.getFieldValueAsString(effFieldName); if (CmsStringUtil.isEmptyOrWhitespaceOnly(m_description)) { m_description = doc.getFieldValueAsString( - CmsPropertyDefinition.PROPERTY_DESCRIPTION + CmsSearchField.FIELD_DYNAMIC_PROPERTIES); + CmsPropertyDefinition.PROPERTY_DESCRIPTION + CmsSearchField.FIELD_DYNAMIC_PROPERTIES_DIRECT); } m_resourceType = doc.getFieldValueAsString(CmsSearchField.FIELD_TYPE);
Fix CmsGallerySearch: Use only title/description directly at the resource, not searched ones.
alkacon_opencms-core
train
19b14172cd2712302d6969f9f4302656cad3f65e
diff --git a/src/com/google/javascript/jscomp/TypeCheck.java b/src/com/google/javascript/jscomp/TypeCheck.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/TypeCheck.java +++ b/src/com/google/javascript/jscomp/TypeCheck.java @@ -881,6 +881,30 @@ public final class TypeCheck implements NodeTraversal.Callback, CompilerPass { validator.expectIterable( t, n.getSecondChild(), iterable, "Can only iterate over a (non-null) Iterable type"); typeable = false; + + // Check the declared type of the loop variable. Note that TypeInference does not set the + // loop variable name node's JSType. The node will only have a non-null JSType if one was + // actually declared and set in TypedScopeCreator. + + // Get the name node for the loop variable, e.g. "loopVar" in + // for (let /** string */ loopVar in obj) { + Node loopVarNode = + NodeUtil.isNameDeclaration(n.getFirstChild()) + ? n.getFirstFirstChild() + : n.getFirstChild(); + JSType declaredType = loopVarNode.getJSType(); + if (declaredType != null) { + JSType actualType = + iterable + .getTemplateTypeMap() + .getResolvedTemplateType(typeRegistry.getIterableTemplate()); + validator.expectCanAssignTo( + t, + loopVarNode, + declaredType, + actualType, + "declared type of for-of loop variable does not match inferred type"); + } break; // These nodes are typed during the type inference. diff --git a/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java b/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java index <HASH>..<HASH> 100644 --- a/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java +++ b/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java @@ -300,16 +300,43 @@ public final class TypeCheckNoTranspileTest extends CompilerTypeTestCase { "required: string")); } - public void testForOf_wrongLoopVarType() { - // TODO(b/77905791): this should generate an error + public void testForOf_wrongLoopVarType1() { testTypes( lines( "/** @type {!Array<number>} */", "var numArray = [1, 2];", "/** @type {string} */", "var elem = '';", - "for (elem of numArray) {}", "") - ); + "for (elem of numArray) {", + "}"), + lines( + "declared type of for-of loop variable does not match inferred type", + "found : string", + "required: number")); + } + + public void testForOf_wrongLoopVarType2() { + testTypes( + lines( + "/** @type {!Array<number>} */", + "var numArray = [1, 2];", + "for (let /** string */ elem of numArray) {", + "}"), + lines( + "declared type of for-of loop variable does not match inferred type", + "found : string", + "required: number")); + } + + public void testForOf_wrongLoopVarType3() { + // If the thing we're trying to iterate over is not actually an Iterable, we treat the inferred + // type of the for-of loop variable as unknown and only warn for the non-Iterable item. + testTypes( + "for (var /** number */ x of 3) {}", + lines( + "Can only iterate over a (non-null) Iterable type", + "found : Number", + "required: Iterable")); } public void testForOf_array1() {
Warn for type mismatches on a for-of loop variable during native ES6 typechecking. This check depends on TypeInference not rewriting the type of a for-of loop variable in the loop initialization clause, so that we have the variable's initially declared type if it was declared. ------------- Created by MOE: <URL>
google_closure-compiler
train
47233c33251fd8f1536465beb358d6af01914299
diff --git a/googleanalytics/query.py b/googleanalytics/query.py index <HASH>..<HASH> 100644 --- a/googleanalytics/query.py +++ b/googleanalytics/query.py @@ -1,5 +1,6 @@ from copy import deepcopy import collections +import time import addressable import inspector import utils @@ -113,6 +114,15 @@ class Query(object): self._report = None self._specify(metrics=metrics, dimensions=dimensions) + _lock = 0 + + # no not execute more than one query per second + def _wait(self): + now = time.time() + elapsed = now - self._lock + time.sleep(max(0, 1 - elapsed)) + self._lock = now + def _serialize_criterion(criterion): pattern = r'(?P<identifier>[\w:]+)((?P<operator>[\!\=\>\<\@\~]+)(?P<value>[\w:]+))?' match = re.match(pattern, criterion) @@ -600,6 +610,7 @@ class CoreQuery(Query): raw['dimensions'] = ','.join(self.raw['dimensions']) service = self.account.service + self._wait() try: response = service.data().ga().get(**raw).execute() diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup(name='googleanalytics', author_email='[email protected]', url='https://github.com/debrouwere/google-analytics/', download_url='http://www.github.com/debrouwere/google-analytics/tarball/master', - version='0.10.1', + version='0.10.2', license='ISC', packages=find_packages(), keywords='data analytics api wrapper google',
Add query lock, don't execute more than one query per second.
debrouwere_google-analytics
train
f7127e55225840681224dc55eecb144d12369352
diff --git a/spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/JarFile.java b/spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/JarFile.java index <HASH>..<HASH> 100644 --- a/spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/JarFile.java +++ b/spring-boot-tools/spring-boot-loader/src/main/java/org/springframework/boot/loader/jar/JarFile.java @@ -298,6 +298,7 @@ public class JarFile extends java.util.jar.JarFile { @Override public void close() throws IOException { + super.close(); this.rootFile.close(); } diff --git a/spring-boot-tools/spring-boot-loader/src/test/java/org/springframework/boot/loader/jar/JarFileTests.java b/spring-boot-tools/spring-boot-loader/src/test/java/org/springframework/boot/loader/jar/JarFileTests.java index <HASH>..<HASH> 100644 --- a/spring-boot-tools/spring-boot-loader/src/test/java/org/springframework/boot/loader/jar/JarFileTests.java +++ b/spring-boot-tools/spring-boot-loader/src/test/java/org/springframework/boot/loader/jar/JarFileTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2012-2016 the original author or authors. + * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -469,4 +469,13 @@ public class JarFileTests { } } + @Test + public void jarFileCanBeDeletedOnceItHasBeenClosed() throws Exception { + File temp = this.temporaryFolder.newFile(); + TestJarCreator.createTestJar(temp); + JarFile jf = new JarFile(temp); + jf.close(); + assertThat(temp.delete()).isTrue(); + } + }
Ensure that closing a JarFile closes all underlying resources. Closes gh-<I>
spring-projects_spring-boot
train
646343aaabc94398f4ddf224e8a1c1e0e14d4ee6
diff --git a/osmnx/__init__.py b/osmnx/__init__.py index <HASH>..<HASH> 100644 --- a/osmnx/__init__.py +++ b/osmnx/__init__.py @@ -7,6 +7,7 @@ ################################################################################ from .buildings import * +from .pois import * from .elevation import * from .core import * from .plot import * diff --git a/osmnx/pois.py b/osmnx/pois.py index <HASH>..<HASH> 100644 --- a/osmnx/pois.py +++ b/osmnx/pois.py @@ -251,4 +251,4 @@ def pois_from_place(place, amenities, retain_invalid=False): city = gdf_from_place(place) polygon = city['geometry'].iloc[0] - return create_poi_gdf(polygon, amenities, retain_invalid=retain_invalid) \ No newline at end of file + return create_poi_gdf(polygon, amenities, retain_invalid=retain_invalid)
Added poi-functions to init.
gboeing_osmnx
train
b5f141d47ff78e787632ddcfbfe51a86c07e614b
diff --git a/src/lib/peer.js b/src/lib/peer.js index <HASH>..<HASH> 100644 --- a/src/lib/peer.js +++ b/src/lib/peer.js @@ -20,12 +20,13 @@ export default class Peer { /** * @protected * @param {Object} channel - Communication channel - * @param {Object} api - Usage API + * @param {Object} apiQuery - Usage API * @param {Object} query - Query executed + * @param {Boolean=} [needCursor] - Create a cursor * @returns {Object} Cursor * @description Creates a cursor and processes the collected request packet. */ - exec(channel, api, query) {} + exec(channel, apiQuery, query, needCursor = false) {} /** * @protected
Updating the skeleton peer
AncientSouls_Peer
train
e987b08c78afd9b7d5232b23fc38f44b99cc2056
diff --git a/unconvert_test.go b/unconvert_test.go index <HASH>..<HASH> 100644 --- a/unconvert_test.go +++ b/unconvert_test.go @@ -39,11 +39,11 @@ func TestBinary(t *testing.T) { SortAnnotations(expected) need := map[Annotation]struct{}{} - for _, annotation := range got { + for _, annotation := range expected { need[annotation] = struct{}{} } - for _, annotation := range expected { + for _, annotation := range got { _, ok := need[annotation] if ok { delete(need, annotation) @@ -52,7 +52,7 @@ func TestBinary(t *testing.T) { } } - for _, annotation := range got { + for _, annotation := range expected { _, ok := need[annotation] if ok { t.Errorf("missing: %v", annotation)
unconvert: fix TestBinary diagnostics. As pointed out by @egonelbre.
mdempsky_unconvert
train
1204fb6987610838c97cbad84c8c843a7f950978
diff --git a/src/ItemManager.js b/src/ItemManager.js index <HASH>..<HASH> 100644 --- a/src/ItemManager.js +++ b/src/ItemManager.js @@ -1,5 +1,4 @@ import {MULTI, GROUPKEY_ATT, IGNORE_CLASSNAME} from "./consts"; -import DOMRenderer from "./DOMRenderer"; import {$, toArray} from "./utils"; export default class ItemManager { @@ -191,4 +190,7 @@ export default class ItemManager { this._data = data.concat(); } } + getData(index) { + return this._data[index]; + } }
fix(ItemManager): disconnect link with renderer
naver_egjs-infinitegrid
train
cbd4d3e5d306a5ffe4ba48318305602ab9ff9d77
diff --git a/dateparser/conf.py b/dateparser/conf.py index <HASH>..<HASH> 100644 --- a/dateparser/conf.py +++ b/dateparser/conf.py @@ -56,14 +56,17 @@ settings = Settings() def apply_settings(f): @wraps(f) def wrapper(*args, **kwargs): - if 'settings' in kwargs: - if isinstance(kwargs['settings'], dict): - kwargs['settings'] = settings.replace(**kwargs['settings']) - elif isinstance(kwargs['settings'], Settings): - kwargs['settings'] = kwargs['settings'] - else: - raise TypeError("settings can only be either dict or instance of Settings class") - else: + kwargs['settings'] = kwargs.get('settings', settings) + + if kwargs['settings'] is None: kwargs['settings'] = settings + + if isinstance(kwargs['settings'], dict): + kwargs['settings'] = settings.replace(**kwargs['settings']) + + if not isinstance(kwargs['settings'], Settings): + raise TypeError( + "settings can only be either dict or instance of Settings class") + return f(*args, **kwargs) return wrapper
Re-wrote conditions for apply_settings
scrapinghub_dateparser
train
ffcea153bf5a0adcbc4edb300fb93d857f5c9113
diff --git a/mpop/instruments/visir.py b/mpop/instruments/visir.py index <HASH>..<HASH> 100644 --- a/mpop/instruments/visir.py +++ b/mpop/instruments/visir.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2010, 2011. +# Copyright (c) 2010, 2011, 2012. # Author(s): @@ -31,6 +31,51 @@ from mpop.compositer import Compositer class VisirCompositer(Compositer): + def __call__(self, *channels, **keys): + """Build a geoimage. + e.g.: + img = l.image(0.6, 0.8, -10.8, mode="RGB") + """ + + data = [] + area = None + inv = [] + new_channels = [] + + for channel in channels: + if isinstance(channel, str): + if channel.startswith("-"): + inv.append(True) + channel = channel[1:] + else: + inv.append(False) + else: + if channel < 0: + inv.append(True) + channel = -channel + else: + inv.append(False) + + new_channels.append(channel) + + data.append(self[channel].data) + + new_area = self[channel].area + if area and (new_area != area): + raise ValueError("Channels should have the same area") + else: + area = new_area + + self.check_channels(*new_channels) + + img = geo_image.GeoImage(data, + area=area, + time_slot=self.time_slot, + fill_value=keys.get("fill_value", None), + mode=keys.get("mode", None)) + img.invert(inv) + return img + def channel_image(self, channel, fill_value=0): """Make a black and white image of the *channel*. """
Feature: new function to create an image from a scene.
pytroll_satpy
train
97118af4cb2131896bd854c18e6d2a3f60d94c22
diff --git a/app/models/asset.rb b/app/models/asset.rb index <HASH>..<HASH> 100644 --- a/app/models/asset.rb +++ b/app/models/asset.rb @@ -4,6 +4,20 @@ class Asset < ActiveRecord::Base has_attached_file :attachment, :styles => { :medium => "300x300>", :thumb => "100x100>" } validates_attachment :attachment, :presence => true, - :content_type => { :content_type => "image/jpg", :content_type => "image/png" }, + :content_type => { + :content_type => "image/jpeg", + :content_type => "image/png", + :content_type => "application/pdf", + :content_type => "application/vnd.ms-excel", + :content_type => "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + :content_type => "application/msword", + :content_type => "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + :content_type => "text/plain", + :content_type => "image/gif", + :content_type => "application/zip", + :content_type => "video/x-msvideo", + :content_type => "video/quicktime", + :content_type => "video/mp4" + }, :size => { :in => 0..10.megabytes } end
Add MIME type validation as defined in Upload Asset story acceptance criteria
cortex-cms_cortex
train
605d358c3525c7e9a5b029835bd52ef974987962
diff --git a/mod/folder/lib.php b/mod/folder/lib.php index <HASH>..<HASH> 100644 --- a/mod/folder/lib.php +++ b/mod/folder/lib.php @@ -244,6 +244,9 @@ function folder_get_file_info($browser, $areas, $course, $cm, $context, $fileare if ($filearea === 'content') { + if (!has_capability('mod/folder:view', $context)) { + return NULL; + } $fs = get_file_storage(); $filepath = is_null($filepath) ? '/' : $filepath;
MDL-<I> fixed file_browser access control
moodle_moodle
train
ed76174af13fc73c1d2df8dc1b0130268cf0027c
diff --git a/lib/resqorn/listener.rb b/lib/resqorn/listener.rb index <HASH>..<HASH> 100644 --- a/lib/resqorn/listener.rb +++ b/lib/resqorn/listener.rb @@ -22,12 +22,13 @@ module Resqorn @config ||= Config.load_file(@config_path) end - SIGNALS = [ :QUIT, :CHLD ] + SIGNALS = [ :QUIT ] SIGNAL_QUEUE = [] # Public: Run the main loop. def run + trap(:CHLD) { awake } SIGNALS.each { |signal| trap(signal) { SIGNAL_QUEUE << signal ; awake } } write_procline('running') @@ -51,6 +52,7 @@ module Resqorn yawn(60.0) when :QUIT workers.each { |worker| worker.kill(signal) } + return end end end @@ -112,7 +114,7 @@ module Resqorn end end @running_workers.each do |running_worker| - if blocked_worker = workers.detect { |worker| worker.idle? && worker.queue_key == running_worker[:queue]) } + if blocked_worker = workers.detect { |worker| worker.idle? && worker.queue_key == running_worker[:queue] } blocked_worker.wait_for(running_worker[:pid].to_i) end end diff --git a/lib/resqorn/master.rb b/lib/resqorn/master.rb index <HASH>..<HASH> 100644 --- a/lib/resqorn/master.rb +++ b/lib/resqorn/master.rb @@ -124,11 +124,12 @@ module Resqorn end while true end - SIGNALS = [ :HUP, :INT, :TERM, :QUIT, :CHLD ] + SIGNALS = [ :HUP, :INT, :TERM, :QUIT ] SIGNAL_QUEUE = [] def install_signal_handlers + trap(:CHLD) { awake } SIGNALS.each { |signal| trap(signal) { SIGNAL_QUEUE << signal ; awake } } end diff --git a/lib/resqorn/worker.rb b/lib/resqorn/worker.rb index <HASH>..<HASH> 100644 --- a/lib/resqorn/worker.rb +++ b/lib/resqorn/worker.rb @@ -1,3 +1,5 @@ +require 'resque' + module Resqorn # Models a worker process. class Worker @@ -37,13 +39,22 @@ module Resqorn def try_start @self_started = true @pid = fork do - # todo! start resque worker! + $0 = "STARTING RESQUE FOR #{queues.join(',')}" + resque_worker = Resque::Worker.new(*queues) + resque_worker.term_child = true + resque_worker.term_timeout = 999999999 + resque_worker.log "Starting worker #{resque_worker}" + resque_worker.work(5) end end # Public: Shut this worker down. def kill(signal) - Process.kill(signal.to_s, pid) if pid && @self_started + signal = signal.to_s + # Use the new resque worker signals. + signal = 'INT' if signal == 'TERM' + signal = 'TERM' if signal == 'QUIT' + Process.kill(signal, pid) if pid && @self_started end end end
Start a resque worker.
spraints_resqued
train
159f9785406d9ac081f379602657c2de6b15d582
diff --git a/lib/fsr/listener/outbound.rb b/lib/fsr/listener/outbound.rb index <HASH>..<HASH> 100644 --- a/lib/fsr/listener/outbound.rb +++ b/lib/fsr/listener/outbound.rb @@ -53,6 +53,11 @@ module FSR send_data("api uuid_dump #{@session.headers[:unique_id]}\n\n") end + def next_step + @step += 1 + receive_reply(@session) + end + protected def post_init @session = nil # holds the session object
added #next_step to manually increment state machine
vangberg_librevox
train
107033d5d79b40897df73f3d4d5063053b93035c
diff --git a/api/datastore/bolt/bolt.go b/api/datastore/bolt/bolt.go index <HASH>..<HASH> 100644 --- a/api/datastore/bolt/bolt.go +++ b/api/datastore/bolt/bolt.go @@ -518,7 +518,7 @@ func (ds *BoltDatastore) Get(ctx context.Context, key []byte) ([]byte, error) { } func applyAppFilter(app *models.App, filter *models.AppFilter) bool { - if filter.Name != "" { + if filter != nil && filter.Name != "" { nameLike, err := regexp.MatchString(strings.Replace(filter.Name, "%", ".*", -1), app.Name) return err == nil && nameLike } @@ -527,7 +527,7 @@ func applyAppFilter(app *models.App, filter *models.AppFilter) bool { } func applyRouteFilter(route *models.Route, filter *models.RouteFilter) bool { - return (filter.Path == "" || route.Path == filter.Path) && + return filter == nil || (filter.Path == "" || route.Path == filter.Path) && (filter.AppName == "" || route.AppName == filter.AppName) && (filter.Image == "" || route.Image == filter.Image) }
Fix: handle nil filters (#<I>). Fix bug when filter is nil.
iron-io_functions
train
5009f84c8d05e3a5e2f3ce6e87638fcfe3ab9501
diff --git a/lib/celluloid/io/tcp_socket.rb b/lib/celluloid/io/tcp_socket.rb index <HASH>..<HASH> 100644 --- a/lib/celluloid/io/tcp_socket.rb +++ b/lib/celluloid/io/tcp_socket.rb @@ -14,16 +14,13 @@ module Celluloid # automatically when done (if a block is given) def self.open(*args, &block) sock = new(*args) + return sock unless block_given? - if block_given? - begin - return yield(sock) - ensure - sock.close - end + begin + yield(sock) + ensure + sock.close end - - sock end # Convert a Ruby TCPSocket into a Celluloid::IO::TCPSocket
Tighten up Celluloid::IO::TCPSocket.open
celluloid_celluloid-io
train
c01e1b9b3de51a6b88b35647ce2cb7b4543f84d1
diff --git a/src/Flare/Flare.php b/src/Flare/Flare.php index <HASH>..<HASH> 100644 --- a/src/Flare/Flare.php +++ b/src/Flare/Flare.php @@ -11,7 +11,7 @@ class Flare * * @var string */ - const VERSION = '0.2.x-dev'; + const VERSION = '0.3.x-dev'; /** * Array of expected configuration keys
Add version to <I> dev master
laravelflare_flare
train
2b3becf66691de2f9f3bc1cb7557223f7bf2d37b
diff --git a/web/src/main/java/org/springframework/security/web/util/matcher/RequestHeaderRequestMatcher.java b/web/src/main/java/org/springframework/security/web/util/matcher/RequestHeaderRequestMatcher.java index <HASH>..<HASH> 100644 --- a/web/src/main/java/org/springframework/security/web/util/matcher/RequestHeaderRequestMatcher.java +++ b/web/src/main/java/org/springframework/security/web/util/matcher/RequestHeaderRequestMatcher.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -75,14 +75,14 @@ public final class RequestHeaderRequestMatcher implements RequestMatcher { * * @param expectedHeaderName * the name of the expected header. Cannot be null - * @param expectedHeaderName + * @param expectedHeaderValue * the expected header value or null if the value does not matter */ public RequestHeaderRequestMatcher(String expectedHeaderName, - String exepctedHeaderName) { + String expectedHeaderValue) { Assert.notNull(expectedHeaderName, "headerName cannot be null"); this.expectedHeaderName = expectedHeaderName; - this.expectedHeaderValue = exepctedHeaderName; + this.expectedHeaderValue = expectedHeaderValue; } public boolean matches(HttpServletRequest request) { @@ -100,4 +100,4 @@ public final class RequestHeaderRequestMatcher implements RequestMatcher { + expectedHeaderName + ", expectedHeaderValue=" + expectedHeaderValue + "]"; } -} \ No newline at end of file +}
SEC-<I>: RequestHeaderRequestMatcher constructor argument name has typo
spring-projects_spring-security
train
180fb7d566f9e3b5a5b03a09feadf72b1c9cf9e1
diff --git a/src/Models/Attribute.php b/src/Models/Attribute.php index <HASH>..<HASH> 100644 --- a/src/Models/Attribute.php +++ b/src/Models/Attribute.php @@ -154,13 +154,11 @@ class Attribute extends Model implements AttributeContract, Sortable parent::boot(); // Auto generate slugs early before validation - static::registerModelEvent('validating', function (self $attribute) { - if (! $attribute->slug) { - if ($attribute->exists && $attribute->getSlugOptions()->generateSlugsOnUpdate) { - $attribute->generateSlugOnUpdate(); - } elseif (! $attribute->exists && $attribute->getSlugOptions()->generateSlugsOnCreate) { - $attribute->generateSlugOnCreate(); - } + static::validating(function (self $attribute) { + if ($attribute->exists && $attribute->getSlugOptions()->generateSlugsOnUpdate) { + $attribute->generateSlugOnUpdate(); + } elseif (! $attribute->exists && $attribute->getSlugOptions()->generateSlugsOnCreate) { + $attribute->generateSlugOnCreate(); } }); }
Ensure unique slugs are generated at all times
rinvex_laravel-attributes
train
5e3cb29f29bf3d00ecf239928f33767db0e410a3
diff --git a/src/main/java/com/github/noraui/browser/DriverFactory.java b/src/main/java/com/github/noraui/browser/DriverFactory.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/github/noraui/browser/DriverFactory.java +++ b/src/main/java/com/github/noraui/browser/DriverFactory.java @@ -11,6 +11,7 @@ import org.openqa.selenium.UnexpectedAlertBehaviour; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.chrome.ChromeDriverService; +import org.openqa.selenium.chrome.ChromeOptions; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.htmlunit.HtmlUnitDriver; import org.openqa.selenium.ie.InternetExplorerDriver; @@ -169,6 +170,9 @@ public class DriverFactory { } System.setProperty(Driver.CHROME.getDriverName(), pathWebdriver); ChromeDriverService service = new ChromeDriverService.Builder().withWhitelistedIps("").withVerbose(false).build(); + ChromeOptions chromeOptions = new ChromeOptions(); + chromeOptions.setBinary("/home/jenkins/tools/chromium/58.0.3029.96/chrome-linux/chrome"); + capabilities.setCapability(ChromeOptions.CAPABILITY, chromeOptions); return new ChromeDriver(service, capabilities); }
add a tmp stub for chromeOptions
NoraUi_NoraUi
train
343d194f599e01f6054cc7325de1002e05926c8f
diff --git a/src/Access/NoAuthAccess.php b/src/Access/NoAuthAccess.php index <HASH>..<HASH> 100644 --- a/src/Access/NoAuthAccess.php +++ b/src/Access/NoAuthAccess.php @@ -42,8 +42,15 @@ class NoAuthAccess extends BaseAccessClass */ public function hasAccess($url, $user) { - if (!empty($url['action']) && $this->_isSkipAction($url['action'])) { - return true; + debug($url); + if (!empty($url['action']) && !empty($url['controller'])) { + $plugin = !empty($url['plugin']) ? $url['plugin'] : 'App'; + $plugin = preg_replace('/\//', '\\', $plugin); + $controllerName = $plugin . '\\Controller\\' . $url['controller'] . 'Controller'; + debug($controllerName); + if ($this->_isSkipAction($controllerName, $url['action'])) { + return true; + } } if (!empty($url['controller']) && $this->_isSkipController($url['controller'])) { @@ -62,11 +69,12 @@ class NoAuthAccess extends BaseAccessClass * * returns a list of actions which should be skipped * + * @param string $controller the user tried to access * @return array list of skipped actions */ - public function getSkipActions() + public function getSkipActions($controller) { - return $this->_skipActions['CakeDC\Users\Controller\UsersController']; + return $this->_skipActions[$controller]; } /** @@ -103,13 +111,14 @@ class NoAuthAccess extends BaseAccessClass * * checks if given action should be skipped * - * @param string $action action the user tries to access - * @return bool true if action is empty or in the list of skip actions, false if not + * @param string $controller the user tries to access + * @param string $action the user tries to access + * @return bool true if action is empty or in the list of skip actions, false if not * */ - protected function _isSkipAction($action) + protected function _isSkipAction($controller, $action) { - if (in_array($action, $this->getSkipActions())) { + if (in_array($action, $this->getSkipActions($controller))) { return true; }
Added the ability to get the list of access-check-skipped actions for a given controller - task #<I>
QoboLtd_cakephp-roles-capabilities
train
ee0330a17b8d2deecbc700444e3c6268ccaf7ab2
diff --git a/src/Malenki/Math/Stats/NonParametricTest/WilcoxonSignedRank.php b/src/Malenki/Math/Stats/NonParametricTest/WilcoxonSignedRank.php index <HASH>..<HASH> 100644 --- a/src/Malenki/Math/Stats/NonParametricTest/WilcoxonSignedRank.php +++ b/src/Malenki/Math/Stats/NonParametricTest/WilcoxonSignedRank.php @@ -34,6 +34,7 @@ class WilcoxonSignedRank implements \Countable protected $arr_ranks = array(); protected $arr_signed_ranks = array(); protected $int_nr = null; + protected $float_sigma = null; public function add($s) @@ -223,4 +224,23 @@ class WilcoxonSignedRank implements \Countable { return abs(array_sum($this->signedRanks())); } + + public function sigma(){ + if(is_null($this->float_sigma)){ + if(count($this) < 10){ + throw new \RuntimeException( + sprintf( + 'Resulting size of %d available ranks is too small to' + .' converge to a normal distribution. Cannot compute ' + .'sigma!', + count($this) + ) + ); + } + $n = $this->nr(); + $this->float_sigma = sqrt(($n * ($n + 1) * (2 * $n + 1)) / 6); + } + + return $this->float_sigma; + } } diff --git a/tests/Stats/NonParametricTest/WilcoxonSignedRankTest.php b/tests/Stats/NonParametricTest/WilcoxonSignedRankTest.php index <HASH>..<HASH> 100644 --- a/tests/Stats/NonParametricTest/WilcoxonSignedRankTest.php +++ b/tests/Stats/NonParametricTest/WilcoxonSignedRankTest.php @@ -317,4 +317,35 @@ class WilcoxonSignedRankTest extends PHPUnit_Framework_TestCase $should = array(-1, 3, 3, -3); $this->assertEquals(2, $w->w()); } + + + public function testGettingSigmaShouldSuccess() + { + // example taken from http://vassarstats.net/textbook/ch12a.html + $w = new WilcoxonSignedRank(); + $w->add( + array( + 78, 24, 62, 48, 68, 56, 25, 44, 56, 40, 68, 36, 68, 20, 58, 32 + ) + ); + $w->add( + array( + 78, 24, 64, 45, 64, 52, 30, 50, 64, 50, 78, 22, 84, 40, 90, 72 + ) + ); + $this->assertEquals((float) 31.86, (float) round($w->sigma(), 2)); + } + + + + /** + * @expectedException \RuntimeException + */ + public function testGettingSigmaWithTooSmallRankSizeShouldFail() + { + $w = new WilcoxonSignedRank(); + $w->add(array(126, 115, 122, 116)); + $w->add(array(125, 122, 115, 123)); + $w->sigma(); + } }
WSRT: Compute sigma
malenkiki_math
train
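The sigma() method in the commit above computes the null-hypothesis standard deviation of the signed-rank statistic W. For n non-zero-difference pairs (the code's nr()):

```latex
\sigma_W = \sqrt{\frac{n\,(n+1)\,(2n+1)}{6}}
```

The guard requiring at least 10 ranks matches the usual rule of thumb for when the normal approximation to W becomes reasonable, which is exactly what the test for the RuntimeException exercises.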
856607646963772102fe0a5f8ea4a209fa5a8815
diff --git a/public/src/Conn/Read.php b/public/src/Conn/Read.php index <HASH>..<HASH> 100644 --- a/public/src/Conn/Read.php +++ b/public/src/Conn/Read.php @@ -68,10 +68,12 @@ class Read extends Conn */ public function exeRead($tabela, $termos = null, $parseString = null, $ignoreSystem = null) { - $ignoreOwnerpub = preg_match("/ownerpub/i", $termos); - if($ignoreSystem === null && preg_match("/system_id/i", $termos)) + $queryLogic = explode(" WHERE ", $termos); + if($ignoreSystem === null && count($queryLogic) > 1 && preg_match("/system_id/i", $queryLogic[1])) $ignoreSystem = 1; + $ignoreOwnerpub = (count($queryLogic) > 1 && preg_match("/ownerpub/i", $queryLogic[1])); + $this->setTabela($tabela); if (!empty($parseString)) parse_str($parseString, $this->places); diff --git a/public/src/Conn/SqlCommand.php b/public/src/Conn/SqlCommand.php index <HASH>..<HASH> 100644 --- a/public/src/Conn/SqlCommand.php +++ b/public/src/Conn/SqlCommand.php @@ -59,11 +59,11 @@ class SqlCommand extends Conn */ public function exeCommand($Query, $ignoreSystem = null) { - $ignoreOwnerpub = preg_match("/ownerpub/i", $Query); - if($ignoreSystem === null && preg_match("/system_id/i", $Query)) + $queryLogic = explode(" WHERE ", $Query); + if($ignoreSystem === null && count($queryLogic) > 1 && preg_match("/system_id/i", $queryLogic[1])) $ignoreSystem = 1; - $this->select = parent::addLogicMajor((string)$Query, "", [], $this->ignoreSystem || $ignoreSystem !== null, $ignoreOwnerpub); + $this->select = parent::addLogicMajor((string)$Query, "", [], $this->ignoreSystem || $ignoreSystem !== null, (count($queryLogic) > 1 && preg_match("/ownerpub/i", $queryLogic[1]))); $this->execute(); }
Fix: detect system_id and ownerpub only when they are declared in the WHERE clause
edineibauer_uebConn
train
c261f66b2bcee87755e16e5fe1906c5156025e49
diff --git a/vault/barrier.go b/vault/barrier.go index <HASH>..<HASH> 100644 --- a/vault/barrier.go +++ b/vault/barrier.go @@ -77,6 +77,11 @@ type SecurityBarrier interface { // VerifyMaster is used to check if the given key matches the master key VerifyMaster(key []byte) error + // ReloadKeyring is used to re-read the underlying keyring. + // This is used for HA deployments to ensure the latest keyring + // is present in the leader. + ReloadKeyring() error + // Seal is used to re-seal the barrier. This requires the barrier to // be unsealed again to perform any further operations. Seal() error @@ -85,7 +90,8 @@ type SecurityBarrier interface { // should use the new key, while old values should still be decryptable. Rotate() error - // AddKey is used to add a new key to the keyring + // AddKey is used to add a new key to the keyring. This assumes the keyring + // has already been updated and does not persist a new keyring. AddKey(k *Key) error // ActiveKeyInfo is used to inform details about the active key diff --git a/vault/barrier_aes_gcm.go b/vault/barrier_aes_gcm.go index <HASH>..<HASH> 100644 --- a/vault/barrier_aes_gcm.go +++ b/vault/barrier_aes_gcm.go @@ -187,6 +187,46 @@ func (b *AESGCMBarrier) VerifyMaster(key []byte) error { return nil } +// ReloadKeyring is used to re-read the underlying keyring. +// This is used for HA deployments to ensure the latest keyring +// is present in the leader. +func (b *AESGCMBarrier) ReloadKeyring() error { + b.l.Lock() + defer b.l.Unlock() + + // Create the AES-GCM + gcm, err := b.aeadFromKey(b.keyring.MasterKey()) + if err != nil { + return err + } + + // Read in the keyring + out, err := b.backend.Get(keyringPath) + if err != nil { + return fmt.Errorf("failed to check for keyring: %v", err) + } + + // Decrypt the barrier init key + plain, err := b.decrypt(gcm, out.Value) + if err != nil { + if strings.Contains(err.Error(), "message authentication failed") { + return ErrBarrierInvalidKey + } + return err + } + defer memzero(plain) + + // Recover the keyring + keyring, err := DeserializeKeyring(plain) + if err != nil { + return fmt.Errorf("keyring deserialization failed: %v", err) + } + + // Setup the keyring and finish + b.keyring = keyring + return nil +} + // Unseal is used to provide the master key which permits the barrier // to be unsealed. If the key is not correct, the barrier remains sealed. func (b *AESGCMBarrier) Unseal(key []byte) error { diff --git a/vault/barrier_test.go b/vault/barrier_test.go index <HASH>..<HASH> 100644 --- a/vault/barrier_test.go +++ b/vault/barrier_test.go @@ -357,6 +357,12 @@ func testBarrier_Rotate(t *testing.T, b SecurityBarrier) { if info.Term != 3 { t.Fatalf("Bad term: %d", info.Term) } + + // Should be fine to reload keyring + err = b.ReloadKeyring() + if err != nil { + t.Fatalf("err: %v", err) + } } func testBarrier_Rekey(t *testing.T, b SecurityBarrier) { @@ -431,4 +437,10 @@ func testBarrier_Rekey(t *testing.T, b SecurityBarrier) { if out == nil { t.Fatalf("bad: %v", out) } + + // Should be fine to reload keyring + err = b.ReloadKeyring() + if err != nil { + t.Fatalf("err: %v", err) + } }
vault: support keyring reload
hashicorp_vault
train
59446cf7469e4eac65dea86ed298a4fe7bf5de99
diff --git a/ovirtlago/__init__.py b/ovirtlago/__init__.py index <HASH>..<HASH> 100644 --- a/ovirtlago/__init__.py +++ b/ovirtlago/__init__.py @@ -30,15 +30,11 @@ import nose.config from ovirtsdk.infrastructure.errors import (RequestError, ConnectionError) import lago from lago import log_utils -from lago.utils import LockFile +from lago.utils import (LockFile, run_command, ) from lago.prefix import Prefix from lago.workdir import Workdir -import merge_repos -import paths -import testlib -import utils -import virt +from . import (utils, merge_repos, paths, testlib, virt, ) # TODO: put it into some config PROJECTS_LIST = ['vdsm', 'ovirt-engine', 'vdsm-jsonrpc-java', 'ioprocess', ] @@ -69,7 +65,7 @@ def _fix_reposync_issues(reposync_out, repo_path): package_regex = re.compile(r'(?P<package_name>[^:\r\s]+): \[Errno 256\]') for match in package_regex.findall(reposync_out): find_command = ['find', repo_path, '-name', match + '*', ] - ret, out, _ = utils.run_command(find_command) + ret, out, _ = run_command(find_command) if ret: raise RuntimeError('Failed to execute %s' % find_command) @@ -102,13 +98,13 @@ def _sync_rpm_repository(repo_path, yum_config, repos): with LockFile(lock_path, timeout=180): with LogTask('Running reposync'): - ret, out, _ = utils.run_command(reposync_command) + ret, out, _ = run_command(reposync_command) if not ret: return _fix_reposync_issues(reposync_out=out, repo_path=repo_path) with LogTask('Rerunning reposync'): - ret, _, _ = utils.run_command(reposync_command) + ret, _, _ = run_command(reposync_command) if not ret: return @@ -119,7 +115,7 @@ def _sync_rpm_repository(repo_path, yum_config, repos): ) shutil.rmtree('%s/cache' % repo_path) with LogTask('Rerunning reposync a last time'): - ret, _, _ = utils.run_command(reposync_command) + ret, _, _ = run_command(reposync_command) if ret: raise RuntimeError( 'Failed to run reposync a second time, aborting' @@ -133,7 +129,7 @@ def _build_rpms(name, script, source_dir, output_dir, dists, env=None): 'Build %s(%s) from %s, for %s, store results in %s' % (name, script, source_dir, ', '.join(dists), output_dir), ): - ret, out, err = utils.run_command( + ret, out, err = run_command( [ script, source_dir, @@ -181,7 +177,7 @@ def _build_ioprocess_rpms(source_dir, output_dir, dists): def _git_revision_at(path): - ret, out, _ = utils.run_command(['git', 'rev-parse', 'HEAD'], cwd=path) + ret, out, _ = run_command(['git', 'rev-parse', 'HEAD'], cwd=path) if ret: return 'unknown' return out.strip() diff --git a/ovirtlago/merge_repos.py b/ovirtlago/merge_repos.py index <HASH>..<HASH> 100644 --- a/ovirtlago/merge_repos.py +++ b/ovirtlago/merge_repos.py @@ -24,8 +24,8 @@ import shutil import sys from functools import partial -import utils from lago import log_utils +from lago.utils import run_command LOGGER = logging.getLogger(__name__) @@ -47,7 +47,7 @@ def merge(output_dir, input_dirs): for input_dir in input_dirs: with LogTask('Processing directory %s' % input_dir): - ret = utils.run_command( + ret = run_command( [ 'find', input_dir, @@ -73,7 +73,7 @@ def merge(output_dir, input_dirs): ) try: - ret = utils.run_command(['createrepo', output_dir], cwd=output_dir) + ret = run_command(['createrepo', output_dir], cwd=output_dir) if ret: raise RuntimeError('createrepo for %s failed', output_dir) except OSError:
Using main lago run_command
As the one from ovirtlago was removed
Fixes #<I>
Change-Id: Ic1e<I>b0b<I>bf<I>d<I>e<I>ea1b<I>b2ff<I>dfe
lago-project_lago
train
4549cc0033587998ee37503910542460e326d5e1
diff --git a/seamless-immutable.js b/seamless-immutable.js index <HASH>..<HASH> 100644 --- a/seamless-immutable.js +++ b/seamless-immutable.js @@ -304,6 +304,7 @@ Object.freeze(Immutable); + /* istanbul ignore if */ if (typeof module === "object") { module.exports = Immutable; } else if (typeof exports === "object") {
Don't run code coverage on the module exports section.
rtfeldman_seamless-immutable
train
10c9de55ed335ddd24232b3a7fed05c739bda34b
diff --git a/astroplan/core.py b/astroplan/core.py index <HASH>..<HASH> 100644 --- a/astroplan/core.py +++ b/astroplan/core.py @@ -1137,6 +1137,7 @@ class Observer(object): obs.lat = self.location.latitude.to(u.degree).to_string(sep=':') obs.lon = self.location.longitude.to(u.degree).to_string(sep=':') obs.elevation = self.location.height.to(u.m).value + obs.pressure = self.pressure.to(u.bar).value*1000.0 if time.isscalar: obs.date = time.datetime @@ -1155,8 +1156,6 @@ class Observer(object): moon_dec.append(float(moon.dec)) moon_dist.append(moon.earth_distance) - # For now, assemble a SkyCoord without a distance - #print(moon_ra, moon_dec, moon_dist) moon_sc = SkyCoord(ra=Longitude(moon_ra, unit=u.rad), dec=Latitude(moon_dec, unit=u.rad), distance=u.Quantity(moon_dist, u.AU), frame='gcrs') @@ -1285,7 +1284,9 @@ class Observer(object): altaz : `~astropy.coordinates.SkyCoord` Position of the moon transformed to altitude and azimuth """ - return self.altaz(time, self.get_moon(time)) + # This solution is affected by bug in astropy Issue #3920 + #return self.altaz(time, self.get_moon(time)) + raise NotImplementedError() @u.quantity_input(horizon=u.deg) def can_see(self, time, target, horizon=0*u.degree, return_altaz=False): diff --git a/astroplan/tests/test_core.py b/astroplan/tests/test_core.py index <HASH>..<HASH> 100644 --- a/astroplan/tests/test_core.py +++ b/astroplan/tests/test_core.py @@ -785,6 +785,34 @@ def test_is_night(): nights2 = [observer.is_night(time2) for observer in [lco, aao, vbo]] assert np.all(nights2 == [True, False, False]) +# def test_moon_altaz(): +# time = Time('2012-06-21 03:00:00') +# location = EarthLocation.from_geodetic(-155*u.deg, 19*u.deg, 0*u.m) +# obs = Observer(location=location, pressure=0*u.bar) +# altaz = obs.moon_altaz(time) +# astroplan_altaz = [altaz.alt.radian, altaz.az.radian] +# # Get this from print_pyephem_moon_altaz(): +# pyephem_altaz = [0.7092548608779907, 4.865438938140869] +# assert_allclose(astroplan_altaz, pyephem_altaz, atol=0.1) +# +# def print_pyephem_moon_altaz(): +# """ +# To run: +# python -c 'from astroplan.tests.test_core import print_pyephem_moon_altaz as f; f()' +# """ +# time = Time('2012-06-21 03:00:00') +# location = EarthLocation.from_geodetic(-155*u.deg, 19*u.deg, 0*u.m) +# import ephem +# moon = ephem.Moon() +# pe_obs = ephem.Observer() +# pe_obs.lat = location.latitude.to(u.degree).to_string(sep=':') +# pe_obs.lon = location.longitude.to(u.degree).to_string(sep=':') +# pe_obs.elevation = location.height.to(u.m).value +# pe_obs.date = time.datetime +# pe_obs.pressure = 0 +# moon.compute(pe_obs) +# print(map(float, [moon.alt, moon.az])) + class TestExceptions(unittest.TestCase): def test_rise_set_transit_which(self): lat = '00:00:00' diff --git a/astroplan/tests/test_moon.py b/astroplan/tests/test_moon.py index <HASH>..<HASH> 100644 --- a/astroplan/tests/test_moon.py +++ b/astroplan/tests/test_moon.py @@ -47,4 +47,4 @@ def print_pyephem_illumination(): pe_obs.date = t.datetime moon.compute(pe_obs) illuminations.append(moon.moon_phase) - print(illuminations) + print(illuminations) \ No newline at end of file
Keeping moon_altaz out of tests for now
astropy_astroplan
train
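The commented-out reference helper in the diff above computes the moon's altitude and azimuth with PyEphem. A runnable condensation of that helper, with the Astropy location and time objects replaced by literal values so only the ephem package is needed; the calls mirror the diff, but the sexagesimal and date-string literals are assumptions of this sketch:

```python
import ephem

# Observer near Mauna Kea, mirroring the values used in the test.
obs = ephem.Observer()
obs.lat = "19:00:00"         # latitude as a sexagesimal string
obs.lon = "-155:00:00"       # longitude, degrees east negative-west
obs.elevation = 0            # metres
obs.pressure = 0             # disable refraction, as in the test
obs.date = "2012/6/21 03:00:00"

moon = ephem.Moon()
moon.compute(obs)
print(float(moon.alt), float(moon.az))  # both in radians
```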
4325d5c1f8a5e358187ef9ab1dec3e460f60e818
diff --git a/lib/demo/commands/bugs.js b/lib/demo/commands/bugs.js index <HASH>..<HASH> 100644 --- a/lib/demo/commands/bugs.js +++ b/lib/demo/commands/bugs.js @@ -193,6 +193,9 @@ function liFromBug(doc, bug, predictions) { if (done) { li.setAttribute('style', 'text-decoration: line-through; color: grey;'); } + if (bug.status ==='ASSIGNED') { + li.setAttribute('style', 'font-weight: bold;'); + } var a = doc.createElement('a'); a.setAttribute('target', '_blank'); a.setAttribute('href', 'https://bugzilla.mozilla.org/show_bug.cgi?id=' + bug.id);
Bug <I> (part): Tweak bugz command to highlight work in progress
joewalker_gcli
train
a1ba27da099b31d64511be12dc81fcebf500449a
diff --git a/coursera/coursera_dl.py b/coursera/coursera_dl.py index <HASH>..<HASH> 100755 --- a/coursera/coursera_dl.py +++ b/coursera/coursera_dl.py @@ -261,7 +261,7 @@ def get_anchor_format(a): return (fmt.group(1) if fmt else None) -def parse_syllabus(page, cookies_file): +def parse_syllabus(page, cookies_file, reverse=False): """ Parses a Coursera course listing/syllabus page. Each section is a week of classes. @@ -310,6 +310,9 @@ def parse_syllabus(page, cookies_file): logging.info('Found %d sections and %d lectures on this page', len(sections), sum(len(s[1]) for s in sections)) + if sections and reverse: + sections = sections[::-1] + if not len(sections): logging.error('Probably bad cookies file (or wrong class name)') @@ -606,6 +609,13 @@ def parseArgs(): action='append', default=[], help='additional classes to get') + parser.add_argument('-r', + '--reverse', + dest='reverse', + action='store_true', + default=False, + help='download sections in reverse order') + args = parser.parse_args() # turn list of strings into list @@ -658,7 +668,7 @@ def download_class(args, class_name): # parse it sections = parse_syllabus(page, args.cookies_file - or tmp_cookie_file) + or tmp_cookie_file, args.reverse) # obtain the resources download_lectures(
Added -r,--reverse parameter to download courses in reverse order. Adding this option prevents re-downloading all courses if the course list is in reverse order (last week first).
coursera-dl_coursera-dl
train
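The commit above threads a store_true argparse flag into parse_syllabus and reverses the section list with a slice. A self-contained sketch of that wiring, with the real page-parsing logic replaced by a placeholder list:

```python
import argparse

def parse_syllabus(sections, reverse=False):
    # Placeholder for the real syllabus parsing; the sections
    # are simply handed in as a list here.
    if sections and reverse:
        sections = sections[::-1]  # last week first
    return sections

parser = argparse.ArgumentParser()
parser.add_argument("-r", "--reverse", action="store_true", default=False,
                    help="download sections in reverse order")
args = parser.parse_args(["-r"])

print(parse_syllabus(["week1", "week2", "week3"], args.reverse))
# ['week3', 'week2', 'week1']
```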
44cee94f62bb64ec194b48a8fb6cd1bd67a1d7c2
diff --git a/tests/Text/EllipsisHelperTest.php b/tests/Text/EllipsisHelperTest.php index <HASH>..<HASH> 100644 --- a/tests/Text/EllipsisHelperTest.php +++ b/tests/Text/EllipsisHelperTest.php @@ -1,8 +1,11 @@ <?php /* + * This file is part of Handlebars.php Helpers Set * - * (c) Matteo Merola <[email protected]> + * (c) Dmitriy Simushev <[email protected]> * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. */ namespace JustBlackBird\HandlebarsHelpers\Tests\Text;
Fix license header in a test file
JustBlackBird_handlebars.php-helpers
train
20528f5fab2cdf286a4ae50a5652b7d80c69bb70
diff --git a/organizations/mixins.py b/organizations/mixins.py index <HASH>..<HASH> 100644 --- a/organizations/mixins.py +++ b/organizations/mixins.py @@ -52,7 +52,7 @@ class MembershipRequiredMixin(object): self.request = request self.args = args self.kwargs = kwargs - self.organization = self.get_organization(**kwargs) + self.organization = self.get_organization() if not self.organization.is_member(request.user): return HttpResponseForbidden(_("Whoops, wrong organization")) return super(MembershipRequiredMixin, self).dispatch(request, *args, @@ -65,7 +65,7 @@ class AdminRequiredMixin(object): self.request = request self.args = args self.kwargs = kwargs - self.organization = self.get_organization(**kwargs) + self.organization = self.get_organization() if not self.organization.is_admin(request.user): return HttpResponseForbidden(_("Sorry, admins only")) return super(AdminRequiredMixin, self).dispatch(request, *args, @@ -78,7 +78,7 @@ class OwnerRequiredMixin(object): self.request = request self.args = args self.kwargs = kwargs - self.organization = self.get_organization(**kwargs) + self.organization = self.get_organization() if self.organization.owner.organization_user.user != request.user: return HttpResponseForbidden(_("You are not the organization owner")) return super(OwnerRequiredMixin, self).dispatch(request, *args,
Fix error due to change in get_object definition
bennylope_django-organizations
train
8ce4cf3ecbad58be4d376c6e4bb8905a622761f1
diff --git a/http/src/http-driver.js b/http/src/http-driver.js index <HASH>..<HASH> 100644 --- a/http/src/http-driver.js +++ b/http/src/http-driver.js @@ -102,10 +102,13 @@ function normalizeRequestOptions(reqOptions) { } function isolateSource(response$$, scope) { - return response$$.filter(res$ => + let isolatedResponse$$ = response$$.filter(res$ => Array.isArray(res$.request._namespace) && res$.request._namespace.indexOf(scope) !== -1 ) + isolatedResponse$$.isolateSource = isolateSource + isolatedResponse$$.isolateSink = isolateSink + return isolatedResponse$$ } function isolateSink(request$, scope) {
Fix isolateSource to allow nested scoping
cyclejs_cyclejs
train
76261319e18c7a94c1241878031067753545e745
diff --git a/backprop/main.py b/backprop/main.py index <HASH>..<HASH> 100644 --- a/backprop/main.py +++ b/backprop/main.py @@ -5,8 +5,8 @@ import numpy as np class Instance: def __init__(self, features, target): - self.features = np.matrix(features) - self.target = target + self.features = np.array(features) + self.targets = np.array(target) #end Instance @@ -14,22 +14,20 @@ class Instance: training_one = [ Instance( [0,0], [0] ), Instance( [0,1], [1] ), Instance( [1,0], [1] ), Instance( [1,1], [0] ) ] training_two = [ Instance( [0,0], [0,0] ), Instance( [0,1], [1,1] ), Instance( [1,0], [1,1] ), Instance( [1,1], [0,0] ) ] - - n_inputs = 2 n_outputs = 1 -n_hiddens = 6 +n_hiddens = 2 n_hidden_layers = 1 # specify activation functions per layer -activation_functions = [ tanh_function ]*n_hidden_layers + [ linear_function ] +activation_functions = [ tanh_function ]*n_hidden_layers + [ sigmoid_function ] # initialize your neural network network = NeuralNet(n_inputs, n_outputs, n_hiddens, n_hidden_layers, activation_functions) # start training -network.backpropagation(training_one, ERROR_LIMIT=1e-3) +network.backpropagation(training_one, ERROR_LIMIT=1e-4) for instance in training_one: - print instance.features, network.update( instance.features ), "\ttarget:", instance.target \ No newline at end of file + print instance.features, network.update( np.array([instance.features]) ), "\ttarget:", instance.targets \ No newline at end of file
Minor: updated to support the changes in the neural network
jorgenkg_python-neural-network
train
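With the change above, each training example stores plain NumPy arrays for both features and targets. A minimal sketch of the resulting XOR training-set construction, mirroring training_one in the diff:

```python
import numpy as np

class Instance:
    """One training example: an input vector and its target vector."""

    def __init__(self, features, target):
        self.features = np.array(features)
        self.targets = np.array(target)

# XOR with a single output per example, as in training_one above.
training_one = [
    Instance([0, 0], [0]),
    Instance([0, 1], [1]),
    Instance([1, 0], [1]),
    Instance([1, 1], [0]),
]
print(training_one[0].features.shape, training_one[0].targets.shape)
# (2,) (1,)
```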
0f0bf191791aa880329243d84270c87f261ca50a
diff --git a/jplephem/daf.py b/jplephem/daf.py index <HASH>..<HASH> 100644 --- a/jplephem/daf.py +++ b/jplephem/daf.py @@ -26,7 +26,7 @@ class DAF(object): self.locidw = file_record[:8].upper().rstrip() if not self.locidw.startswith(b'DAF/'): - raise ValueError('file starts with {0!r}, not the 4 bytes {0!r}' + raise ValueError('file starts with {0!r}, not the 4 bytes {1!r}' .format(self.locidw, b'DAF/')) if file_record[500:1000].strip(b'\0') != FTPSTR:
Fix typo pointed out in #<I>
brandon-rhodes_python-jplephem
train
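The one-character fix above matters because str.format indexes arguments positionally: with two {0!r} placeholders both render the first argument and the second is silently ignored. A quick demonstration with hypothetical byte strings:

```python
locidw = b"NAIF/DAF"

# Buggy template: both placeholders point at argument 0, so the
# second argument never appears in the error message.
buggy = "file starts with {0!r}, not the 4 bytes {0!r}".format(locidw, b"DAF/")
print(buggy)   # ... b'NAIF/DAF' ... b'NAIF/DAF'

# Fixed template: the second placeholder uses argument 1.
fixed = "file starts with {0!r}, not the 4 bytes {1!r}".format(locidw, b"DAF/")
print(fixed)   # ... b'NAIF/DAF' ... b'DAF/'
```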
e4dc7c267abe40abe3686a842a68bba6a792cfc0
diff --git a/plivo/resources/multipartycall.py b/plivo/resources/multipartycall.py index <HASH>..<HASH> 100644 --- a/plivo/resources/multipartycall.py +++ b/plivo/resources/multipartycall.py @@ -228,8 +228,8 @@ class MultiPartyCalls(PlivoResourceInterface): of_type_exact(str), one_of(is_url(), is_in(('real', 'none'), case_sensitive=False, case_type='lower')), )], - ring_timeout=[optional(one_of(of_type_exact(str), of_type_exact(int)), multiple_valid_integers(15, 120))], - delay_dial=[optional(one_of(of_type_exact(str), of_type_exact(int)), multiple_valid_integers(0, 120))], + ring_timeout=[optional(one_of(of_type_exact(str), of_type_exact(int)), multiple_valid_integers())], + delay_dial=[optional(one_of(of_type_exact(str), of_type_exact(int)), multiple_valid_integers())], max_duration=[optional( of_type_exact(int), check(lambda max_duration: 300 <= max_duration <= 28800, '300 < max_duration <= 28800'))], diff --git a/plivo/utils/validators.py b/plivo/utils/validators.py index <HASH>..<HASH> 100644 --- a/plivo/utils/validators.py +++ b/plivo/utils/validators.py @@ -180,27 +180,17 @@ def is_iterable(validator, sep=None): return required(f) -def multiple_valid_integers(lowerbound, upperbound): +def multiple_valid_integers(): def f(name, value): - if isinstance(value, int): - if value >= lowerbound and value <= upperbound: - return value, [] - else: - return None, {name + ' value must be in range ' + str(lowerbound) + ' to ' + str(upperbound)} - else: + if isinstance(value, str): values = value.split('<') for i in values: - is_int = True try: int(i) except ValueError: - is_int = False - if is_int: - if int(i) > upperbound or int(i) < lowerbound: - return None, [name + ' destination value must be in range ' + str(lowerbound) + ' to ' + str(upperbound)] - else: return None, ['{} destination value must be integer'.format(name)] return value, [] + return value, [] return f
Removed range validation for ring_timeout and delay_dial
plivo_plivo-python
train
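After the commit above, the validator only checks that every '<'-separated segment parses as an integer; the lower/upper-bound checks are gone. A standalone sketch of that logic, flattened out of the library's validator-combinator style:

```python
def validate_multiple_integers(name, value):
    """Return (value, errors) in the style of the library's validators."""
    if isinstance(value, str):
        for part in value.split("<"):
            try:
                int(part)
            except ValueError:
                return None, ["{} destination value must be integer".format(name)]
    return value, []

print(validate_multiple_integers("ring_timeout", "15<30<45"))  # ('15<30<45', [])
print(validate_multiple_integers("ring_timeout", "15<abc"))    # (None, [...])
```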
4b8fa58206f0c9a39f695df896eb7098b0e29a56
diff --git a/phy/cluster/manual/gui_plugins.py b/phy/cluster/manual/gui_plugins.py index <HASH>..<HASH> 100644 --- a/phy/cluster/manual/gui_plugins.py +++ b/phy/cluster/manual/gui_plugins.py @@ -129,32 +129,13 @@ class ManualClustering(IPlugin): def select(self, cluster_ids): self.wizard.selection = cluster_ids - # def reset_wizard(self): - # self.wizard.start() - - # def first(self): - # self.wizard.first() - - # def last(self): - # self.wizard.last() - - # def next(self): - # self.wizard.next() - - # def previous(self): - # self.wizard.previous() - - # def pin(self): - # self.wizard.pin() - - # def unpin(self): - # self.wizard.unpin() - # Clustering actions # ------------------------------------------------------------------------- def merge(self, cluster_ids=None): - pass + if cluster_ids is None: + cluster_ids = self.wizard.selection + self.clustering.merge(cluster_ids) def split(self, spike_ids=None): pass diff --git a/phy/cluster/manual/tests/test_gui_plugins.py b/phy/cluster/manual/tests/test_gui_plugins.py index <HASH>..<HASH> 100644 --- a/phy/cluster/manual/tests/test_gui_plugins.py +++ b/phy/cluster/manual/tests/test_gui_plugins.py @@ -42,32 +42,42 @@ def test_manual_clustering(manual_clustering): def on_select(cluster_ids, spike_ids): _s.append((cluster_ids, spike_ids)) + def _assert_selection(*cluster_ids): + assert _s[-1][0] == list(cluster_ids) + # Test select actions. actions.select([]) - ae(_s[-1][0], []) - ae(_s[-1][1], []) + _assert_selection() # Test wizard actions. actions.reset_wizard() assert wizard.best_list == [3, 2, 7, 5] assert wizard.best == 3 + _assert_selection(3) actions.next() assert wizard.best == 2 + _assert_selection(2) actions.last() assert wizard.best == 5 + _assert_selection(5) actions.previous() assert wizard.best == 7 + _assert_selection(7) actions.first() assert wizard.best == 3 + _assert_selection(3) # Test pinning. actions.pin() assert wizard.match_list == [2, 7, 5] assert wizard.match == 2 + _assert_selection(3, 2) + wizard.next() assert wizard.match == 7 assert len(_s) == 9 + _assert_selection(3, 7)
WIP: test wizard actions in manual clustering plugin
kwikteam_phy
train
423b667edfd1de4144392d092e443c7110831283
diff --git a/python/orca/src/bigdl/orca/data/shard.py b/python/orca/src/bigdl/orca/data/shard.py index <HASH>..<HASH> 100644 --- a/python/orca/src/bigdl/orca/data/shard.py +++ b/python/orca/src/bigdl/orca/data/shard.py @@ -213,16 +213,16 @@ class SparkXShards(XShards): return self.rdd.map(lambda data: len(data) if hasattr(data, '__len__') else 1)\ .reduce(lambda l1, l2: l1 + l2) else: - first = self.rdd.first() - if not hasattr(first, '__getitem__'): - raise Exception("No selection operation available for this XShards") - else: + + def get_len(data): + assert hasattr(data, '__getitem__'), \ + "No selection operation available for this XShards" try: - data = first[key] + value = data[key] except: raise Exception("Invalid key for this XShards") - return self.rdd.map(lambda data: len(data[key]) if hasattr(data[key], '__len__') - else 1).reduce(lambda l1, l2: l1 + l2) + return len(value) if hasattr(value, '__len__') else 1 + return self.rdd.map(get_len).reduce(lambda l1, l2: l1 + l2) def save_pickle(self, path, batchSize=10): self.rdd.saveAsPickleFile(path, batchSize)
Orca len on spark change check to task (#<I>)
* add length operation
* update len
* fix style
* change check to each shard
intel-analytics_BigDL
train
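The fix above moves both the key lookup and the length computation into the function mapped over the RDD, so every shard is validated rather than only rdd.first(). A Spark-free sketch of the same logic, with a plain list standing in for the RDD partitions:

```python
from functools import reduce

def get_len(data, key):
    # Each shard must support selection; count len(value) when the
    # selected value is sized, otherwise count it as one element.
    assert hasattr(data, "__getitem__"), \
        "No selection operation available for this shard"
    try:
        value = data[key]
    except Exception:
        raise Exception("Invalid key for this shard")
    return len(value) if hasattr(value, "__len__") else 1

shards = [{"x": [1, 2, 3]}, {"x": [4, 5]}]  # stand-in for rdd.map input
total = reduce(lambda a, b: a + b, (get_len(s, "x") for s in shards))
print(total)  # 5
```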
1aad8ad7bce4358889e7c809e58fe5e1712d18f1
diff --git a/lib/constants.rb b/lib/constants.rb index <HASH>..<HASH> 100644 --- a/lib/constants.rb +++ b/lib/constants.rb @@ -1,6 +1,6 @@ module Normalic - module Address + class Address Directional = { "north" => "n", diff --git a/lib/normalic.rb b/lib/normalic.rb index <HASH>..<HASH> 100644 --- a/lib/normalic.rb +++ b/lib/normalic.rb @@ -1,9 +1,44 @@ #only handles U.S addresses require 'constants' -require 'ruby-debug' module Normalic - module Address + class Address + + attr_accessor :number, :direction, :street, :type, :city, :state, :zipcode + def initialize(fields={}) + @number = fields[:number] + @direction = fields[:direction] + @street = fields[:street] + @type = fields[:type] + @city = fields[:city] + @state = fields[:state] + @zipcode = fields[:zipcode] + end + + def [](field_name) + begin + self.send(field_name.to_s) + rescue NoMethodError => e + nil + end + end + + def []=(field_name, value) + begin + self.send("#{field_name}=", value) + rescue NoMethodError => e + nil + end + end + + def to_s + "#{line1}, #{city.gsub(/\w+/){|w| w.capitalize}}, #{state.upcase} #{zipcode}" + end + + def line1 + "#{number}#{" "+direction.upcase if direction} #{street.gsub(/\w+/){|w| w.capitalize}} #{type.capitalize}" + end + #Iteratively take chunks off of the string. def self.parse(address) address.strip! @@ -64,15 +99,17 @@ module Normalic end street = arr[4].strip.downcase if arr[4] && !street - { - :number => number, - :direction => dir, - :street => street, - :type => type, - :city => city, - :state => state, - :zipcode => zipcode - } + self.new( + { + :number => number, + :direction => dir, + :street => street, + :type => type, + :city => city, + :state => state, + :zipcode => zipcode + } + ) end end end diff --git a/spec/normalic_spec.rb b/spec/normalic_spec.rb index <HASH>..<HASH> 100644 --- a/spec/normalic_spec.rb +++ b/spec/normalic_spec.rb @@ -1,7 +1,7 @@ require 'lib/normalic' describe "Normalic test" do - + it "should parse an address with unit(floor) information" do addr = Normalic::Address.parse("201 Varick St. floor 12th, New York, NY 10014") addr[:number].should == "201" @@ -55,4 +55,24 @@ describe "Normalic test" do addr[:direction].should == "w" addr[:type].should == "st" end + + it "should use dot notation" do + addr = Normalic::Address.parse("871 West Washington Street") + addr.number.should == "871" + end + + it "should return nil if a bad field is passed in" do + addr = Normalic::Address.parse("871 West Washington Street") + addr[:bad_name].should == nil + end + + it "should return a line1" do + addr = Normalic::Address.parse("871 West Washington Street") + addr.line1.should == "871 W Washington St" + end + + it "should have a to_s method" do + addr = Normalic::Address.parse("167 West 4th Street, New York, NY 10014") + addr.to_s.should == "167 W 4th St, New York, NY 10014" + end end
objectify the hash returned in 'parse'
ericxtang_normalic
train
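The Ruby change above keeps hash-style access working on the new Address object by delegating [] to attribute readers and returning nil for unknown keys. A Python analogue of that accessor pattern (illustrative only; normalic itself is Ruby):

```python
class Address:
    def __init__(self, **fields):
        self.number = fields.get("number")
        self.street = fields.get("street")

    def __getitem__(self, field_name):
        # Dict-style access falls back to None for unknown fields,
        # matching the Ruby rescue-NoMethodError behaviour.
        return getattr(self, str(field_name), None)

addr = Address(number="871", street="washington")
print(addr["number"])    # '871'
print(addr["bad_name"])  # None
```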
f1b6ce824d6f7704bdc6199718671ed04b20d73a
diff --git a/mod/data/tabs.php b/mod/data/tabs.php index <HASH>..<HASH> 100755 --- a/mod/data/tabs.php +++ b/mod/data/tabs.php @@ -32,13 +32,13 @@ $inactive = NULL; $row = array(); - $row[] = new tabobject('browse', $CFG->wwwroot.'/mod/data/view.php?d='.$data->id, get_string('browse','data')); + $row[] = new tabobject('browse', $CFG->wwwroot.'/mod/data/view.php?d='.$data->id, get_string('browse','data'), '', true); if (isteacher($course->id) or ($data->participants == PARTICIPANTS_S) or ($data->participants == PARTICIPANTS_TS)){ - $row[] = new tabobject('add', $CFG->wwwroot.'/mod/data/add.php?d='.$data->id, get_string('add','data')); + $row[] = new tabobject('add', $CFG->wwwroot.'/mod/data/add.php?d='.$data->id, get_string('add','data'), '', true); } if (isteacher($course->id)) { $row[] = new tabobject('templates', $CFG->wwwroot.'/mod/data/templates.php?d='.$data->id.'&amp;mode=singletemplate', get_string('templates','data')); - $row[] = new tabobject('fields', $CFG->wwwroot.'/mod/data/fields.php?d='.$data->id, get_string('fields','data')); + $row[] = new tabobject('fields', $CFG->wwwroot.'/mod/data/fields.php?d='.$data->id, get_string('fields','data'), '', true); } $tabs[] = $row; @@ -83,4 +83,4 @@ print_tabs($tabs, $currenttab, $inactive); -?> +?> \ No newline at end of file
Made some of the tabs linked even if selected in the different views.
moodle_moodle
train
f96be45fa06ed20582baccc3314d030c982393fb
diff --git a/src/editor/EditorCommandHandlers.js b/src/editor/EditorCommandHandlers.js index <HASH>..<HASH> 100644 --- a/src/editor/EditorCommandHandlers.js +++ b/src/editor/EditorCommandHandlers.js @@ -629,7 +629,9 @@ define(function (require, exports, module) { hasSelection = (sel.start.line !== sel.end.line) || (sel.start.ch !== sel.end.ch), inlineWidget = EditorManager.getFocusedInlineWidget(), firstLine = editor.getFirstVisibleLine(), - lastLine = editor.getLastVisibleLine(); + lastLine = editor.getLastVisibleLine(), + totalLines = editor.lineCount(), + lineLength = 0; sel.start.ch = 0; // The end of the selection becomes the start of the next line, if it isn't already @@ -645,7 +647,13 @@ define(function (require, exports, module) { var prevText = doc.getRange({ line: sel.start.line - 1, ch: 0 }, sel.start); if (sel.end.line === lastLine + 1) { - prevText = "\n" + prevText.substring(0, prevText.length - 1); + if (inlineWidget) { + prevText = prevText.substring(0, prevText.length - 1); + lineLength = doc.getLine(sel.end.line - 1).length; + doc.replaceRange("\n", { line: sel.end.line - 1, ch: lineLength }); + } else { + prevText = "\n" + prevText.substring(0, prevText.length - 1); + } } doc.replaceRange("", { line: sel.start.line - 1, ch: 0 }, sel.start); @@ -663,17 +671,28 @@ define(function (require, exports, module) { } break; case DIRECTION_DOWN: - if (sel.end.line <= lastLine + (inlineWidget ? -1 : 1)) { + if (sel.end.line <= lastLine) { doc.batchOperation(function () { - var nextText = doc.getRange(sel.end, { line: sel.end.line + 1, ch: 0 }); + var nextText = doc.getRange(sel.end, { line: sel.end.line + 1, ch: 0 }), + deletionStart = sel.end; - var deletionStart = sel.end; - if (!inlineWidget && sel.end.line === lastLine) { - nextText += "\n"; - deletionStart = { line: sel.end.line - 1, ch: doc.getLine(sel.end.line - 1).length }; + if (sel.end.line === lastLine) { + if (inlineWidget) { + if (sel.end.line === totalLines - 1) { + nextText += "\n"; + } + lineLength = doc.getLine(sel.end.line - 1).length; + doc.replaceRange("\n", { line: sel.end.line, ch: doc.getLine(sel.end.line).length }); + } else { + nextText += "\n"; + deletionStart = { line: sel.end.line - 1, ch: doc.getLine(sel.end.line - 1).length }; + } } doc.replaceRange("", deletionStart, { line: sel.end.line + 1, ch: 0 }); + if (lineLength) { + doc.replaceRange("", { line: sel.end.line - 1, ch: lineLength }, { line: sel.end.line, ch: 0 }); + } doc.replaceRange(nextText, { line: sel.start.line, ch: 0 }); }); }
Adding special cases when moving lines up or down at the last lines of an inline editor
adobe_brackets
train
cf7692cbf1665daea28fbb11bc5a04e628b633ae
diff --git a/alot/message.py b/alot/message.py index <HASH>..<HASH> 100644 --- a/alot/message.py +++ b/alot/message.py @@ -116,6 +116,10 @@ class Message(object): self._thread = self._dbman.get_thread(self._thread_id) return self._thread + def has_replies(self): + """returns true if this message has at least one reply""" + return (len(self.get_replies()) > 0) + def get_replies(self): """returns replies to this message as list of :class:`Message`""" t = self.get_thread() diff --git a/alot/widgets.py b/alot/widgets.py index <HASH>..<HASH> 100644 --- a/alot/widgets.py +++ b/alot/widgets.py @@ -481,7 +481,7 @@ class MessageWidget(urwid.WidgetWrap): return ('fixed', length, spacer) def _get_arrowhead_aligner(self): - if len(self.message.get_replies()) > 0: + if self.message.has_replies(): aligner = u'\u2502' else: aligner = ' '
Add Message::has_replies()
This method enables a callee to ask a Message object whether there are any replies to it. Use it to decide what spacer to use for aligning unfolded message parts to its summary line (cf issue #<I>).
pazz_alot
train
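has_replies() in the commit above is a thin predicate over get_replies(), which lets the thread-drawing code read declaratively. A minimal sketch with a stub Message class; the aligner character matches the widget code in the diff:

```python
class Message:
    def __init__(self, replies=None):
        self._replies = replies or []

    def get_replies(self):
        return self._replies

    def has_replies(self):
        # True if at least one reply exists.
        return len(self.get_replies()) > 0

def arrowhead_aligner(message):
    # Continue the thread line only when something hangs below.
    return u"\u2502" if message.has_replies() else " "

print(arrowhead_aligner(Message([object()])))  # '│'
print(arrowhead_aligner(Message()))            # ' '
```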
284d862a220e697a8473ac627d5f20cf3beffe9b
diff --git a/src/utils/constants.js b/src/utils/constants.js index <HASH>..<HASH> 100644 --- a/src/utils/constants.js +++ b/src/utils/constants.js @@ -112,7 +112,6 @@ export const validProps = [ 'enablePredictiveSuggestions', 'applyStopwords', 'customStopwords', - 'onData', ]; export const CLEAR_ALL = {
fix: stop storing onData in the redux store
appbaseio_reactivecore
train
fd482657f2b14c42badf26e73bd5c125855523fb
diff --git a/jmccc-mcdownloader/src/main/java/org/to2mbn/jmccc/mcdownloader/provider/liteloader/LiteloaderDownloadProvider.java b/jmccc-mcdownloader/src/main/java/org/to2mbn/jmccc/mcdownloader/provider/liteloader/LiteloaderDownloadProvider.java index <HASH>..<HASH> 100644 --- a/jmccc-mcdownloader/src/main/java/org/to2mbn/jmccc/mcdownloader/provider/liteloader/LiteloaderDownloadProvider.java +++ b/jmccc-mcdownloader/src/main/java/org/to2mbn/jmccc/mcdownloader/provider/liteloader/LiteloaderDownloadProvider.java @@ -104,6 +104,13 @@ public class LiteloaderDownloadProvider extends AbstractMinecraftDownloadProvide // it's a snapshot return source.liteloaderSnapshotVersionJson(liteloader) + .andThen(new ResultProcessor<JSONObject, JSONObject>() { + + @Override + public JSONObject process(JSONObject json) throws Exception { + return processSnapshotLiteloaderVersion(mcdir, json, liteloader); + } + }) .andThen(new VersionJsonProcessor(mcdir)) .cachePool(CacheNames.LITELOADER_VERSION_JSON); } else { @@ -189,10 +196,9 @@ public class LiteloaderDownloadProvider extends AbstractMinecraftDownloadProvide String tweakClass = liteloader.getTweakClass(); Set<JSONObject> liteloaderLibraries = liteloader.getLibraries(); - JSONObject versionjson = IOUtils.toJson(mcdir.getVersionJson(superVersion)); + JSONObject versionJson = IOUtils.toJson(mcdir.getVersionJson(superVersion)); - String version = String.format("%s-LiteLoader%s", superVersion, minecraftVersion); - String minecraftArguments = String.format("%s --tweakClass %s", versionjson.getString("minecraftArguments"), + String minecraftArguments = String.format("%s --tweakClass %s", versionJson.getString("minecraftArguments"), tweakClass == null ? LITELOADER_TWEAK_CLASS : tweakClass); JSONArray libraries = new JSONArray(); JSONObject liteloaderLibrary = new JSONObject(); @@ -220,15 +226,38 @@ public class LiteloaderDownloadProvider extends AbstractMinecraftDownloadProvide } } - versionjson.put("inheritsFrom", superVersion); - versionjson.put("minecraftArguments", minecraftArguments); - versionjson.put("mainClass", LAUNCH_WRAPPER_MAINCLASS); - versionjson.put("id", version); - versionjson.put("libraries", libraries); - versionjson.remove("downloads"); - versionjson.remove("assets"); - versionjson.remove("assetIndex"); - return versionjson; + versionJson.put("inheritsFrom", superVersion); + versionJson.put("minecraftArguments", minecraftArguments); + versionJson.put("mainClass", LAUNCH_WRAPPER_MAINCLASS); + versionJson.put("id", generateLiteloaderVersionName(liteloader)); + versionJson.put("libraries", libraries); + versionJson.remove("downloads"); + versionJson.remove("assets"); + versionJson.remove("assetIndex"); + return versionJson; + } + + protected JSONObject processSnapshotLiteloaderVersion(MinecraftDirectory mcdir, JSONObject versionJson, LiteloaderVersion liteloader) throws IOException { + versionJson.put("inheritsFrom", liteloader.getSuperVersion()); + versionJson.put("id", generateLiteloaderVersionName(liteloader)); + + final String TWEAK_CLASS_ARG_PREFIX = "--tweakClass "; + String minecraftArguments = versionJson.getString("minecraftArguments"); + int tweakArgIdx = minecraftArguments.lastIndexOf(TWEAK_CLASS_ARG_PREFIX); + String tweakClass = tweakArgIdx == -1 ? LITELOADER_TWEAK_CLASS + : minecraftArguments.substring(tweakArgIdx + TWEAK_CLASS_ARG_PREFIX.length()); + + JSONObject superVersionJson = IOUtils.toJson(mcdir.getVersionJson(liteloader.getSuperVersion())); + String superMinecraftArguments = superVersionJson.getString("minecraftArguments"); + + versionJson.put("minecraftArguments", String.format("%s --tweakClass %s", superMinecraftArguments, tweakClass)); + + return versionJson; + } + + protected String generateLiteloaderVersionName(LiteloaderVersion liteloader) { + return String.format("%s-LiteLoader%s", liteloader.getSuperVersion(), liteloader.getMinecraftVersion()); } }
Fix failure to download forge+liteloader snapshots
to2mbn_JMCCC
train
8d2b5bbf631b4746f97245c18f4ac165290c7c8d
diff --git a/ggplot/geoms/geom_point.py b/ggplot/geoms/geom_point.py index <HASH>..<HASH> 100644 --- a/ggplot/geoms/geom_point.py +++ b/ggplot/geoms/geom_point.py @@ -78,7 +78,7 @@ class geom_point(geom): [0.5*da.height], alpha=data['alpha'], marker=data['shape'], - markersize=data['size'], + markersize=data['size']+data['stroke'], markerfacecolor=data['fill'], markeredgecolor=data['color'], markeredgewidth=data['stroke']) diff --git a/ggplot/guides/guide_legend.py b/ggplot/guides/guide_legend.py index <HASH>..<HASH> 100644 --- a/ggplot/guides/guide_legend.py +++ b/ggplot/guides/guide_legend.py @@ -203,11 +203,19 @@ class guide_legend(guide): for i in range(nbreak): for gl in self.glayers: pad = default_pad + # Full size of object to appear in the + # legend key + if 'stroke' in gl.data: + _size = (gl.data.ix[i, 'size'] + + 2*gl.data.ix[i, 'stroke']) + else: + _size = gl.data.ix[i, 'size'] + # special case, color does not apply to # border/linewidth if issubclass(gl.geom, geom_text): pad = 0 - if gl.data.ix[i, 'size'] < default_size: + if _size < default_size: continue try: @@ -215,11 +223,10 @@ class guide_legend(guide): # When the edge is not visible, we should # not expand the size of the keys if gl.data.ix[i, 'color'] is not None: - size[i] = np.max([ - gl.data.ix[i, 'size'].max()+pad, - size[i]]) + size[i] = np.max([_size+pad, size[i]]) except KeyError: break + return size if self.keywidth is None:
Fix legend entry sizes when using stroke aesthetic
has2k1_plotnine
train
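The legend fix above treats a point's full footprint as its size plus twice its stroke (the edge is drawn around the marker) and only grows the key when that footprint exceeds the default size. A small sketch of that sizing rule; the helper names are hypothetical, not plotnine's API:

```python
def full_point_size(size, stroke=0):
    # The edge adds `stroke` on each side of the marker.
    return size + 2 * stroke

def legend_key_size(sizes_strokes, default_size=10, pad=2):
    """Pick a key size big enough for the largest drawn point."""
    best = default_size
    for size, stroke in sizes_strokes:
        total = full_point_size(size, stroke)
        if total >= default_size:
            best = max(best, total + pad)
    return best

print(legend_key_size([(6, 1), (12, 2)]))  # 18 = 12 + 2*2 + 2
```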
35edca3e422554444b6ec34a010bc1c9326a7745
diff --git a/pkg/apis/kops/networking.go b/pkg/apis/kops/networking.go index <HASH>..<HASH> 100644 --- a/pkg/apis/kops/networking.go +++ b/pkg/apis/kops/networking.go @@ -73,6 +73,8 @@ type CalicoNetworkingSpec struct { CrossSubnet bool `json:"crossSubnet,omitempty"` // Enables Calico's cross-subnet mode when set to true // LogSeverityScreen lets us set the desired log level. (Default: info) LogSeverityScreen string `json:"logSeverityScreen,omitempty"` + // MTU to be set in the cni-network-config for calico. + MTU *int32 `json:"mtu,omitempty"` // PrometheusMetricsEnabled can be set to enable the experimental Prometheus // metrics server (default: false) PrometheusMetricsEnabled bool `json:"prometheusMetricsEnabled,omitempty"` @@ -83,8 +85,6 @@ type CalicoNetworkingSpec struct { PrometheusGoMetricsEnabled bool `json:"prometheusGoMetricsEnabled,omitempty"` // PrometheusProcessMetricsEnabled enables Prometheus process metrics collection PrometheusProcessMetricsEnabled bool `json:"prometheusProcessMetricsEnabled,omitempty"` - // MTU to be set in the cni-network-config for calico. - MTU *int32 `json:"mtu,omitempty"` } // CanalNetworkingSpec declares that we want Canal networking diff --git a/pkg/apis/kops/v1alpha1/networking.go b/pkg/apis/kops/v1alpha1/networking.go index <HASH>..<HASH> 100644 --- a/pkg/apis/kops/v1alpha1/networking.go +++ b/pkg/apis/kops/v1alpha1/networking.go @@ -73,6 +73,8 @@ type CalicoNetworkingSpec struct { CrossSubnet bool `json:"crossSubnet,omitempty"` // Enables Calico's cross-subnet mode when set to true // LogSeverityScreen lets us set the desired log level. (Default: info) LogSeverityScreen string `json:"logSeverityScreen,omitempty"` + // MTU to be set in the cni-network-config for calico. + MTU *int32 `json:"mtu,omitempty"` // PrometheusMetricsEnabled can be set to enable the experimental Prometheus // metrics server (default: false) PrometheusMetricsEnabled bool `json:"prometheusMetricsEnabled,omitempty"` @@ -83,8 +85,6 @@ type CalicoNetworkingSpec struct { PrometheusGoMetricsEnabled bool `json:"prometheusGoMetricsEnabled,omitempty"` // PrometheusProcessMetricsEnabled enables Prometheus process metrics collection PrometheusProcessMetricsEnabled bool `json:"prometheusProcessMetricsEnabled,omitempty"` - // MTU to be set in the cni-network-config for calico. - MTU *int32 `json:"mtu,omitempty"` } // CanalNetworkingSpec declares that we want Canal networking diff --git a/pkg/apis/kops/v1alpha2/networking.go b/pkg/apis/kops/v1alpha2/networking.go index <HASH>..<HASH> 100644 --- a/pkg/apis/kops/v1alpha2/networking.go +++ b/pkg/apis/kops/v1alpha2/networking.go @@ -73,6 +73,8 @@ type CalicoNetworkingSpec struct { CrossSubnet bool `json:"crossSubnet,omitempty"` // Enables Calico's cross-subnet mode when set to true // LogSeverityScreen lets us set the desired log level. (Default: info) LogSeverityScreen string `json:"logSeverityScreen,omitempty"` + // MTU to be set in the cni-network-config for calico. + MTU *int32 `json:"mtu,omitempty"` // PrometheusMetricsEnabled can be set to enable the experimental Prometheus // metrics server (default: false) PrometheusMetricsEnabled bool `json:"prometheusMetricsEnabled,omitempty"` @@ -83,8 +85,6 @@ type CalicoNetworkingSpec struct { PrometheusGoMetricsEnabled bool `json:"prometheusGoMetricsEnabled,omitempty"` // PrometheusProcessMetricsEnabled enables Prometheus process metrics collection PrometheusProcessMetricsEnabled bool `json:"prometheusProcessMetricsEnabled,omitempty"` - // MTU to be set in the cni-network-config for calico. - MTU *int32 `json:"mtu,omitempty"` } // CanalNetworkingSpec declares that we want Canal networking
Add fields to CalicoNetworkingSpec in alphabetical order.
kubernetes_kops
train
0900402eb48267a1e0d3b00130d505fc2063bde1
diff --git a/src/lib/angular-iscroll.js b/src/lib/angular-iscroll.js index <HASH>..<HASH> 100644 --- a/src/lib/angular-iscroll.js +++ b/src/lib/angular-iscroll.js @@ -141,6 +141,7 @@ // Export the auto-determined value of `useNativeScroll`. this.useNativeScroll = useNativeScroll; + this.platform = platform; this.configureDefaults = _configureDefaults; function _getDefaults() { @@ -153,7 +154,8 @@ /* @ngInject */ function iScrollService($rootScope, iScrollSignals) { var _state = { - useIScroll: defaultOptions.directive.initiallyEnabled + useIScroll: defaultOptions.directive.initiallyEnabled, + autoDetectedUseNativeScroll: useNativeScroll }; function _disable(signalOnly) {
#<I>: Automatically detect the need for using iScroll. Export the auto-detected findings, and also export the `platform` value.
mtr_angular-iscroll
train
b5872438fdee5ddbe2d1d9c5d4ded0b1849521ea
diff --git a/src/components/user-settings-menu/user-settings-menu.js b/src/components/user-settings-menu/user-settings-menu.js index <HASH>..<HASH> 100644 --- a/src/components/user-settings-menu/user-settings-menu.js +++ b/src/components/user-settings-menu/user-settings-menu.js @@ -10,9 +10,9 @@ import Text from '@commercetools-local/ui-kit/typography/text'; import Spacings from '@commercetools-local/ui-kit/materials/spacings'; import { LOGOUT_REASONS } from '@commercetools-local/constants'; import formatUserName from '@commercetools-local/utils/user'; -import { Avatar } from '@commercetools-local/application-shell'; import Card from '@commercetools-local/core/components/card'; import { MCSupportFormURL } from '../../constants'; +import Avatar from '../avatar'; import styles from './user-settings-menu.mod.css'; import messages from './messages';
fix(app-shell): relative import. Refs #<I>
commercetools_merchant-center-application-kit
train
b7af625659195e35792edce544788d9ce8523742
diff --git a/lib/dependencies/AMDDefineDependencyParserPlugin.js b/lib/dependencies/AMDDefineDependencyParserPlugin.js index <HASH>..<HASH> 100644 --- a/lib/dependencies/AMDDefineDependencyParserPlugin.js +++ b/lib/dependencies/AMDDefineDependencyParserPlugin.js @@ -81,7 +81,7 @@ class AMDDefineDependencyParserPlugin { )) ) { // eslint-disable-line no-cond-assign - dep = new LocalModuleDependency(localModule); + dep = new LocalModuleDependency(localModule, undefined, false); dep.loc = expr.loc; parser.state.current.addDependency(dep); } else { @@ -122,7 +122,7 @@ class AMDDefineDependencyParserPlugin { )) ) { // eslint-disable-line no-cond-assign - dep = new LocalModuleDependency(localModule, param.range); + dep = new LocalModuleDependency(localModule, param.range, false); } else { dep = this.newRequireItemDependency(param.string, param.range); } diff --git a/lib/dependencies/AMDRequireDependenciesBlockParserPlugin.js b/lib/dependencies/AMDRequireDependenciesBlockParserPlugin.js index <HASH>..<HASH> 100644 --- a/lib/dependencies/AMDRequireDependenciesBlockParserPlugin.js +++ b/lib/dependencies/AMDRequireDependenciesBlockParserPlugin.js @@ -78,7 +78,7 @@ class AMDRequireDependenciesBlockParserPlugin { )) ) { // eslint-disable-line no-cond-assign - dep = new LocalModuleDependency(localModule); + dep = new LocalModuleDependency(localModule, undefined, false); dep.loc = expr.loc; parser.state.current.addDependency(dep); } else { @@ -126,7 +126,7 @@ class AMDRequireDependenciesBlockParserPlugin { )) ) { // eslint-disable-line no-cond-assign - dep = new LocalModuleDependency(localModule, param.range); + dep = new LocalModuleDependency(localModule, param.range, false); } else { dep = this.newRequireItemDependency(param.string, param.range); }
Force LocalModuleDependency arity to 3
webpack_webpack
train
c114dfa1bc4042327120bd263f1ca5eec0a97956
diff --git a/pyrax/__init__.py b/pyrax/__init__.py index <HASH>..<HASH> 100755 --- a/pyrax/__init__.py +++ b/pyrax/__init__.py @@ -575,7 +575,7 @@ def _get_service_endpoint(svc, region=None, public=True): @_require_auth -def connect_to_cloudservers(region=None): +def connect_to_cloudservers(region=None, **kwargs): """Creates a client for working with cloud servers.""" _cs_auth_plugin.discover_auth_systems() id_type = get_setting("identity_type") @@ -594,7 +594,7 @@ def connect_to_cloudservers(region=None): project_id=identity.tenant_id, auth_url=identity.auth_endpoint, auth_system=id_type, region_name=region, service_type="compute", auth_plugin=auth_plugin, insecure=insecure, - http_log_debug=_http_debug) + http_log_debug=_http_debug, **kwargs) agt = cloudservers.client.USER_AGENT cloudservers.client.USER_AGENT = _make_agent_name(agt) cloudservers.client.management_url = mgt_url
Provide a way to pass additional arguments onto novaclient in connect_to_cloudservers
pycontribs_pyrax
train
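The change above is the standard **kwargs pass-through pattern: accept arbitrary keyword arguments and forward them verbatim to the wrapped client constructor. A generic sketch with hypothetical names (not the actual novaclient signature):

```python
def make_client(region=None, **kwargs):
    # Everything the wrapper does not consume is forwarded verbatim,
    # so callers can tune settings the wrapper never anticipated.
    options = {"region_name": region, "service_type": "compute"}
    options.update(kwargs)  # caller-supplied extras win
    return options

print(make_client("DFW", timeout=30, bypass_url="http://localhost:8774"))
# {'region_name': 'DFW', 'service_type': 'compute',
#  'timeout': 30, 'bypass_url': 'http://localhost:8774'}
```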
b7c7b951d03180e5db3353413d5b2bfa04bc1e17
diff --git a/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js b/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js index <HASH>..<HASH> 100644 --- a/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js +++ b/packages/neos-ui/src/Containers/LeftSideBar/NodeTree/Node/index.js @@ -55,8 +55,8 @@ export default class Node extends PureComponent { isLastChild: PropTypes.bool, childNodes: PropTypes.object, level: PropTypes.number.isRequired, - currentDocumentNodeContextPath: PropTypes.string, - focusedNodeContextPath: PropTypes.string, + isActive: PropTypes.bool, + isFocused: PropTypes.bool, toggledNodeContextPaths: PropTypes.object, hiddenContextPaths: PropTypes.object, intermediateContextPaths: PropTypes.object, @@ -79,7 +79,7 @@ export default class Node extends PureComponent { componentDidMount() { // Always request scroll on first render if given node is focused - if (this.props.focusedNodeContextPath === $get('contextPath', this.props.node)) { + if (this.props.isFocused) { this.setState({ shouldScrollIntoView: true }); @@ -88,9 +88,9 @@ export default class Node extends PureComponent { componentWillReceiveProps(nextProps) { // If focused node changed - if (this.props.focusedNodeContextPath !== nextProps.focusedNodeContextPath) { + if (this.props.isFocused !== nextProps.isFocused) { // And it is the current node - if (nextProps.focusedNodeContextPath === $get('contextPath', nextProps.node)) { + if (nextProps.isFocused) { // Request scrolling itself into view this.setState({ shouldScrollIntoView: true @@ -160,17 +160,17 @@ export default class Node extends PureComponent { } isFocused() { - const {node, focusedNodeContextPath} = this.props; + const {isFocused} = this.props; - return focusedNodeContextPath === $get('contextPath', node); + return isFocused; } isActive() { - const {node, currentDocumentNodeContextPath, isContentTreeNode} = this.props; + const {isActive, isContentTreeNode} = this.props; if (isContentTreeNode) { return this.isFocused(); } - return currentDocumentNodeContextPath === $get('contextPath', node); + return isActive; } isCollapsed() { @@ -323,9 +323,8 @@ export const PageTreeNode = withNodeTypeRegistryAndI18nRegistry(connect( loadingDepth: neos.configuration.nodeTree.loadingDepth, childNodes: childrenOfSelector(state, getContextPath(node)), hasChildren: hasChildrenSelector(state, getContextPath(node)), - currentDocumentNodeContextPath: selectors.UI.ContentCanvas.getCurrentContentCanvasContextPath(state), - currentDocumentNode: selectors.UI.ContentCanvas.documentNodeSelector(state), - focusedNodeContextPath: selectors.UI.PageTree.getFocused(state), + isActive: selectors.UI.ContentCanvas.getCurrentContentCanvasContextPath(state) === $get('contextPath', node), + isFocused: selectors.UI.PageTree.getFocused(state) === $get('contextPath', node), toggledNodeContextPaths: selectors.UI.PageTree.getToggled(state), hiddenContextPaths: selectors.UI.PageTree.getHidden(state), intermediateContextPaths: selectors.UI.PageTree.getIntermediate(state), @@ -365,9 +364,8 @@ export const ContentTreeNode = withNodeTypeRegistryAndI18nRegistry(connect( loadingDepth: neos.configuration.structureTree.loadingDepth, childNodes: childrenOfSelector(state, getContextPath(node)), hasChildren: hasChildrenSelector(state, getContextPath(node)), - currentDocumentNodeContextPath: selectors.UI.ContentCanvas.getCurrentContentCanvasContextPath(state), - currentDocumentNode: selectors.UI.ContentCanvas.documentNodeSelector(state), - focusedNodeContextPath: $get('cr.nodes.focused.contextPath', state), + isActive: selectors.UI.ContentCanvas.getCurrentContentCanvasContextPath(state) === $get('contextPath', node), + isFocused: $get('cr.nodes.focused.contextPath', state) === $get('contextPath', node), toggledNodeContextPaths: selectors.UI.ContentTree.getToggled(state), isNodeDirty: isContentNodeDirtySelector(state, $get('contextPath', node)), canBeInsertedAlongside: canBeMovedAlongsideSelector(state, {
BUGFIX: Improve performance of switching between nodes in NodeTree. Without this fix, all nodes in the node tree get redrawn every time the user selects a different node, which leads to prohibitive performance when switching nodes.
neos_neos-ui
train
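The commit above relies on PureComponent's shallow prop comparison: once the context-path comparison moves into the connector, only the nodes whose derived boolean actually flipped receive new props. A rough Python sketch of why that helps, with illustrative names that are not the Neos UI API:

```python
def changed(prev, nxt):
    # PureComponent-style check: re-render only if a prop changed.
    return prev != nxt

nodes = ["a", "b", "c"]

# Before: every node received the globally focused path, so moving
# focus from "a" to "b" changed a prop on *all* nodes.
before = lambda node, focused: {"focusedNodeContextPath": focused}
assert [n for n in nodes if changed(before(n, "a"), before(n, "b"))] == nodes

# After: the comparison happens upstream, so only the two nodes whose
# boolean flipped get redrawn.
after = lambda node, focused: {"isFocused": node == focused}
assert [n for n in nodes if changed(after(n, "a"), after(n, "b"))] == ["a", "b"]
```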
76fbe480179d9f06ed2c444b160f6e718533483f
diff --git a/ram-cache.js b/ram-cache.js index <HASH>..<HASH> 100644 --- a/ram-cache.js +++ b/ram-cache.js @@ -6,13 +6,19 @@ var md5omatic = require('md5-o-matic'); Cache object */ +function find(arr, predicate) { + var i, + len = arr.length; -function sortByLessPopular(a, b) { - return a.times < b.times; + for (i = 0; i < len; i++) { + if (predicate(arr[i])) { + return i; + } + } } -function sortByOldest(a, b) { - return a.ts < b.ts; +function sortByLessPopular(a, b) { + return a.times < b.times; } function removeByKey(key) { @@ -40,20 +46,27 @@ Cache.prototype.getCacheKey = function cache_getCacheKey(args) { }; Cache.prototype.push = function cache_push(args, output) { - var lru; - var k = this.getCacheKey(args); + var lru, oldestIndex, + k = this.getCacheKey(args); + if (k in this._cache) return; if(this._LRU.size() === this._maxLen) { + // remove from LRU heap lru = this._LRU.pop(); + // remove from cache delete this._cache[lru.key]; - this._oldest.remove(removeByKey(lru.key)); + // remove from stale objects cache + oldestIndex = find(this._oldest, removeByKey(lru.key)); + if (oldestIndex) { + this._oldest.splice(oldestIndex, 1); + } } - + // add to cache this._cache[k] = output; - + // add to LRU heap this._LRU.push({key: k, times: 0}); - + // add to stale objects cache this._oldest.push({ key: k, ts: Date.now() @@ -62,26 +75,28 @@ Cache.prototype.push = function cache_push(args, output) { Cache.prototype._purgeByAge = function cache__purgeByAge() { // remove old entries - var oldest; - var now = Date.now(); - - while (this._oldest.size()) { - oldest = this._oldest.pop(); - if (oldest.ts + this._maxAge < now) { - delete this._cache[oldest.key]; - this._LRU.remove(removeByKey(oldest.key)); - } - else { - this._oldest.push(oldest); - break; + var key, i, oldestIndex, + maxAge = this._maxAge, + now = Date.now(); + + var oldestIndex = find(this._oldest, function (oldest) { + return oldest.ts + maxAge >= now; + }); + + if (oldestIndex) { + for(i = 0; i < oldestIndex; i++){ + key = this._oldest[i].key; + delete this._cache[key]; + this._LRU.remove(removeByKey(key)); } + this._oldest.splice(0, i); } }; Cache.prototype.reset = function cache_reset() { this._cache = {}; // key, value this._LRU = new Heap(sortByLessPopular); - this._oldest = new Heap(sortByOldest); + this._oldest = []; }; Cache.prototype.query = function cache_query(args, next) {
replace stale objects data container with a plain array. Should deliver better performance
sithmel_memoize-cache
train
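The memoize-cache change keeps stale-entry bookkeeping in a plain insertion-ordered array, so purging by age reduces to "find the first still-fresh entry, drop everything before it." A minimal Python sketch of that purge, assuming entries are appended in timestamp order (field names are illustrative):

```python
import time

def purge_by_age(oldest, cache, max_age, now=None):
    """Drop entries older than max_age from the front of an
    insertion-ordered list, removing them from the cache dict too."""
    now = time.time() if now is None else now
    # Entries arrive in timestamp order, so the first still-fresh entry
    # marks the cut-off for everything staler than it.
    cut = next((i for i, e in enumerate(oldest) if e["ts"] + max_age >= now),
               len(oldest))
    for entry in oldest[:cut]:
        cache.pop(entry["key"], None)
    del oldest[:cut]

cache = {"a": 1, "b": 2}
oldest = [{"key": "a", "ts": 0.0}, {"key": "b", "ts": time.time()}]
purge_by_age(oldest, cache, max_age=60)
assert "a" not in cache and "b" in cache
```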
ce0cb8c998dca1b19eed284b3c06ba23507c900d
diff --git a/server/sonar-server/src/main/java/org/sonar/server/measure/index/ProjectMeasuresIndex.java b/server/sonar-server/src/main/java/org/sonar/server/measure/index/ProjectMeasuresIndex.java index <HASH>..<HASH> 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/measure/index/ProjectMeasuresIndex.java +++ b/server/sonar-server/src/main/java/org/sonar/server/measure/index/ProjectMeasuresIndex.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -264,19 +264,19 @@ public class ProjectMeasuresIndex { .subAggregation( AggregationBuilders.filter("filter_" + metricKey, termsQuery(FIELD_MEASURES_KEY, metricKey)) .subAggregation(filters(metricKey, - new FiltersAggregator.KeyedFilter("1", termQuery(FIELD_MEASURES_VALUE, 1d)), - new FiltersAggregator.KeyedFilter("2", termQuery(FIELD_MEASURES_VALUE, 2d)), - new FiltersAggregator.KeyedFilter("3", termQuery(FIELD_MEASURES_VALUE, 3d)), - new FiltersAggregator.KeyedFilter("4", termQuery(FIELD_MEASURES_VALUE, 4d)), - new FiltersAggregator.KeyedFilter("5", termQuery(FIELD_MEASURES_VALUE, 5d))))); + new KeyedFilter("1", termQuery(FIELD_MEASURES_VALUE, 1d)), + new KeyedFilter("2", termQuery(FIELD_MEASURES_VALUE, 2d)), + new KeyedFilter("3", termQuery(FIELD_MEASURES_VALUE, 3d)), + new KeyedFilter("4", termQuery(FIELD_MEASURES_VALUE, 4d)), + new KeyedFilter("5", termQuery(FIELD_MEASURES_VALUE, 5d))))); } private static AbstractAggregationBuilder createQualityGateFacet() { return AggregationBuilders.filters( ALERT_STATUS_KEY, QUALITY_GATE_STATUS.entrySet().stream() - .map(entry -> termQuery(FIELD_QUALITY_GATE_STATUS, entry.getValue())) - .toArray(QueryBuilder[]::new)); + .map(entry -> new KeyedFilter(entry.getKey(), termQuery(FIELD_QUALITY_GATE_STATUS, entry.getValue()))) + .toArray(KeyedFilter[]::new)); } private Map<String, QueryBuilder> createFilters(ProjectMeasuresQuery query) {
SONAR-<I> fix quality gate status facet of project measures index
SonarSource_sonarqube
train
dd48cde9904c279ee9cf111f0cc55775e9ed63a2
diff --git a/cmd/swagger/commands/generate/spec_go111.go b/cmd/swagger/commands/generate/spec_go111.go index <HASH>..<HASH> 100644 --- a/cmd/swagger/commands/generate/spec_go111.go +++ b/cmd/swagger/commands/generate/spec_go111.go @@ -29,6 +29,7 @@ type SpecFile struct { Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"` IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"` ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"` + ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of projec"` } // Execute runs this command @@ -52,6 +53,7 @@ func (s *SpecFile) Execute(args []string) error { opts.Exclude = s.Exclude opts.IncludeTags = s.IncludeTags opts.ExcludeTags = s.ExcludeTags + opts.ExcludeDeps = s.ExcludeDeps swspec, err := codescan.Run(&opts) if err != nil { return err diff --git a/codescan/application.go b/codescan/application.go index <HASH>..<HASH> 100644 --- a/codescan/application.go +++ b/codescan/application.go @@ -46,6 +46,7 @@ type Options struct { ScanModels bool WorkDir string BuildTags string + ExcludeDeps bool Include []string Exclude []string IncludeTags []string @@ -90,7 +91,7 @@ func newScanCtx(opts *Options) (*scanCtx, error) { return nil, err } - app, err := newTypeIndex(pkgs, + app, err := newTypeIndex(pkgs, opts.ExcludeDeps, sliceToSet(opts.IncludeTags), sliceToSet(opts.ExcludeTags), opts.Include, opts.Exclude) if err != nil { @@ -351,11 +352,14 @@ func (s *scanCtx) FindComments(pkg *packages.Package, name string) (*ast.Comment return nil, false } -func newTypeIndex(pkgs []*packages.Package, includeTags, excludeTags map[string]bool, +func newTypeIndex(pkgs []*packages.Package, + excludeDeps bool, includeTags, excludeTags map[string]bool, includePkgs, excludePkgs []string) (*typeIndex, error) { + ac := &typeIndex{ AllPackages: make(map[string]*packages.Package), Models: make(map[*ast.Ident]*entityDecl), + excludeDeps: excludeDeps, includeTags: includeTags, excludeTags: excludeTags, includePkgs: includePkgs, @@ -375,6 +379,7 @@ type typeIndex struct { Operations []parsedPathContent Parameters []*entityDecl Responses []*entityDecl + excludeDeps bool includeTags map[string]bool excludeTags map[string]bool includePkgs []string @@ -508,6 +513,9 @@ func (a *typeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, g } func (a *typeIndex) walkImports(pkg *packages.Package) error { + if a.excludeDeps { + return nil + } for k := range pkg.Imports { if _, known := a.AllPackages[k]; known { continue
allow exclusion of all deps when building spec
go-swagger_go-swagger
train
5674c2d470723e1358af966a80d55cd2223dacea
diff --git a/client/solve.go b/client/solve.go index <HASH>..<HASH> 100644 --- a/client/solve.go +++ b/client/solve.go @@ -209,8 +209,10 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG <-time.After(3 * time.Second) cancelStatus() }() - bklog.G(ctx).Debugf("stopping session") - s.Close() + if !opt.SessionPreInitialized { + bklog.G(ctx).Debugf("stopping session") + s.Close() + } }() var pbd *pb.Definition if def != nil {
do not close sessions after client solve if pre-initialized. This fixes what appears to be a bug from an old refactor to allow shared sessions: <URL>
moby_buildkit
train
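The buildkit fix is an instance of a general ownership rule: tear down only the resources you created, never ones the caller pre-initialized and may still be using. A generic Python sketch of the pattern, not buildkit's actual API:

```python
class Session:
    def __init__(self):
        self.closed = False

    def close(self):
        self.closed = True

class Solver:
    def __init__(self, session=None):
        # Remember whether we created the session: a pre-initialized
        # session belongs to the caller and must survive solve().
        self._owns_session = session is None
        self.session = session if session is not None else Session()

    def solve(self):
        try:
            pass  # ... work against self.session ...
        finally:
            if self._owns_session:
                self.session.close()

shared = Session()
Solver(shared).solve()
assert not shared.closed        # caller-owned session stays open

owned = Solver()
owned.solve()
assert owned.session.closed     # solver-created session is torn down
```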
a2de99164f75f45a3a32560b5748a7e9151192e8
diff --git a/hibernate-ogm-core/src/main/java/org/hibernate/ogm/util/impl/PropertyMetadataProvider.java b/hibernate-ogm-core/src/main/java/org/hibernate/ogm/util/impl/PropertyMetadataProvider.java index <HASH>..<HASH> 100644 --- a/hibernate-ogm-core/src/main/java/org/hibernate/ogm/util/impl/PropertyMetadataProvider.java +++ b/hibernate-ogm-core/src/main/java/org/hibernate/ogm/util/impl/PropertyMetadataProvider.java @@ -32,6 +32,7 @@ import org.hibernate.ogm.persister.EntityKeyBuilder; import org.hibernate.ogm.persister.OgmCollectionPersister; import org.hibernate.ogm.persister.OgmEntityPersister; import org.hibernate.ogm.type.GridType; +import org.hibernate.persister.collection.CollectionPersister; import java.io.Serializable; @@ -96,7 +97,7 @@ public class PropertyMetadataProvider { final Object[] columnValues = getKeyColumnValues(); collectionMetadataKey = new AssociationKey( tableName, keyColumnNames, columnValues ); if (collectionPersister != null) { - collectionMetadataKey.setCollectionRole( collectionPersister.getRole() ); + collectionMetadataKey.setCollectionRole( getUnqualifiedRole( collectionPersister ) ); EntityKey entityKey = EntityKeyBuilder.fromPersister( (OgmEntityPersister) collectionPersister.getOwnerEntityPersister(), (Serializable) key, @@ -113,6 +114,12 @@ public class PropertyMetadataProvider { return collectionMetadataKey; } + private String getUnqualifiedRole(CollectionPersister persister) { + String entity = persister.getOwnerEntityPersister().getEntityName(); + String role = persister.getRole(); + return role.substring( entity.length() + 1 ); + } + private Object[] getKeyColumnValues() { if ( columnValues == null ) { columnValues = LogicalPhysicalConverterHelper.getColumnsValuesFromObjectValue(
OGM-<I> Use the unqualified role name for collection
hibernate_hibernate-ogm
train
513f2f861177be6153c2feacdd393e4c8ea51bf6
diff --git a/packages/@uppy/companion/src/companion.js b/packages/@uppy/companion/src/companion.js index <HASH>..<HASH> 100644 --- a/packages/@uppy/companion/src/companion.js +++ b/packages/@uppy/companion/src/companion.js @@ -19,6 +19,7 @@ const { STORAGE_PREFIX } = require('./server/Uploader') const middlewares = require('./server/middlewares') const { shortenToken } = require('./server/Uploader') const { ProviderApiError, ProviderAuthError } = require('./server/provider/error') +const ms = require('ms') const defaultOptions = { server: { @@ -31,7 +32,8 @@ const defaultOptions = { endpoint: 'https://{service}.{region}.amazonaws.com', conditions: [], useAccelerateEndpoint: false, - getKey: (req, filename) => filename + getKey: (req, filename) => filename, + expires: ms('5 minutes') / 1000 } }, debug: true diff --git a/packages/@uppy/companion/src/server/controllers/s3.js b/packages/@uppy/companion/src/server/controllers/s3.js index <HASH>..<HASH> 100644 --- a/packages/@uppy/companion/src/server/controllers/s3.js +++ b/packages/@uppy/companion/src/server/controllers/s3.js @@ -1,5 +1,4 @@ const router = require('express').Router -const ms = require('ms') module.exports = function s3 (config) { if (typeof config.acl !== 'string') { @@ -46,7 +45,7 @@ module.exports = function s3 (config) { client.createPresignedPost({ Bucket: config.bucket, - Expires: ms('5 minutes') / 1000, + Expires: config.expires, Fields: fields, Conditions: config.conditions }, (err, data) => { @@ -94,7 +93,7 @@ module.exports = function s3 (config) { ACL: config.acl, ContentType: type, Metadata: metadata, - Expires: ms('5 minutes') / 1000 + Expires: config.expires }, (err, data) => { if (err) { next(err) @@ -191,7 +190,7 @@ module.exports = function s3 (config) { UploadId: uploadId, PartNumber: partNumber, Body: '', - Expires: ms('5 minutes') / 1000 + Expires: config.expires }, (err, url) => { if (err) { next(err) diff --git a/packages/@uppy/companion/src/standalone/helper.js b/packages/@uppy/companion/src/standalone/helper.js index <HASH>..<HASH> 100644 --- a/packages/@uppy/companion/src/standalone/helper.js +++ b/packages/@uppy/companion/src/standalone/helper.js @@ -56,7 +56,8 @@ const getConfigFromEnv = () => { endpoint: process.env.COMPANION_AWS_ENDPOINT, region: process.env.COMPANION_AWS_REGION, useAccelerateEndpoint: - process.env.COMPANION_AWS_USE_ACCELERATE_ENDPOINT === 'true' + process.env.COMPANION_AWS_USE_ACCELERATE_ENDPOINT === 'true', + expires: parseInt(process.env.COMPANION_AWS_EXPIRES || '300', 10) } }, server: { diff --git a/website/src/docs/companion.md b/website/src/docs/companion.md index <HASH>..<HASH> 100644 --- a/website/src/docs/companion.md +++ b/website/src/docs/companion.md @@ -199,6 +199,8 @@ export COMPANION_AWS_BUCKET="YOUR AWS S3 BUCKET" export COMPANION_AWS_REGION="AWS REGION" # to enable S3 Transfer Acceleration (default: false) export COMPANION_AWS_USE_ACCELERATE_ENDPOINT="false" +# to set X-Amz-Expires query param in presigned urls (in seconds, default: 300) +export COMPANION_AWS_EXPIRES="300" # corresponds to the server.oauthDomain option export COMPANION_OAUTH_DOMAIN="sub.domain.com" @@ -246,7 +248,8 @@ See [env.example.sh](https://github.com/transloadit/uppy/blob/master/env.example secret: "***", bucket: "bucket-name", region: "us-east-1", - useAccelerateEndpoint: false // default: false + useAccelerateEndpoint: false, // default: false, + expires: 3600 // default: 300 (5 minutes) } }, server: {
make s3 signed url expiry configurable in companion (#<I>) * make s3 signed url expiry configurable in companion * add 'expires' option to the companion docs
transloadit_uppy
train
e87e105c6a5e2a94eb726162f30c2c80a3a4a4e0
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ def read(fname): setup( name = "fastly-python", - version = "0.0.1", + version = "0.0.2", author = "Chris Zacharias", author_email = "[email protected]", description = ("A Python client libary for the Fastly API."),
Advancing the version number.
obulpathi_cdn-fastly-python
train
cc2cb188b5e05918462de5e350ce6048e566b803
diff --git a/src/com/google/javascript/jscomp/PolymerPass.java b/src/com/google/javascript/jscomp/PolymerPass.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/PolymerPass.java +++ b/src/com/google/javascript/jscomp/PolymerPass.java @@ -162,6 +162,7 @@ final class PolymerPass extends AbstractPostOrderCallback implements HotSwapComp return value != null && value.isExprResult() && value.getFirstChild().getFirstChild() != null && value.getFirstChild().getFirstChild().isGetProp() + && value.getFirstChild().getFirstChild().isQualifiedName() && NodeUtil.getRootOfQualifiedName( value.getFirstChild().getFirstChild()).matchesQualifiedName(POLYMER_ELEMENT_NAME); }
Fix crash in PolymerPass that can occur when people pass non-externs code as externs. ------------- Created by MOE: <URL>
google_closure-compiler
train
b8de9dafaedbd3513a8db3ac407201d19ca1ee0c
diff --git a/statics/js/components/inject-form.js b/statics/js/components/inject-form.js index <HASH>..<HASH> 100644 --- a/statics/js/components/inject-form.js +++ b/statics/js/components/inject-form.js @@ -215,7 +215,7 @@ Vue.component('inject-form', { }, getIP: function(node) { - md = node.metadata; + md = node.metadata.Neutron ? node.metadata.Neutron : node.metadata; ipFamily = this.type.slice(-1); if (ipFamily == "4" && "IPV4" in md) { return md.IPV4[0]; @@ -227,6 +227,7 @@ Vue.component('inject-form', { }, getMAC: function(node) { + if (node.metadata.ExtID && node.metadata.ExtID["attached-mac"]) return node.metadata.ExtID["attached-mac"]; return node.metadata.MAC || ""; },
pi: pick neutron IP in UI
skydive-project_skydive
train
bfb868ff2ec16e5022b440701e73b31715c4ff8b
diff --git a/src/org/opencms/db/I_CmsUserDriver.java b/src/org/opencms/db/I_CmsUserDriver.java index <HASH>..<HASH> 100644 --- a/src/org/opencms/db/I_CmsUserDriver.java +++ b/src/org/opencms/db/I_CmsUserDriver.java @@ -1,7 +1,7 @@ /* * File : $Source: /alkacon/cvs/opencms/src/org/opencms/db/I_CmsUserDriver.java,v $ - * Date : $Date: 2003/06/17 16:25:36 $ - * Version: $Revision: 1.2 $ + * Date : $Date: 2003/06/20 16:18:31 $ + * Version: $Revision: 1.3 $ * * This library is part of OpenCms - * the Open Source Content Mananagement System @@ -48,7 +48,7 @@ import source.org.apache.java.util.Configurations; * Definitions of all required user driver methods. * * @author Thomas Weckert ([email protected]) - * @version $Revision: 1.2 $ $Date: 2003/06/17 16:25:36 $ + * @version $Revision: 1.3 $ $Date: 2003/06/20 16:18:31 $ * @since 5.1 */ public interface I_CmsUserDriver { @@ -132,7 +132,7 @@ public interface I_CmsUserDriver { String digest(String value); Vector getChild(String groupname) throws CmsException; Vector getGroups() throws CmsException; - Vector getGroupsOfUser(String name) throws CmsException; + Vector getGroupsOfUser(CmsUUID userId) throws CmsException; Vector getUsers(int type) throws CmsException; Vector getUsers(int type, String namefilter) throws CmsException; Vector getUsersByLastname(String lastname, int userType, int userStatus, int wasLoggedIn, int nMax) throws CmsException;
Changed getGroupsOfUser from name to uuid
alkacon_opencms-core
train
9fb9bda50d0486d993020fe1465fcc9111e452d8
diff --git a/test/test_benchmarks.py b/test/test_benchmarks.py index <HASH>..<HASH> 100644 --- a/test/test_benchmarks.py +++ b/test/test_benchmarks.py @@ -146,8 +146,8 @@ def test_find_benchmarks(tmpdir): assert times['time_examples.TimeWithRepeat.time_it']['stderr'].split() == expected # Calibration of iterations should not rerun setup - expected = ['setup']*3 - assert times['time_examples.TimeWithRepeatCalibrate.time_it']['stderr'].split() == expected + expected = (['setup']*2, ['setup']*3) + assert times['time_examples.TimeWithRepeatCalibrate.time_it']['stderr'].split() in expected # Check run time timestamps for name, result in times.items():
Fix issue in test. The number of repeats may be smaller if the too_slow() condition was encountered.
airspeed-velocity_asv
train
683e46d86514d5f70a2461062792fa2853e245d5
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -27,7 +27,7 @@ Matcher.prototype.isCaptured = function (currentNode, parentNode) { if (isCalleeOfParent(currentNode, parentNode)) { return false; } - if (matchCallee(this.exampleAst, parentNode)) { + if (this.test(parentNode)) { indexOfCurrentArg = parentNode.arguments.indexOf(currentNode); return indexOfCurrentArg !== -1 && indexOfCurrentArg < this.exampleAst.arguments.length; }
fix(escallmatch): isCaptured should not be true when the whole CallExpression does not match
twada_escallmatch
train
0b7ac235630b4030ab30d6d90be95f2571763947
diff --git a/tests/fixtures/processors/custom-processor.js b/tests/fixtures/processors/custom-processor.js index <HASH>..<HASH> 100644 --- a/tests/fixtures/processors/custom-processor.js +++ b/tests/fixtures/processors/custom-processor.js @@ -2,9 +2,10 @@ module.exports = { processors: { ".txt": { preprocess: function(text) { - return [text]; + return [text.replace("a()", "b()")]; }, postprocess: function(messages) { + messages[0][0].ruleId = "post-processed"; return messages[0]; } } diff --git a/tests/lib/cli-engine.js b/tests/lib/cli-engine.js index <HASH>..<HASH> 100644 --- a/tests/lib/cli-engine.js +++ b/tests/lib/cli-engine.js @@ -923,8 +923,51 @@ describe("CLIEngine", function() { assert.equal(report.results.length, 1); assert.equal(report.results[0].messages.length, 2); }); - }); + it("should run processors when executing with config file that specifies a processor", function() { + engine = new CLIEngine({ + configFile: "./tests/fixtures/configurations/processors.json", + reset: true, + useEslintrc: false, + extensions: ["js", "txt"] + }); + var report = engine.executeOnFiles(["tests/fixtures/processors/test/test-processor.txt"]); + + assert.equal(report.results[0].messages[0].message, "b is defined but never used"); + assert.equal(report.results[0].messages[0].ruleId, "post-processed"); + }); + it("should run processors when executing with config file that specifies preloaded processor", function() { + engine = new CLIEngine({ + reset: true, + useEslintrc: false, + plugins: ["test-processor"], + rules: { + "no-console": 2, + "no-unused-vars": 2 + }, + extensions: ["js", "txt"] + }); + + engine.addPlugin("test-processor", { + processors: { + ".txt": { + preprocess: function(text) { + return [text.replace("a()", "b()")]; + }, + postprocess: function(messages) { + messages[0][0].ruleId = "post-processed"; + return messages[0]; + } + } + } + }); + + var report = engine.executeOnFiles(["tests/fixtures/processors/test/test-processor.txt"]); + + assert.equal(report.results[0].messages[0].message, "b is defined but never used"); + assert.equal(report.results[0].messages[0].ruleId, "post-processed"); + }); + }); }); describe("getConfigForFile", function() {
Update: add tests to assert that the preprocessor is running (fixes #<I>)
eslint_eslint
train
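These tests prove each processor stage ran by giving it a visible fingerprint: preprocess rewrites a() to b(), postprocess stamps a sentinel ruleId, and the assertions check for both marks. The same trick in a toy Python pipeline (all names made up):

```python
def preprocess(text):
    # Rewrite the input so a later assertion on "b" proves we ran.
    return [text.replace("a()", "b()")]

def postprocess(messages):
    # Stamp a sentinel so the output is traceably post-processed.
    messages[0]["rule_id"] = "post-processed"
    return messages

def lint(blocks):
    # Toy linter: flags the first identifier of each block as unused.
    return [{"rule_id": "no-unused",
             "message": f"{block[0]} is defined but never used"}
            for block in blocks]

messages = postprocess(lint(preprocess("a() // unused")))
assert messages[0]["message"].startswith("b")      # preprocess ran
assert messages[0]["rule_id"] == "post-processed"  # postprocess ran
```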
ed28ed467c660a36a9f6df32487086a0fd902856
diff --git a/lib/jsduck/options.rb b/lib/jsduck/options.rb index <HASH>..<HASH> 100644 --- a/lib/jsduck/options.rb +++ b/lib/jsduck/options.rb @@ -122,7 +122,7 @@ module JsDuck opts.on('-o', '--output=PATH', "Directory to output all this amazing documentation.", "This option MUST be specified (unless --stdout).", " ") do |path| - @output_dir = path + @output_dir = canonical(path) end opts.on('--ignore-global', "Turns off the creation of global class.", " ") do @@ -184,7 +184,7 @@ module JsDuck opts.on('--welcome=PATH', "Path to HTML file with content for welcome page.", " ") do |path| - @welcome = path + @welcome = canonical(path) end opts.on('--guides=PATH', @@ -192,26 +192,26 @@ module JsDuck "should be in a dir containing the actual guides.", "A guide is a dir containing README.md, icon.png,", "and other images referenced by the README.md file.", " ") do |path| - @guides = path + @guides = canonical(path) end opts.on('--videos=PATH', "Path to JSON file describing the videos.", " ") do |path| - @videos = path + @videos = canonical(path) end opts.on('--examples=PATH', "Path JSON file describing the examples.", " ") do |path| - @examples = path + @examples = canonical(path) end opts.on('--categories=PATH', "Path to JSON file which defines categories for classes.", " ") do |path| - @categories_path = path + @categories_path = canonical(path) end opts.on('--inline-examples=PATH', "Path to inline examples directory.", " ") do |path| - @inline_examples_dir = path + @inline_examples_dir = canonical(path) end opts.on('--pretty-json', "Turn on pretty-printing of JSON.", " ") do @@ -265,7 +265,7 @@ module JsDuck opts.on('--template=PATH', "Directory containing doc-browser UI template.", " ") do |path| - @template_dir = path + @template_dir = canonical(path) end opts.on('--template-links', @@ -278,7 +278,7 @@ module JsDuck opts.on('--extjs-path=PATH', "Path for main ExtJS JavaScript file. Useful for specifying", "something different than extjs/ext.js", " ") do |path| - @extjs_path = path + @extjs_path = path # NB! must be relative path end opts.on('--local-storage-db=NAME', @@ -353,6 +353,15 @@ module JsDuck end end + # Converts relative path to full path + # + # Especially important for running on Windows where C:\foo\bar + # pathnames are converted to C:/foo/bar which ruby can work on + # more easily. + def canonical(path) + File.expand_path(path) + end + # Runs checks on the options def validate if @input_files.length == 0
Support DOS pathnames in command line arguments. Using File.expand_path to expand all relative paths to full paths, which also converts all DOS-style paths to UNIX style.
senchalabs_jsduck
train
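Ruby's File.expand_path both absolutizes a relative path and normalizes DOS-style C:\foo\bar separators. A rough Python analogue of the canonical helper, assuming the same "absolute with forward slashes" target form:

```python
import os

def canonical(path):
    # Make the path absolute relative to the current directory, then
    # force forward slashes so downstream code can treat DOS-style and
    # UNIX-style inputs uniformly (os.sep is "\\" on Windows).
    return os.path.abspath(path).replace(os.sep, "/")

print(canonical("docs/guides"))        # e.g. /current/dir/docs/guides
print(canonical(r"template\links"))    # backslashes normalized on Windows
```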
b8070a2ab71559978fc84c13ee237f8e5dfc12c2
diff --git a/Model/Behavior/AttachmentBehavior.php b/Model/Behavior/AttachmentBehavior.php index <HASH>..<HASH> 100644 --- a/Model/Behavior/AttachmentBehavior.php +++ b/Model/Behavior/AttachmentBehavior.php @@ -473,9 +473,6 @@ class AttachmentBehavior extends ModelBehavior { )); } - // Reset ID - $model->id = null; - return true; }
Fix record deletion because of invalid model ID [#<I>]
milesj_uploader
train
cd483959e01a1448b64ab6dbd531f2310ca052c5
diff --git a/tests/Kwf/Component/Events/Table/Test.php b/tests/Kwf/Component/Events/Table/Test.php index <HASH>..<HASH> 100644 --- a/tests/Kwf/Component/Events/Table/Test.php +++ b/tests/Kwf/Component/Events/Table/Test.php @@ -8,48 +8,78 @@ class Kwf_Component_Events_Table_Test extends Kwc_TestAbstract public function setUp() { parent::setUp('Kwf_Component_Events_Table_Component'); - } - public function testEvents() - { - $events = Kwf_Component_Events_Table_Events::getInstance( + $this->_events = Kwf_Component_Events_Table_Events::getInstance( 'Kwf_Component_Events_Table_Events', array('componentClass' => 'Kwf_Component_Events_Table_Component') ); + $this->_events->countCalled = 0; + } + public function testEvents1() + { $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); - $count = 0; $row = $model->getRow(3); $row->visible = 1; $row->save(); - $this->assertEquals(++$count, $events->countCalled); + $this->assertEquals(1, $this->_events->countCalled); + } - $row = $model->getRow(3); + public function testEvents2() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); + + $row = $model->getRow(2); $row->visible = 0; $row->save(); - $this->assertEquals(++$count, $events->countCalled); + $this->assertEquals(1, $this->_events->countCalled); + } + + public function testEvents3() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); $row = $model->createRow(array('name' => 'F6', 'pos' => 5, 'visible' => 1)); $row->save(); - $this->assertEquals(++$count, $events->countCalled); + $this->assertEquals(1, $this->_events->countCalled); + } + + public function testEvents4() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); $row = $model->getRow(2); $row->delete(); - $this->assertEquals(++$count, $events->countCalled); + $this->assertEquals(1, $this->_events->countCalled); + } + + public function testEvents5() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); $row = $model->getRow(4); $row->pos = 1; $row->save(); - $this->assertEquals(++$count, $events->countCalled); + $this->assertEquals(1, $this->_events->countCalled); + } + + public function testEvents6() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); $row = $model->createRow(array('name' => 'F5', 'pos' => 5, 'visible' => 0)); $row->save(); - $this->assertEquals($count, $events->countCalled); + $this->assertEquals(0, $this->_events->countCalled); + } + + public function testEvents7() + { + $model = Kwf_Model_Abstract::getInstance('Kwf_Component_Events_Table_Model'); $row = $model->getRow(3); $row->delete(); - $this->assertEquals($count, $events->countCalled); + $this->assertEquals(0, $this->_events->countCalled); } } \ No newline at end of file
split up test for easier debugging
koala-framework_koala-framework
train
d6b68505c60d8bf2926304e412b9373299fb2d48
diff --git a/PyFunceble/__init__.py b/PyFunceble/__init__.py index <HASH>..<HASH> 100644 --- a/PyFunceble/__init__.py +++ b/PyFunceble/__init__.py @@ -93,7 +93,7 @@ from PyFunceble.whois import Whois # We set our project name. NAME = "PyFunceble" # We set out project version. -VERSION = "1.51.0.dev -- 2_0_0_rc8 -- (Blue Bontebok: Beetle)" +VERSION = "1.51.1.dev -- 2_0_0_rc8 -- (Blue Bontebok: Beetle)" # We set the list of windows "platforms" WINDOWS_PLATFORMS = ["windows", "cygwin", "cygwin_nt-10.0"] diff --git a/PyFunceble/file_core.py b/PyFunceble/file_core.py index <HASH>..<HASH> 100644 --- a/PyFunceble/file_core.py +++ b/PyFunceble/file_core.py @@ -290,7 +290,7 @@ class FileCore: # pylint: disable=too-many-instance-attributes of all the noises around the element we want to test. """ - if not line.startswith("#"): + if line and not line.startswith("#"): # The line is not a commented line. if "#" in line: @@ -456,7 +456,7 @@ class FileCore: # pylint: disable=too-many-instance-attributes # We remove cariage from the given line. line = line.strip() - if line[0] == "#": + if not line or line[0] == "#": # We line is a comment line. # We return None, there is nothing to test. diff --git a/version.yaml b/version.yaml index <HASH>..<HASH> 100644 --- a/version.yaml +++ b/version.yaml @@ -1,4 +1,4 @@ -current_version: '1.51.0.dev -- 2_0_0_rc8 -- (Blue Bontebok: Beetle)' +current_version: '1.51.1.dev -- 2_0_0_rc8 -- (Blue Bontebok: Beetle)' deprecated: [0.0.0, 0.0.1, 0.65.0, 0.67.1, 0.68.0, 0.69.3, 0.69.5, 0.70.4, 0.71.2, 0.72.7, 0.73.1, 0.74.5, 0.75.1, 0.76.2, 0.77.0, 0.78.0, 0.79.1, 0.80.9, 0.81.8, 0.82.4, 0.83.2, 0.84.5, 0.85.0, 0.86.0, 0.87.1, 0.88.3, 0.89.3, 0.90.2, 0.91.1,
Fix issue when an empty string is given
funilrys_PyFunceble
train
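Both hunks add the same guard: test truthiness before indexing, because ''[0] raises IndexError. The idiom in isolation:

```python
def is_comment_or_blank(line):
    line = line.strip()
    # Checking truthiness first means line[0] is never reached for "",
    # which would otherwise raise IndexError.
    return not line or line[0] == "#"

assert is_comment_or_blank("")
assert is_comment_or_blank("# note")
assert not is_comment_or_blank("example.org")
```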
ad5811b09e2b76e34432d46b1e0a022ef057359c
diff --git a/pools.go b/pools.go index <HASH>..<HASH> 100644 --- a/pools.go +++ b/pools.go @@ -19,6 +19,7 @@ import ( "sync" "unsafe" + "github.com/couchbase/gomemcached" // package name is 'gomemcached' "github.com/couchbase/gomemcached/client" // package name is 'memcached' ) @@ -343,6 +344,51 @@ func (b Bucket) getConnectionToVBucket(vb uint32) (*memcached.Client, *connectio } } +// To get random documents, we need to cover all the nodes, so select +// a connection at random. + +func (b Bucket) getRandomConnection() (*memcached.Client, *connectionPool, error) { + for { + var currentPool = 0 + pools := b.getConnPools() + if len(pools) == 0 { + return nil, nil, fmt.Errorf("No connection pool found") + } else if len(pools) > 1 { // choose a random connection + currentPool = rand.Intn(len(pools)) + } // if only one pool, currentPool defaults to 0, i.e., the only pool + + // get the pool + pool := pools[currentPool] + conn, err := pool.Get() + if err != errClosedPool { + return conn, pool, err + } + + // If conn pool was closed, because another goroutine refreshed the vbucket map, retry... + } +} + +// +// Get a random document from a bucket. Since the bucket may be distributed +// across nodes, we must first select a random connection, and then use the +// Client.GetRandomDoc() call to get a random document from that node. +// + +func (b Bucket) GetRandomDoc() (*gomemcached.MCResponse, error) { + // get a connection from the pool + conn, pool, err := b.getRandomConnection() + + if err != nil { + return nil, err + } + + // get a randomm document from the connection + doc, err := conn.GetRandomDoc() + // need to return the connection to the pool + pool.Return(conn) + return doc, err +} + func (b Bucket) getMasterNode(i int) string { p := b.getConnPools() if len(p) > i {
Enhance Buckets with GetRandomDoc() method to get a random doc from the bucket. Change-Id: I<I>c<I>a<I>de<I>b<I>d9e<I>bb<I>a Reviewed-on: <URL>
couchbase_go-couchbase
train
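The selection loop spreads requests by picking a pool uniformly at random and retrying when a concurrent vbucket-map refresh closed the pool underneath it. A Python-flavored sketch of that loop (the real code is the Go above; names here are illustrative):

```python
import random

class ClosedPoolError(Exception):
    """Raised when a pool was torn down by a vbucket-map refresh."""

def get_random_connection(get_pools):
    while True:
        pools = get_pools()  # re-read: topology may change between tries
        if not pools:
            raise RuntimeError("no connection pool found")
        pool = random.choice(pools)
        try:
            # One random pool covers one node; combined with a per-node
            # "random doc" call this samples across the whole bucket.
            return pool.get(), pool
        except ClosedPoolError:
            continue  # pool closed mid-flight; pick another
```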
c4195283937265fa8a9ee5cccb4d4f356eff375b
diff --git a/src/MvcCore/Application/GettersSetters.php b/src/MvcCore/Application/GettersSetters.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/Application/GettersSetters.php +++ b/src/MvcCore/Application/GettersSetters.php @@ -146,6 +146,18 @@ trait GettersSetters { /** * @inheritDocs + * @var \MvcCore\Config|NULL + */ + public function GetConfig () { + if ($this->config === NULL) { + $configClass = $this->configClass; + $this->config = $configClass::GetSystem(); + } + return $this->config; + } + + /** + * @inheritDocs * @var \MvcCore\Environment */ public function GetEnvironment () { diff --git a/src/MvcCore/Application/Props.php b/src/MvcCore/Application/Props.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/Application/Props.php +++ b/src/MvcCore/Application/Props.php @@ -46,6 +46,13 @@ trait Props { protected $compiled = NULL; /** + * System config INI file as `stdClass` or `array`, + * placed by default in: `"/App/config.ini"`. + * @var \MvcCore\Config|NULL + */ + protected $config = NULL; + + /** * Environment detection instance. * @var \MvcCore\Environment|NULL */ diff --git a/src/MvcCore/IApplication.php b/src/MvcCore/IApplication.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/IApplication.php +++ b/src/MvcCore/IApplication.php @@ -143,6 +143,13 @@ interface IApplication extends \MvcCore\Application\IConstants { public function GetViewClass (); /** + * Get (optionally cached) system config INI file as `stdClass` or `array`, + * placed by default in: `"/App/config.ini"`. + * @return \MvcCore\Config|NULL + */ + public function GetConfig (); + + /** * Returns environment detection instance. * @return \MvcCore\Environment */
Added getter to get system config from application instance.
mvccore_mvccore
train
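GetConfig is the memoized-getter pattern: build on first access, cache on the instance, serve the cached value afterwards. Python ships the same shape as functools.cached_property; a sketch with made-up config contents:

```python
from functools import cached_property

class Application:
    @cached_property
    def config(self):
        # Runs once on first access; later reads hit the cached value,
        # just like the null-check-then-assign in the PHP getter.
        print("loading system config")
        return {"environment": "dev"}

app = Application()
app.config  # prints "loading system config"
app.config  # silent: served from the cache
```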
756310fc65af29f510a11c49eba72373844f1be2
diff --git a/spec/httparty/request_spec.rb b/spec/httparty/request_spec.rb index <HASH>..<HASH> 100644 --- a/spec/httparty/request_spec.rb +++ b/spec/httparty/request_spec.rb @@ -33,7 +33,12 @@ describe HTTParty::Request do it 'should not use ssl for port 80' do request = HTTParty::Request.new(Net::HTTP::Get, 'http://foobar.com') - @request.send(:http).use_ssl?.should == false + request.send(:http).use_ssl?.should == false + end + + it "should use ssl for https scheme" do + request = HTTParty::Request.new(Net::HTTP::Get, 'https://foobar.com') + request.send(:http).use_ssl?.should == true end it "should use basic auth when configured" do
Added a test to ensure that https is used when the scheme is https.
jnunemaker_httparty
train
6060305b0f1718bf8e16515d5cb14e68e5ffb0d6
diff --git a/src/Sentinel.php b/src/Sentinel.php index <HASH>..<HASH> 100644 --- a/src/Sentinel.php +++ b/src/Sentinel.php @@ -522,6 +522,8 @@ class Sentinel $this->persistences->{$method}($user); + $this->user = null; + return $this->users->recordLogout($user); } diff --git a/tests/SentinelTest.php b/tests/SentinelTest.php index <HASH>..<HASH> 100644 --- a/tests/SentinelTest.php +++ b/tests/SentinelTest.php @@ -400,6 +400,24 @@ class SentinelTest extends PHPUnit_Framework_TestCase $sentinel->logout($user, true); } + public function testUserIsNullAfterLogout() + { + list($sentinel, $persistences, $users, $roles, $activations, $dispatcher) = $this->createSentinel(); + + $user = new EloquentUser; + + $persistences->shouldReceive('persist')->once(); + $persistences->shouldReceive('forget')->once(); + + $users->shouldReceive('recordLogin')->once(); + $users->shouldReceive('recordLogout')->once(); + + $sentinel->login($user); + $sentinel->logout($user); + + $this->assertNull($sentinel->getUser(false)); + } + public function testLogoutInvalidUser() { list($sentinel, $persistences, $users, $roles, $activations, $dispatcher) = $this->createSentinel();
set the user to null on logout
cartalyst_sentinel
train
da583df50c32d50261b682664fe43fd5e2f58f87
diff --git a/actionpack/lib/action_dispatch/testing/integration.rb b/actionpack/lib/action_dispatch/testing/integration.rb index <HASH>..<HASH> 100644 --- a/actionpack/lib/action_dispatch/testing/integration.rb +++ b/actionpack/lib/action_dispatch/testing/integration.rb @@ -364,7 +364,7 @@ module ActionDispatch end def respond_to?(method, include_private = false) - @integration_session.respond_to?(method, include_private) || super + integration_session.respond_to?(method, include_private) || super end # Delegate unhandled messages to the current session instance.
Remove bazillion warnings from AP suite.
rails_rails
train
bcc6db9993591a3502e1dfdfe586066b7f13e6ba
diff --git a/src/html/DeclarativeBase.js b/src/html/DeclarativeBase.js index <HASH>..<HASH> 100644 --- a/src/html/DeclarativeBase.js +++ b/src/html/DeclarativeBase.js @@ -127,7 +127,7 @@ export function initDeclarativeBase() { // detects if anything is visible including from styling, not // just content. Perhaps make a specific API for defining that // a node should have DOM content, to make it clear. - if ( this instanceof HTMLNode && !this.isDOMPlane && ( + if ( this instanceof HTMLNode && !this.isDOMNode && ( ( !( child instanceof Text ) && !( child instanceof Comment ) ) || ( child instanceof Text && child.textContent.trim().length > 0 ) ) ) { @@ -170,7 +170,7 @@ export function initDeclarativeBase() { Private(this)._slotElementsAssignedNodes.delete(child) } else { // if non-library content was removed (div, img, etc). - if ( this instanceof HTMLNode && !this.isDOMPlane && ( + if ( this instanceof HTMLNode && !this.isDOMNode && ( ( !( child instanceof Text ) && !( child instanceof Comment ) ) || ( child instanceof Text && child.textContent.trim().length > 0 ) ) ) {
fix: prevent creating an implicit GL DOM Plane for DOMNodes and DOMPlanes; it was reading the wrong property name
trusktr_infamous
train
b1666986236eab820e9155a5943d6b94938b4c40
diff --git a/magic.py b/magic.py index <HASH>..<HASH> 100644 --- a/magic.py +++ b/magic.py @@ -72,7 +72,7 @@ class Magic: """ with self.lock: try: - return magic_buffer(self.cookie, buf) + return maybe_decode(magic_buffer(self.cookie, buf)) except MagicException as e: return self._handle509Bug(e) @@ -82,7 +82,7 @@ class Magic: pass with self.lock: try: - return magic_file(self.cookie, filename) + return maybe_decode(magic_file(self.cookie, filename)) except MagicException as e: return self._handle509Bug(e) @@ -189,6 +189,14 @@ def errorcheck_negative_one(result, func, args): return result +# return str on python3. Don't want to unconditionally +# decode because that results in unicode on python2 +def maybe_decode(s): + if str == bytes: + return s + else: + return s.decode('utf-8') + def coerce_filename(filename): if filename is None: return None diff --git a/test/test.py b/test/test.py index <HASH>..<HASH> 100755 --- a/test/test.py +++ b/test/test.py @@ -22,13 +22,11 @@ class MagicTest(unittest.TestCase): expected_value = (expected_value,) for i in expected_value: - expected_value_bytes = i.encode('utf-8') - with open(filename, 'rb') as f: buf_value = m.from_buffer(f.read()) file_value = m.from_file(filename) - if buf_value == expected_value_bytes and file_value == expected_value_bytes: + if buf_value == i and file_value == i: break else: self.assertTrue(False, "no match for " + repr(expected_value)) @@ -86,11 +84,10 @@ class MagicTest(unittest.TestCase): filename = os.path.join(self.TESTDATA_DIR, 'keep-going.jpg') m = magic.Magic(mime=True) - self.assertEqual(m.from_file(filename), - 'image/jpeg'.encode('utf-8')) + self.assertEqual(m.from_file(filename), 'image/jpeg') m = magic.Magic(mime=True, keep_going=True) - self.assertEqual(m.from_file(filename), 'image/jpeg'.encode('utf-8')) + self.assertEqual(m.from_file(filename), 'image/jpeg') def test_rethrow(self):
Return str rather than bytes for the description strings on python3. Assumes utf-8 encoding from magic return values, which I hope is always the case.
ahupp_python-magic
train
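The str == bytes comparison is a compact Python 2/3 switch: the two names are the same type on Python 2, so the raw value is returned there, while Python 3 decodes libmagic's bytes. The helper stands alone:

```python
def maybe_decode(s):
    # str is bytes on Python 2, so return as-is there to avoid producing
    # unicode; on Python 3, decode libmagic's bytes into a proper str.
    if str == bytes:
        return s
    return s.decode("utf-8")

assert maybe_decode(b"image/jpeg") == "image/jpeg"  # Python 3 behavior
```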
fef10aec444a5e70549ab475cc866f3a5c2a0663
diff --git a/termbox_windows.go b/termbox_windows.go index <HASH>..<HASH> 100644 --- a/termbox_windows.go +++ b/termbox_windows.go @@ -664,7 +664,6 @@ func input_event_producer() { var r input_record var err error mouseRelease := false - var prevButton dword for { err = read_console_input(in, &r) if err != nil { @@ -692,8 +691,7 @@ func input_event_producer() { // single or double click if mr.event_flags == 0 || mr.event_flags == 2 { // handle desync - mouseRelease = mouseRelease && mr.event_flag == 0 && prevButton == mr.button_state - prevButton = mr.button_state + mouseRelease = mouseRelease && mr.event_flags == 0 if mouseRelease { // ignore release mouseRelease = false @@ -717,7 +715,6 @@ func input_event_producer() { } else { // get ready for the next click mouseRelease = false - prevButton = 0 } } }
release does not really have the same button
nsf_termbox-go
train
03f7889bb36b3c15a2643e60118bfe925ad2405f
diff --git a/src/Hooks.php b/src/Hooks.php index <HASH>..<HASH> 100644 --- a/src/Hooks.php +++ b/src/Hooks.php @@ -95,23 +95,22 @@ class Hooks { } $name = $transaction->name; - preg_match("/\\w+\\s*>/", str_replace(" ", "", $name), $tokens); - + $tokens = explode(">", str_replace(" ", "", $name)); $previous = ''; foreach ($tokens as $token) { - if (array_key_exists($token, Hooks::${$propertyName})) { + $previous .= $token . ">"; + + if (array_key_exists($previous, Hooks::${$propertyName})) { - $hooks = Hooks::${$propertyName}[$token]; + $hooks = Hooks::${$propertyName}[$previous]; foreach ($hooks as $hook) { $callbacks[] = $hook; } } - - $previous .= $token; } if (array_key_exists($name, Hooks::${$propertyName})) { diff --git a/src/Runner.php b/src/Runner.php index <HASH>..<HASH> 100644 --- a/src/Runner.php +++ b/src/Runner.php @@ -50,52 +50,6 @@ class Runner { { $propertyName = $this->getPropertyNameFromMethodCall($method); -// $name = strpos($propertyName, 'All') || strpos($propertyName, 'Each') ? $transaction->name : null; - return Hooks::getCallbacksForName($propertyName, $transaction); } - -// private function getHooksFromMethodCall($method, $transaction) -// { -// $propertyName = $this->getPropertyNameFromMethodCall($method); -// -// if ( ! property_exists(Hooks::class, $propertyName)) throw new RuntimeException("Invalid property {$propertyName} trying to be accessed"); -// -// if (strpos($propertyName, 'All') || strpos($propertyName, 'Each')) { -// -// return Hooks::${$propertyName}; -// } -// -// else if ($this->hasWildcard($transaction->name) || array_key_exists($transaction->name, Hooks::${$propertyName})) { -// -// return $this->getHookForTransaction($transaction->name, $propertyName); -// } -// -// return []; -// } - -// public function hasWildcard($name) -// { -// return strpos($name, "*") ? true : false; -// } - -// public function getHookForTransaction($name, $propertyName) -// { -// $callbacks = []; -// -// if ($this->hasWildcard($name)) { -// -// // find all transaction names relevant and push them onto the array -// $tokens = explode("*", $name); -// -// $name = trim($tokens[0]); -// -// $callbacks[] = Hooks::${$propertyName}[$name]; -// -// return $callbacks; -// } -// -// return Hooks::${$propertyName}[$name]; -// } - } diff --git a/tests/DreddRunnerTest.php b/tests/DreddRunnerTest.php index <HASH>..<HASH> 100644 --- a/tests/DreddRunnerTest.php +++ b/tests/DreddRunnerTest.php @@ -220,4 +220,25 @@ class DreddRunnerTest extends PHPUnit_Framework_TestCase $this->expectOutputString('yay this is also called'); } + /** + * @test + */ + public function it_can_handle_multiple_levels_of_nesting_in_transaction_names() + { + $wildcardName = 'Admin > admin logs in > *'; + + $transaction = new stdClass(); + $transactionName = 'Admin > admin logs in > another event'; + $transaction->name = $transactionName; + + Hooks::before($wildcardName, function(&$transaction) + { + echo 'yay this is also called'; + }); + + $this->runner->runBeforeHooksForTransaction($transaction); + + $this->expectOutputString('yay this is also called'); + } + } \ No newline at end of file
Added test for wildcard matching across multiple levels of nesting
ddelnano_dredd-hooks-php
train
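The lookup walks '>'-joined prefixes of the transaction name and fires hooks registered for each one, which is how 'Admin > admin logs in > *' matches a doubly nested event. A hedged Python rendering of the idea, assuming registered names are stored with whitespace stripped:

```python
def callbacks_for(name, hooks):
    # Assumes registered names were stored with whitespace stripped.
    squashed = name.replace(" ", "")
    found, prefix = [], ""
    for token in squashed.split(">"):
        prefix += token + ">"
        # A hook registered for "Admin>adminlogsin>*" fires for every
        # transaction nested beneath that prefix.
        found.extend(hooks.get(prefix + "*", []))
    found.extend(hooks.get(squashed, []))  # exact-name hooks still apply
    return found

hooks = {"Admin>adminlogsin>*": ["wildcard_hook"]}
assert callbacks_for("Admin > admin logs in > another event",
                     hooks) == ["wildcard_hook"]
```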
e031a5a99bdf92bdd335a1e5dcde515a635a4784
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -288,6 +288,7 @@ class CanvasApi { fs.createReadStream(filename) ] } + const url = `${this.apiUrl}/accounts/${account}/sis_imports${batchMode ? '?batch_mode=1' : ''}${batchTerm ? '&batch_mode_term_id=' + batchTerm : ''}` log.info('url', url) @@ -302,6 +303,11 @@ class CanvasApi { }, formData, json + }).catch(e =>{ + const strippedError = new Error(e.message) + strippedError.statusCode = e.statusCode + strippedError.statusMessage = e.statusMessage + throw strippedError }) } }
strip token from sendCsvFile errors
KTH_canvas-api
train
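The catch block rebuilds the error with only safe fields so whatever the HTTP client attached to the original, including the auth token, never reaches the caller's logs. The same sanitizing step sketched in Python (the UpstreamError shape is invented):

```python
def strip_error(exc):
    # Copy only known-safe fields onto a fresh exception; anything the
    # HTTP client attached to the original (auth headers, request body)
    # stays behind and cannot leak into logs.
    safe = RuntimeError(str(exc))
    safe.status_code = getattr(exc, "status_code", None)
    safe.status_message = getattr(exc, "status_message", None)
    return safe

class UpstreamError(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.status_code = 401
        self.status_message = "Unauthorized"
        self.request = {"headers": {"Authorization": "Bearer secret"}}

clean = strip_error(UpstreamError("upload failed"))
assert clean.status_code == 401
assert not hasattr(clean, "request")   # the token is gone
```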
534d1bdc18578b439f9a2e47b963e9bd61909848
diff --git a/tests/client/lib/sinon.js b/tests/client/lib/sinon.js index <HASH>..<HASH> 100644 --- a/tests/client/lib/sinon.js +++ b/tests/client/lib/sinon.js @@ -1348,7 +1348,7 @@ if (!this.sinon && commonJSModule) { this.callIds = []; if (this.fakes) { for (var i = 0; i < this.fakes.length; i++) { - this.fakes[i].reset(); + this.fakes[i].resetHistory(); } } }, @@ -1368,7 +1368,7 @@ if (!this.sinon && commonJSModule) { delete proxy.create; sinon.extend(proxy, func); - proxy.reset(); + proxy.resetHistory(); proxy.prototype = func.prototype; proxy.displayName = name || "spy"; proxy.toString = sinon.functionToString; diff --git a/tests/server/core/dust.js b/tests/server/core/dust.js index <HASH>..<HASH> 100644 --- a/tests/server/core/dust.js +++ b/tests/server/core/dust.js @@ -33,8 +33,8 @@ describe('Template loading', function () { callback = sinon.stub(); }); afterEach(function () { - mockConfig.log.warn.reset(); - mockRenderer.compileOnDemand.reset(); + mockConfig.log.warn.resetHistory(); + mockRenderer.compileOnDemand.resetHistory(); }); it('Should log and gracefully handle missing templates', function () { diff --git a/tests/server/core/processor.js b/tests/server/core/processor.js index <HASH>..<HASH> 100644 --- a/tests/server/core/processor.js +++ b/tests/server/core/processor.js @@ -103,7 +103,7 @@ describe('Request processor', function () { mockTimer = sinon.stub().returns(sinon.stub()); }); afterEach(function () { - renderer.render.reset(); + renderer.render.resetHistory(); }); describe('with default configuration', function () { @@ -428,7 +428,7 @@ describe('Request processor', function () { }); afterEach(function () { - renderer.renderPartial.reset(); + renderer.renderPartial.resetHistory(); }); it('Should allow the template to be specified in the url', function () { diff --git a/tests/server/core/renderer.js b/tests/server/core/renderer.js index <HASH>..<HASH> 100644 --- a/tests/server/core/renderer.js +++ b/tests/server/core/renderer.js @@ -86,7 +86,7 @@ describe('Renderer', function () { mockery.deregisterAll(); mockery.disable(); mockConfig.env.isProduction.returns(true); - mockConfig.log.error.reset(); + mockConfig.log.error.resetHistory(); }); describe('Asset handling', function () { diff --git a/tests/server/core/statsd.js b/tests/server/core/statsd.js index <HASH>..<HASH> 100644 --- a/tests/server/core/statsd.js +++ b/tests/server/core/statsd.js @@ -56,7 +56,7 @@ describe('Logging to statsd', function () { afterEach(function () { mockery.deregisterAll(); mockery.disable(); - mockConfig.log.error.reset(); + mockConfig.log.error.resetHistory(); }); describe('Native methods', function () { diff --git a/tests/server/core/worker.js b/tests/server/core/worker.js index <HASH>..<HASH> 100644 --- a/tests/server/core/worker.js +++ b/tests/server/core/worker.js @@ -84,8 +84,8 @@ describe('Worker process running in production', function () { process.exit.restore(); }); afterEach(function () { - process.exit.reset(); - config.log.debug.reset(); + process.exit.resetHistory(); + config.log.debug.resetHistory(); }); it('Should listen for exit messages', function () {
Replace sinon.reset() with resetHistory() (#<I>) sinon.reset() is deprecated so after we upgraded from sinon 2.x to 4.x in #<I> we started getting deprecation warnings when running our tests: ``` sinon.reset is deprecated and will be removed from the public API in a future version of sinon. ``` This replaces the calls to reset() with resetHistory() which has exactly the same functionality and parameters. Addresses #<I>.
springernature_shunter
train
a581115a48fec8369bd2e4751d2978bf8c404ee5
diff --git a/tests/task.py b/tests/task.py index <HASH>..<HASH> 100644 --- a/tests/task.py +++ b/tests/task.py @@ -1,8 +1,9 @@ # NOTE: named task.py, not tasks.py, to avoid some occasional pytest weirdness +from mock import Mock from pytest import skip # noqa -from fabric import Task, task +import fabric from fabric.tasks import ConnectionCall @@ -13,7 +14,7 @@ class Task_: "I am a docstring" pass - t = Task( + t = fabric.Task( body=body, name="dadbod", aliases=["heavenly", "check", "shop"], @@ -42,7 +43,7 @@ class task_: pass # Faux @task() - t = task( + t = fabric.task( name="dadbod", aliases=["heavenly", "check", "shop"], default=True, @@ -58,7 +59,7 @@ class task_: assert "parts" in t.iterable def returns_Fabric_level_Task_instance(self): - skip() + assert isinstance(fabric.task(Mock()), fabric.Task) class hosts_kwarg: # NOTE: these don't currently test anything besides "the value given is @@ -82,7 +83,7 @@ class ConnectionCall_: def mytask(c): pass - call = ConnectionCall(Task(body=mytask)) + call = ConnectionCall(fabric.Task(body=mytask)) call.host = "user@host" expected = "<ConnectionCall 'mytask', args: (), kwargs: {}, host='user@host'>" # noqa assert str(call) == expected
For clarity, use module (vs member) imports, and set up a failing test around the fabric.task return value
fabric_fabric
train
0bdb356ce4642de95ef5f354c6eda85b92dc6b7a
diff --git a/nessclient/cli/events.py b/nessclient/cli/events.py index <HASH>..<HASH> 100644 --- a/nessclient/cli/events.py +++ b/nessclient/cli/events.py @@ -25,10 +25,7 @@ def events(host: str, port: int): def on_event_received(event: BaseEvent): print(event) - loop.run_until_complete(asyncio.gather( - client.keepalive(), - client.update(), - )) + loop.create_task(client.keepalive()) + loop.create_task(client.update()) - loop.run_until_complete(client.close()) - loop.close() + loop.run_forever()
Update CLI to consume events similarly to home-assistant (#<I>)
nickw444_nessclient
train
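Scheduling the coroutines with create_task and then run_forever keeps the event stream open indefinitely; the old run_until_complete(gather(...)) returned, and went on to close the client, as soon as both coroutines finished. A minimal sketch of the pattern with stand-in coroutines:

```python
import asyncio

async def keepalive():
    while True:
        await asyncio.sleep(30)   # stand-in for the client's keepalive loop

async def update():
    await asyncio.sleep(0)        # stand-in for the one-shot status request

loop = asyncio.new_event_loop()
# create_task + run_forever keeps the long-lived consumer alive instead
# of exiting once both coroutines happen to return.
loop.create_task(keepalive())
loop.create_task(update())
loop.call_later(1, loop.stop)     # demo only: stop after one second
loop.run_forever()
```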
8e27d3b215b8a555e2189740f594c9a5e551c9f4
diff --git a/satpy/readers/hrit_goes.py b/satpy/readers/hrit_goes.py index <HASH>..<HASH> 100644 --- a/satpy/readers/hrit_goes.py +++ b/satpy/readers/hrit_goes.py @@ -223,7 +223,7 @@ prologue = np.dtype([ ("SubSatScan", '>u2'), ("SubSatPixel", '>u2'), ("SubSatLatitude", gvar_float), - ("SubSatLongitude", gvar_float), # ">f4" seems better than "<f4". still wrong though. + ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 ("IMCIdentifier", "S4"), ("Zeros", "u1", 12), @@ -430,8 +430,8 @@ class HRITGOESFileHandler(HRITFileHandler): """Calibrate *data*.""" idx = self.mda['calibration_parameters']['indices'] val = self.mda['calibration_parameters']['values'] - # TODO use dask's map_blocks for this - res = xr.DataArray(np.interp(data, idx, val), + ddata = data.data.map_blocks(lambda block: np.interp(block, idx, val), dtype=val.dtype) + res = xr.DataArray(ddata, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.clip(min=0)
Fix hrit goes calibration to interpolate dask arrays
pytroll_satpy
train
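map_blocks applies a function chunk by chunk, which is safe here because np.interp is purely elementwise, so no chunk needs its neighbours. A small self-contained sketch with made-up calibration points:

```python
import numpy as np
import dask.array as da

idx = np.array([0.0, 512.0, 1023.0])   # made-up calibration indices
val = np.array([0.0, 150.0, 330.0])    # made-up calibrated values

counts = da.random.randint(0, 1024, size=(4, 4), chunks=(2, 2))

# np.interp is elementwise, so applying it per chunk gives the same
# result as interpolating the materialized array, without loading it.
calibrated = counts.map_blocks(lambda block: np.interp(block, idx, val),
                               dtype=val.dtype)
print(calibrated.compute())
```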