Columns: hash (string, 40 chars), diff (string, 131 to 114k chars), message (string, 7 to 980 chars), project (string, 5 to 67 chars), split (string, 1 class)
cd06e4db1fa8c2eda0e08d8699964fa41fca3cfa
diff --git a/test/e2e/apimachinery/webhook.go b/test/e2e/apimachinery/webhook.go index <HASH>..<HASH> 100644 --- a/test/e2e/apimachinery/webhook.go +++ b/test/e2e/apimachinery/webhook.go @@ -788,10 +788,11 @@ func testWebhookForWebhookConfigurations(f *framework.Framework) { Name: "should-be-removable-validating-webhook.k8s.io", Rules: []v1beta1.RuleWithOperations{{ Operations: []v1beta1.OperationType{v1beta1.Create}, + // This will not match any real resources so this webhook should never be called. Rule: v1beta1.Rule{ - APIGroups: []string{"*"}, - APIVersions: []string{"*"}, - Resources: []string{"*"}, + APIGroups: []string{""}, + APIVersions: []string{"v1"}, + Resources: []string{"invalid"}, }, }}, ClientConfig: v1beta1.WebhookClientConfig{ @@ -831,10 +832,11 @@ func testWebhookForWebhookConfigurations(f *framework.Framework) { Name: "should-be-removable-mutating-webhook.k8s.io", Rules: []v1beta1.RuleWithOperations{{ Operations: []v1beta1.OperationType{v1beta1.Create}, + // This will not match any real resources so this webhook should never be called. Rule: v1beta1.Rule{ - APIGroups: []string{"*"}, - APIVersions: []string{"*"}, - Resources: []string{"*"}, + APIGroups: []string{""}, + APIVersions: []string{"v1"}, + Resources: []string{"invalid"}, }, }}, ClientConfig: v1beta1.WebhookClientConfig{
Remove wildcard matching of no-op test webhooks
kubernetes_kubernetes
train
d28d05c1fe8f6cbc4f104c85d8610ae0ed938aac
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup readme_contents = open("README.rst").read()
Switched to setuptools for wheel support.
exhuma_python-cluster
train
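The commit above swaps distutils for setuptools, which is what makes `python setup.py bdist_wheel` possible. As a rough illustration of the resulting pattern (the project metadata below is hypothetical, not taken from python-cluster):

# Minimal sketch of the pattern adopted above; name, version and module
# list are placeholders, not python-cluster's real metadata.
from setuptools import setup  # distutils.core.setup cannot build wheels

readme_contents = open("README.rst").read()

setup(
    name="example-project",        # hypothetical
    version="0.1.0",               # hypothetical
    long_description=readme_contents,
    py_modules=["example"],        # hypothetical
)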
1cc50ab9d777da351337c694b1aaf1134410b90b
diff --git a/demos/Aspect/DebugAspect.php b/demos/Aspect/DebugAspect.php index <HASH>..<HASH> 100644 --- a/demos/Aspect/DebugAspect.php +++ b/demos/Aspect/DebugAspect.php @@ -49,7 +49,7 @@ class DebugAspect implements Aspect /** * Pointcut for example class * - * @Pointcut("execution(public Example->public*(*))") + * @Pointcut("execution(public *->*(*))") */ protected function examplePublicMethods() {} diff --git a/demos/index.php b/demos/index.php index <HASH>..<HASH> 100644 --- a/demos/index.php +++ b/demos/index.php @@ -38,7 +38,8 @@ if ($class instanceof Serializable) { } else { echo "Ooops, Example isn't serializable!", "<br>", PHP_EOL; } +unserialize(serialize($class)); $class->publicHello(); for ($i=10; $i--; ) { - $class->cacheMe(1); + $class->cacheMe(0.2); } \ No newline at end of file diff --git a/src/Go/Core/AspectContainer.php b/src/Go/Core/AspectContainer.php index <HASH>..<HASH> 100644 --- a/src/Go/Core/AspectContainer.php +++ b/src/Go/Core/AspectContainer.php @@ -297,7 +297,7 @@ class AspectContainer /** * Returns list of advices from advisor and point filter * - * @param ReflectionClass|ParsedReflectionClass|string $class Class to inject advices + * @param ReflectionClass|ParsedReflectionClass $class Class to inject advices * @param Aop\PointcutAdvisor $advisor Advisor for class * @param Aop\PointFilter $filter Filter for points * @@ -337,13 +337,18 @@ class AspectContainer /** * Returns list of introduction advices from advisor * - * @param ReflectionClass|ParsedReflectionClass|string $class Class to inject advices + * @param ReflectionClass|ParsedReflectionClass $class Class to inject advices * @param Aop\IntroductionAdvisor $advisor Advisor for class * * @return array */ private function getIntroductionFromAdvisor($class, $advisor) { + // Do not make introduction for traits + if ($class->isTrait()) { + return array(); + } + /** @var $advice Aop\IntroductionInfo */ $advice = $advisor->getAdvice();
Update the demo aspect to show that the serialize() and unserialize() methods from a trait are intercepted correctly
goaop_framework
train
b8e812eba957e502892f3c68e727c581c9b3e902
diff --git a/src/Support/Facades/Setting.php b/src/Support/Facades/Setting.php index <HASH>..<HASH> 100644 --- a/src/Support/Facades/Setting.php +++ b/src/Support/Facades/Setting.php @@ -24,6 +24,6 @@ class Setting extends Facade */ protected static function getFacadeAccessor() { - return SettingModel::class; + return Dashboard::modelClass(SettingModel::class); } }
Deploy orchidsoftware/platform to github.com/orchidsoftware/platform.git:master
orchidsoftware_platform
train
3977f30869fad2c679d7913ce968add7c7987282
diff --git a/daemon/daemon.go b/daemon/daemon.go index <HASH>..<HASH> 100644 --- a/daemon/daemon.go +++ b/daemon/daemon.go @@ -956,7 +956,7 @@ func getDefaultRouteMtu() (int, error) { return 0, err } for _, r := range routes { - if r.Default { + if r.Default && r.Iface != nil { return r.Iface.MTU, nil } }
a quick fix to #<I>
containers_storage
train
030c8f57212d967eea5682094a301ea6f3ad863b
diff --git a/ecies/__init__.py b/ecies/__init__.py index <HASH>..<HASH> 100644 --- a/ecies/__init__.py +++ b/ecies/__init__.py @@ -1,4 +1,3 @@ - from ecies.utils import generate_key, hex2prv, hex2pub, derive, aes_encrypt, aes_decrypt __all__ = ["encrypt", "decrypt"] diff --git a/ecies/__version__.py b/ecies/__version__.py index <HASH>..<HASH> 100644 --- a/ecies/__version__.py +++ b/ecies/__version__.py @@ -4,4 +4,4 @@ __description__ = "Elliptic Curve Integrated Encryption Scheme for secp256k1 in __url__ = "https://github.com/kigawas/eciespy" __author__ = "Weiliang Li" __author_email__ = "[email protected]" -__license__ = "Apache-2.0" +__license__ = "MIT" diff --git a/requirements-dev.txt b/requirements-dev.txt index <HASH>..<HASH> 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,3 +3,4 @@ mypy flake8 autopep8 ipython +black diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ setup( classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", + "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python",
Change LICENSE to MIT
kigawas_eciespy
train
b83868b5149a5c26474a0637e2c26263de564f6a
diff --git a/mocker.go b/mocker.go index <HASH>..<HASH> 100644 --- a/mocker.go +++ b/mocker.go @@ -10,6 +10,7 @@ import ( "os" "path" "strings" + "sync" "syscall" "text/template" "time" @@ -21,6 +22,8 @@ output="{{.}}" echo -n "${output}" ` +var pathMutex sync.Mutex + // Add creates a temporary directory containing an executable file named "name" // that prints "output" when executed. It also adds the temporary directory to // the first position of $PATH. @@ -46,6 +49,8 @@ func Add(name, output string) (tempdir string, err error) { if err != nil { return } + pathMutex.Lock() + defer pathMutex.Unlock() path := os.Getenv("PATH") path = tempdir + ":" + path err = os.Setenv("PATH", path) @@ -60,13 +65,16 @@ func Remove(tempdir string) error { if !strings.HasPrefix(tempdir, os.TempDir()) { return errors.New("Remove can only remove temporary directories, tryied to remove " + tempdir) } + pathMutex.Lock() path := os.Getenv("PATH") index := strings.Index(path, tempdir) if index < 0 { + pathMutex.Unlock() return errors.New(fmt.Sprintf("%s is not in $PATH", tempdir)) } path = path[:index] + path[index+len(tempdir)+1:] err := os.Setenv("PATH", path) + pathMutex.Unlock() if err != nil { return err }
mocker: using a mutex to guarantee thread safety
tsuru_commandmocker
train
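The commit above serializes reads and writes of $PATH behind a mutex because concurrent Getenv/Setenv calls race when tests run in parallel. A minimal Python analogue of the same pattern, assuming several threads add and remove temporary directories concurrently:

# Illustrative sketch: guard read-modify-write of PATH with a lock.
import os
import threading

_path_lock = threading.Lock()

def add_to_path(tempdir):
    with _path_lock:  # serialize the read-modify-write sequence
        os.environ["PATH"] = tempdir + os.pathsep + os.environ["PATH"]

def remove_from_path(tempdir):
    with _path_lock:
        parts = os.environ["PATH"].split(os.pathsep)
        os.environ["PATH"] = os.pathsep.join(p for p in parts if p != tempdir)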
a60522daf8ff8475d670af28f6da3c95bb35773f
diff --git a/salt/minion.py b/salt/minion.py index <HASH>..<HASH> 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -422,6 +422,7 @@ class MinionBase(object): ' {0}'.format(opts['master'])) if opts['master_shuffle']: shuffle(opts['master']) + opts['auth_tries'] = 0 # if opts['master'] is a str and we have never created opts['master_list'] elif isinstance(opts['master'], str) and ('master_list' not in opts): # We have a string, but a list was what was intended. Convert.
Set auth retry count to 0 if multimaster mode is failover
saltstack_salt
train
fd0265aea4ee2afd0eed39e98a64e29ccf8cbb54
diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java +++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/Coordinator.java @@ -392,7 +392,7 @@ public final class Coordinator { LOGGER.info("Run starting..."); - new RunTestSuiteTask(testSuite, + boolean success = new RunTestSuiteTask(testSuite, coordinatorParameters, componentRegistry, failureCollector, @@ -400,7 +400,11 @@ public final class Coordinator { remoteClient, performanceStatsCollector).run(); - LOGGER.info("Run complete!"); + if (success) { + LOGGER.info("Run complete!"); + } else { + throw new RuntimeException("Run completed with failures!"); + } } public String killWorker(RcKillWorkerOperation op) throws Exception { diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java +++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/RunTestSuiteTask.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; import static com.hazelcast.simulator.utils.CommonUtils.getElapsedSeconds; import static com.hazelcast.simulator.utils.CommonUtils.rethrow; @@ -64,10 +65,10 @@ public class RunTestSuiteTask { this.performanceStatsCollector = performanceStatsCollector; } - public void run() { + public boolean run() { List<TestData> tests = componentRegistry.addTests(testSuite); try { - run0(tests); + return run0(tests); } finally { testPhaseListeners.removeAllListeners(runners); componentRegistry.removeTests(testSuite); @@ -75,7 +76,7 @@ public class RunTestSuiteTask { } } - private void run0(List<TestData> tests) { + private boolean run0(List<TestData> tests) { int testCount = testSuite.size(); boolean parallel = testSuite.isParallel() && testCount > 1; Map<TestPhase, CountDownLatch> testPhaseSyncMap = getTestPhaseSyncMap(testCount, parallel, @@ -103,12 +104,9 @@ public class RunTestSuiteTask { echoTestSuiteStart(testCount, parallel); long started = System.nanoTime(); - if (parallel) { - runParallel(); - } else { - runSequential(); - } + boolean success = parallel ? runParallel() : runSequential(); echoTestSuiteEnd(testCount, started); + return success; } private void echoTestSuiteDuration(boolean isParallel) { @@ -123,14 +121,17 @@ public class RunTestSuiteTask { } } - private void runParallel() { + private boolean runParallel() { + final AtomicBoolean success = new AtomicBoolean(true); ThreadSpawner spawner = new ThreadSpawner("runParallel", true); for (final TestCaseRunner runner : runners) { spawner.spawn(new Runnable() { @Override public void run() { try { - runner.run(); + if (!runner.run()) { + success.set(false); + } } catch (Exception e) { throw rethrow(e); } @@ -138,17 +139,22 @@ public class RunTestSuiteTask { }); } spawner.awaitCompletion(); + return success.get(); } - private void runSequential() { + private boolean runSequential() { + boolean success = true; for (TestCaseRunner runner : runners) { - runner.run(); + if (!runner.run()) { + success = false; + } boolean hasCriticalFailure = failureCollector.hasCriticalFailure(); if (hasCriticalFailure && testSuite.isFailFast()) { LOGGER.info("Aborting TestSuite due to critical failure"); break; } } + return success; } private void echoTestSuiteStart(int testCount, boolean isParallel) { diff --git a/simulator/src/main/java/com/hazelcast/simulator/coordinator/TestCaseRunner.java b/simulator/src/main/java/com/hazelcast/simulator/coordinator/TestCaseRunner.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/coordinator/TestCaseRunner.java +++ b/simulator/src/main/java/com/hazelcast/simulator/coordinator/TestCaseRunner.java @@ -157,7 +157,7 @@ final class TestCaseRunner implements TestPhaseListener { throw rethrow(e); } - return hasFailure(); + return !hasFailure(); } private void run0() {
Fixes #<I>: exit code for coordinator-remote run in case of failure
hazelcast_hazelcast-simulator
train
51d674356db4882a4270005b99a92a81d7cec6ea
diff --git a/lib/eep.fn.stats.js b/lib/eep.fn.stats.js index <HASH>..<HASH> 100644 --- a/lib/eep.fn.stats.js +++ b/lib/eep.fn.stats.js @@ -40,7 +40,7 @@ function SumFunction() { self.name = "sum"; self.init = function() { s = 0; }; self.accumulate = function(v) { s += v; }; - self.compensate = function(v) { s += v; }; + self.compensate = function(v) { s -= v; }; self.emit = function() { return s; }; self.make = function() { return new SumFunction(); }; }
Sliding windows now have O(n) performance. Numerically stable mean and variance. Aggregate function API change.
darach_eep-js
train
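The one-character fix above restores the accumulate/compensate contract that keeps a sliding-window sum cheap per event: compensate must subtract what accumulate added when a value leaves the window. A small illustrative sketch of that contract (the names are mine, not the eep.js API):

# Sliding sum with an explicit accumulate/compensate pair.
from collections import deque

class SlidingSum:
    def __init__(self, size):
        self.window = deque()
        self.size = size
        self.s = 0

    def push(self, v):
        self.window.append(v)
        self.s += v                          # accumulate
        if len(self.window) > self.size:
            self.s -= self.window.popleft()  # compensate: must subtract

    def emit(self):
        return self.s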
ae340e98b137fdd04100da5b7415bce96768840f
diff --git a/src/main/java/dtest/mobileactions/MobileTestAction.java b/src/main/java/dtest/mobileactions/MobileTestAction.java index <HASH>..<HASH> 100644 --- a/src/main/java/dtest/mobileactions/MobileTestAction.java +++ b/src/main/java/dtest/mobileactions/MobileTestAction.java @@ -177,6 +177,9 @@ public abstract class MobileTestAction extends TestAction { case VISIBILITY_OF_ELEMENT_LOCATED: wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); break; + case INVISIBILITY_OF_ELEMENT_LOCATED: + wait.until(ExpectedConditions.invisibilityOfElementLocated(locator)); + break; } } } diff --git a/src/main/java/dtest/mobileactions/WaitForCondition.java b/src/main/java/dtest/mobileactions/WaitForCondition.java index <HASH>..<HASH> 100644 --- a/src/main/java/dtest/mobileactions/WaitForCondition.java +++ b/src/main/java/dtest/mobileactions/WaitForCondition.java @@ -27,6 +27,14 @@ public class WaitForCondition extends MobileTestAction { } catch (Exception e) { System.out.println("Element not found :" + locator.toString()); } + case "invisibilityOfElementLocated": + try{ + waitForCondition(locator, Condition.INVISIBILITY_OF_ELEMENT_LOCATED, timeout); + System.out.println("Element Not Found :" + locator.toString()); + break; + } catch (Exception e) { + System.out.println("Element Found :" + locator.toString()); + } } } } diff --git a/src/main/java/dtest/mobileactions/enums/Condition.java b/src/main/java/dtest/mobileactions/enums/Condition.java index <HASH>..<HASH> 100644 --- a/src/main/java/dtest/mobileactions/enums/Condition.java +++ b/src/main/java/dtest/mobileactions/enums/Condition.java @@ -2,5 +2,6 @@ package dtest.mobileactions.enums; public enum Condition { VISIBILITY_OF_ELEMENT_LOCATED, - PRESENCE_OF_ELEMENT_LOCATED + PRESENCE_OF_ELEMENT_LOCATED, + INVISIBILITY_OF_ELEMENT_LOCATED }
Updated WaitForCondition to also wait for invisibility of the element
mcdcorp_opentest
train
dfebf85b16439dccf9dee041a08866d49b535b2f
diff --git a/inplaceeditform_extra_fields/fields.py b/inplaceeditform_extra_fields/fields.py index <HASH>..<HASH> 100644 --- a/inplaceeditform_extra_fields/fields.py +++ b/inplaceeditform_extra_fields/fields.py @@ -159,12 +159,13 @@ class AdaptorTinyMCEField(AdaptorTextAreaField): config['fieldtypes'] = 'div.mce-content-body' config['focuswhenediting'] = "0" if not 'autosavetiny' in config: - auto_save = config.get('autoSave', None) + auto_save = config.get('autoSave', None) or config.get('autosave', None) if auto_save: config['autosavetiny'] = str(int(auto_save)) else: config['autosavetiny'] = str(int(getattr(settings, 'INPLACEEDIT_AUTO_SAVE', False))) config['autosave'] = "0" + config['autoSave'] = "0" return config def get_field(self): diff --git a/inplaceeditform_extra_fields/templates/inplaceeditform_extra_fields/adaptor_tiny/setup.js b/inplaceeditform_extra_fields/templates/inplaceeditform_extra_fields/adaptor_tiny/setup.js index <HASH>..<HASH> 100644 --- a/inplaceeditform_extra_fields/templates/inplaceeditform_extra_fields/adaptor_tiny/setup.js +++ b/inplaceeditform_extra_fields/templates/inplaceeditform_extra_fields/adaptor_tiny/setup.js @@ -5,17 +5,21 @@ function (editor) { $('#' + editor.id).parents(self.formSelector).find('.cancel').data('editor', editor); }, 300); var value = $('#' + editor.id).html(); + var form = $('#' + editor.id).parents('form'); {% ifequal autosavetiny '1' %} editor.on('blur', function () { - self.methods.bind(self.methods.autoSaveCallBack, {'oldValue': value, + if (editor.isDirty()) { + self.methods.bind(self.methods.autoSaveCallBack, {'oldValue': value, 'tag': $('#' + editor.id)})(); + } else { + form.find('.cancel').click(); + } }); {% else %} editor.on('blur', function () { return false; }); {% endifequal %} - var form = $('#' + editor.id).parents('form'); editor.addMenuItem('apply', { text: 'Apply', context: 'edit', diff --git a/inplaceeditform_extra_fields/widgets.py b/inplaceeditform_extra_fields/widgets.py index <HASH>..<HASH> 100644 --- a/inplaceeditform_extra_fields/widgets.py +++ b/inplaceeditform_extra_fields/widgets.py @@ -65,14 +65,14 @@ class TinyMCE(widgets.Textarea): if width < 700: toolbar_items = self.mce_settings['toolbar'].split(' | ') if width < 700 and width > 350: - toolbar1 = ' | '.join(toolbar_items[:len(toolbar_items)/2]) - toolbar2 = ' | '.join(toolbar_items[len(toolbar_items)/2:]) + toolbar1 = ' | '.join(toolbar_items[:int(len(toolbar_items) / 2)]) + toolbar2 = ' | '.join(toolbar_items[int(len(toolbar_items) / 2):]) self.mce_settings['toolbar1'] = toolbar1 self.mce_settings['toolbar2'] = toolbar2 if width < 350: - toolbar1 = ' | '.join(toolbar_items[:len(toolbar_items)/3]) - toolbar2 = ' | '.join(toolbar_items[len(toolbar_items)/3:2*len(toolbar_items)/3]) - toolbar3 = ' | '.join(toolbar_items[2*len(toolbar_items)/3:]) + toolbar1 = ' | '.join(toolbar_items[:int(len(toolbar_items) / 3)]) + toolbar2 = ' | '.join(toolbar_items[int(len(toolbar_items) / 3):int(2 * len(toolbar_items) / 3)]) + toolbar3 = ' | '.join(toolbar_items[int(2 * len(toolbar_items) / 3):]) self.mce_settings['toolbar1'] = toolbar1 self.mce_settings['toolbar2'] = toolbar2 self.mce_settings['toolbar3'] = toolbar3 @@ -90,7 +90,7 @@ class TinyMCE(widgets.Textarea): final_attrs = self.build_attrs(attrs, name=name) self.mce_settings['elements'] = "id_%s" % name mce_json = JSONEncoder().encode(self.mce_settings).replace("\"function", "function").replace("}\"", "}") - return mark_safe(u'''<div%s>%s</div> + return mark_safe(u'''<span%s>%s</span> <script type="text/javascript"> tinyMCE.init(%s); setTimeout(function () {
Details of the TinyMCE adaptor: Python 3 compatibility and autosave fixes
django-inplaceedit_django-inplaceedit-extra-fields
train
9f52c8fd13098c250ced1d709908400b5b2b7667
diff --git a/phantomjs/main.js b/phantomjs/main.js index <HASH>..<HASH> 100644 --- a/phantomjs/main.js +++ b/phantomjs/main.js @@ -13,6 +13,7 @@ 'use strict'; var fs = require('fs'); +var _ = require('lodash'); // The temporary file used for communications. var tmpfile = phantom.args[0]; @@ -55,6 +56,11 @@ var inject = function() { injected = true; }; +// Merge phantomjs page settings from options.page +if (options.page) { + _.merge(page, options.page); +} + // Keep track if the client-side helper script already has been injected. page.onUrlChanged = function(newUrl) { injected = false;
Merge phantomjs page settings from Grunt config
GruntBlanketMocha_grunt-blanket-mocha
train
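The patch above forwards options.page into PhantomJS's page object with lodash's _.merge, a recursive merge so nested settings are overlaid rather than replaced wholesale. A rough Python equivalent of that deep-merge behaviour (the helper name is illustrative):

# Recursively fold user-supplied settings into the defaults.
def merge(target, source):
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            merge(target[key], value)  # recurse into nested settings
        else:
            target[key] = value
    return target

page = {"settings": {"loadImages": True}, "viewportSize": {"width": 800}}
overrides = {"settings": {"userAgent": "test-agent"}}
merge(page, overrides)
# page keeps loadImages and viewportSize, and gains userAgent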
19a2e65cbec1e85ebfafc5ef1807c1c04eb4f25e
diff --git a/NavigationReactNative/sample/android/Page.js b/NavigationReactNative/sample/android/Page.js index <HASH>..<HASH> 100644 --- a/NavigationReactNative/sample/android/Page.js +++ b/NavigationReactNative/sample/android/Page.js @@ -4,21 +4,18 @@ import {NavigationContext} from 'navigation-react'; var Page = () => ( <NavigationContext.Consumer> - {({stateNavigator}) => { - var scene = stateNavigator.stateContext.crumbs.length + 1; - return ( - <View style={styles.container}> - <Text style={styles.text}> - {`Scene ${scene}`} - </Text> - <TouchableHighlight - style={styles.button} - onPress={() => stateNavigator.navigate('scene')}> - <Text>Next</Text> - </TouchableHighlight> - </View> - ) - }} + {({stateNavigator}) => ( + <View style={styles.container}> + <Text style={styles.text}> + {`Scene ${stateNavigator.stateContext.crumbs.length}`} + </Text> + <TouchableHighlight + style={styles.button} + onPress={() => stateNavigator.navigate('scene')}> + <Text>Next</Text> + </TouchableHighlight> + </View> + )} </NavigationContext.Consumer> );
Moved scene inline for implicit return
grahammendick_navigation
train
c7f72a80cbc3756f5d04f1c279d2a1daa901cad8
diff --git a/examples/hsmm-possiblechangepoints-separatetrans.py b/examples/hsmm-possiblechangepoints-separatetrans.py index <HASH>..<HASH> 100644 --- a/examples/hsmm-possiblechangepoints-separatetrans.py +++ b/examples/hsmm-possiblechangepoints-separatetrans.py @@ -73,7 +73,7 @@ for idx, (data, changepoints) in enumerate(zip(datas,changepointss)): posteriormodel.add_data(data=data,changepoints=changepoints,group_id=idx) for idx in progprint_xrange(100): - posteriormodel.resample_model() + posteriormodel.resample_model(joblib_jobs=2) for s in posteriormodel.states_list: s.Viterbi() diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -443,7 +443,7 @@ class _HMMMeanField(_HMMBase,ModelMeanField): if len(states_list) > 0: joblib_args = util.general.list_split( - [(s.data,s._kwargs) for s in states_list], + [self._get_joblib_pair(s) for s in states_list], joblib_jobs) allstats = Parallel(n_jobs=joblib_jobs,backend='multiprocessing')\ (delayed(_get_stats)(self,arg) for arg in joblib_args) @@ -453,6 +453,9 @@ class _HMMMeanField(_HMMBase,ModelMeanField): [s for grp in allstats for s in grp]): s.all_expected_stats = stats + def _get_joblib_pair(self,states_obj): + return (states_obj.data,states_obj._kwargs) + class _HMMSVI(_HMMBase,ModelMeanFieldSVI): # NOTE: classes with this mixin should also have the _HMMMeanField mixin for # joblib stuff to work diff --git a/util/general.py b/util/general.py index <HASH>..<HASH> 100644 --- a/util/general.py +++ b/util/general.py @@ -280,6 +280,11 @@ def indices_to_changepoints(T,changes): return zip(changes[:-1],changes[1:]) +def labels_to_changepoints(labels): + _, durs = rle(labels) + cdurs = np.concatenate(((0,),durs.cumsum())) + return zip(cdurs[:-1],cdurs[1:]) + def ndarrayhash(v): assert isinstance(v,np.ndarray) return hashlib.sha1(v).hexdigest() @@ -298,3 +303,4 @@ def treemap(f,l): return [treemap(f,_) for _ in l] else: return f(l) +
Update joblib-related tweaks for AR models
mattjj_pyhsmm
train
390f9784c5577e6307e9702933d5f952646264d5
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,10 @@ * [#744](https://github.com/deivid-rodriguez/byebug/pull/744): Another punctuation tweak in `enable breakpoints` help message. * [#736](https://github.com/deivid-rodriguez/byebug/pull/736): Skip warning about `$SAFE` global variable on ruby 2.7 when listing global variables. +### Added + +* [#688](https://github.com/deivid-rodriguez/byebug/pull/688): `where` command now receives an optional numeric argument `<n>` to print the nth first frames. + ### Removed * Support for MRI 2.4. Byebug no longer installs on this platform. diff --git a/lib/byebug/commands/where.rb b/lib/byebug/commands/where.rb index <HASH>..<HASH> 100644 --- a/lib/byebug/commands/where.rb +++ b/lib/byebug/commands/where.rb @@ -14,12 +14,12 @@ module Byebug self.allow_in_post_mortem = true def self.regexp - /^\s* (?:w(?:here)?|bt|backtrace) \s*$/x + /^\s* (?:w(?:here)?|bt|backtrace) (?:\s+(\S+))? \s*$/x end def self.description <<-DESCRIPTION - w[here]|bt|backtrace + w[here]|bt|backtrace[ maximum-frame] #{short_description} @@ -29,6 +29,10 @@ module Byebug The position of the current frame is marked with -->. C-frames hang from their most immediate Ruby frame to indicate that they are not navigable. + + Without an argument, the command prints all the frames. With an argument, + the command prints the nth first frames, where n is the largest between + the argument or the maximum stack frame. DESCRIPTION end @@ -43,7 +47,14 @@ module Byebug private def print_backtrace - bt = prc("frame.line", (0...context.stack_size)) do |_, index| + max_frame = + if @match[1] && @match[1].to_i <= context.stack_size + @match[1].to_i + else + context.stack_size + end + + bt = prc("frame.line", (0...max_frame)) do |_, index| Frame.new(context, index).to_hash end diff --git a/test/commands/where_test.rb b/test/commands/where_test.rb index <HASH>..<HASH> 100644 --- a/test/commands/where_test.rb +++ b/test/commands/where_test.rb @@ -110,6 +110,35 @@ module Byebug check_output_includes(*expected_output) end + + def test_where_with_argument_less_than_largest_frame + enter "where 3" + debug_code(program) + + expected_output = prepare_for_regexp <<-TXT + --> #0 #{example_full_class}.to_int(str#String) at #{example_path}:16 + #1 #{example_full_class}.encode(str#String) at #{example_path}:11 + #2 #{example_full_class}.initialize(l#String) at #{example_path}:7 + TXT + + check_output_includes(*expected_output) + end + + def test_where_with_argument_greater_than_largest_frame + enter "where 20" + debug_code(program) + + expected_output = prepare_for_regexp <<-TXT + --> #0 #{example_full_class}.to_int(str#String) at #{example_path}:16 + #1 #{example_full_class}.encode(str#String) at #{example_path}:11 + #2 #{example_full_class}.initialize(l#String) at #{example_path}:7 + ͱ-- #3 Class.new(*args) at #{example_path}:20 + #4 <module:Byebug> at #{example_path}:20 + #5 <top (required)> at #{example_path}:1 + TXT + + check_output_includes(*expected_output) + end end #
Pass an argument to `where` to print the first n frames (#<I>)
deivid-rodriguez_byebug
train
562f423fc755c2c59307053bc5afceebb068397f
diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index <HASH>..<HASH> 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -649,20 +649,21 @@ class NDFrameGroupBy(GroupBy): # if we make it here, test if we can use the fast path try: res_fast = fast_path(group) - - # verify fast path does not change columns (and names), otherwise - # its results cannot be joined with those of the slow path - if res_fast.columns != group.columns: - return path, res - # verify numerical equality with the slow path - if res.shape == res_fast.shape: - res_r = res.values.ravel() - res_fast_r = res_fast.values.ravel() - mask = notna(res_r) - if (res_r[mask] == res_fast_r[mask]).all(): - path = fast_path except Exception: - pass + # Hard to know ex-ante what exceptions `fast_path` might raise + return path, res + + # verify fast path does not change columns (and names), otherwise + # its results cannot be joined with those of the slow path + if not isinstance(res_fast, DataFrame): + return path, res + + if not res_fast.columns.equals(group.columns): + return path, res + + if res_fast.equals(res): + path = fast_path + return path, res def _transform_item_by_item(self, obj, wrapper):
CLN: avoid catching Exception in _choose_path (#<I>)
pandas-dev_pandas
train
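The refactor above keeps the broad `except` only around the fast-path call, whose failure modes are unknown ahead of time, and then adopts the fast path only when its result matches the slow path. Distilled into a standalone sketch (the function names are illustrative, not pandas internals):

def choose_path(fast_path, slow_path, group):
    # Default to the slow path; its result is the reference answer.
    path, res = slow_path, slow_path(group)
    try:
        res_fast = fast_path(group)
    except Exception:
        # Hard to know ex-ante what exceptions fast_path might raise.
        return path, res
    # Adopt the fast path only if it reproduces the slow result exactly.
    if type(res_fast) is type(res) and res_fast == res:
        path = fast_path
    return path, res

path, res = choose_path(lambda g: sum(g), lambda g: sum(g), [1, 2, 3])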
f4e0ff6e85fef67d6c5123f6d629975fc5aafcc0
diff --git a/lib/FeaturesViewer.js b/lib/FeaturesViewer.js index <HASH>..<HASH> 100644 --- a/lib/FeaturesViewer.js +++ b/lib/FeaturesViewer.js @@ -37,6 +37,16 @@ var FeaturesViewer = function(opts) { var updateZoomFromChart = function() { fv.zoom.x(fv.xScale); + + // remove if no zoom + var fullDomain = fv.maxPos - 1, + currentDomain = fv.xScale.domain()[1] - fv.xScale.domain()[0]; + + var minScale = currentDomain / fullDomain, + maxScale = minScale * 20; + + fv.zoom.scaleExtent([minScale, maxScale]); + //end remove }; var updateViewportFromChart = function () { @@ -115,8 +125,9 @@ var FeaturesViewer = function(opts) { var createZoom = function() { var zoom = d3.behavior.zoom() .x(fv.xScale) - .scaleExtent([1,1]) + // .scaleExtent([1,1]) .on('zoom', function() { + console.log(fv.xScale.domain()[0]); if (fv.xScale.domain()[0] < 1) { var tempX = zoom.translate()[0] - fv.xScale(1) + fv.xScale.range()[0]; zoom.translate([tempX, 0]);
Re-enabled zoom on main section
ebi-uniprot_ProtVista
train
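The re-enabled zoom above derives its scaleExtent from the ratio of the visible domain to the full sequence, so the user can never zoom out past the whole track and can zoom in at most 20x further. The same arithmetic as a standalone helper (the names and the printed example are illustrative):

# Compute (min_scale, max_scale) for a zoom behaviour; 20 mirrors the patch.
def zoom_extent(max_pos, domain_start, domain_end, max_factor=20):
    full_domain = max_pos - 1
    current_domain = domain_end - domain_start
    min_scale = current_domain / full_domain
    return min_scale, min_scale * max_factor

print(zoom_extent(400, 1, 400))  # (1.0, 20.0) when fully zoomed out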
74d2b4c1889781c1fbd320cf95dce534bea62674
diff --git a/pandas/io/data.py b/pandas/io/data.py index <HASH>..<HASH> 100644 --- a/pandas/io/data.py +++ b/pandas/io/data.py @@ -680,6 +680,10 @@ class Options(object): root = self._parse_url(url) tables = root.xpath('.//table') + ntables = len(tables) + if ntables == 0: + raise RemoteDataError("No tables found at {0!r}".format(url)) + table_name = '_tables' + m1 + str(year)[-2:] setattr(self, table_name, tables) @@ -723,9 +727,7 @@ class Options(object): ntables = len(tables) table_loc = self._TABLE_LOC[name] - if ntables == 0: - raise RemoteDataError("No tables found at {0!r}".format(url)) - elif table_loc - 1 > ntables: + if table_loc - 1 > ntables: raise RemoteDataError("Table location {0} invalid, {1} tables" " found".format(table_loc, ntables)) diff --git a/pandas/io/tests/test_data.py b/pandas/io/tests/test_data.py index <HASH>..<HASH> 100644 --- a/pandas/io/tests/test_data.py +++ b/pandas/io/tests/test_data.py @@ -266,8 +266,7 @@ class TestYahooOptions(tm.TestCase): options = self.aapl.get_options_data(expiry=self.expiry) except RemoteDataError as e: nose.SkipTest(e) - else: - assert len(options) > 1 + self.assertTrue(len(options) > 1) @network def test_get_near_stock_price(self): @@ -276,9 +275,6 @@ class TestYahooOptions(tm.TestCase): expiry=self.expiry) except RemoteDataError as e: nose.SkipTest(e) - else: - assert len(options) > 1 - self.assertTrue(len(options) > 1) @network @@ -287,8 +283,7 @@ class TestYahooOptions(tm.TestCase): calls = self.aapl.get_call_data(expiry=self.expiry) except RemoteDataError as e: nose.SkipTest(e) - else: - assert len(calls) > 1 + self.assertTrue(len(calls) > 1) @network def test_get_put_data(self): @@ -296,33 +291,30 @@ class TestYahooOptions(tm.TestCase): puts = self.aapl.get_put_data(expiry=self.expiry) except RemoteDataError as e: nose.SkipTest(e) - else: - assert len(puts) > 1 + self.assertTrue(len(puts) > 1) @network def test_get_expiry_months(self): try: dates = self.aapl._get_expiry_months() - except RemoteDataError: - raise nose.SkipTest("RemoteDataError thrown no dates found") + except RemoteDataError as e: + raise nose.SkipTest(e) self.assertTrue(len(dates) > 1) @network def test_get_all_data(self): try: data = self.aapl.get_all_data(put=True) - except RemoteDataError: - raise nose.SkipTest("RemoteDataError thrown") - + except RemoteDataError as e: + raise nose.SkipTest(e) self.assertTrue(len(data) > 1) @network def test_get_all_data_calls_only(self): try: data = self.aapl.get_all_data(call=True, put=False) - except RemoteDataError: - raise nose.SkipTest("RemoteDataError thrown") - + except RemoteDataError as e: + raise nose.SkipTest(e) self.assertTrue(len(data) > 1) @network
TST: Remove else after except in tests for io.data.Options. Made the old tests consistent with the new tests, and passed the RemoteDataError on skip. CLN: Moved the test for no tables into the get_option_tables method.
pandas-dev_pandas
train
a6d11c179bc0f3dd12e90802df1ea80e0dd2099c
diff --git a/asks/request.py b/asks/request.py index <HASH>..<HASH> 100644 --- a/asks/request.py +++ b/asks/request.py @@ -38,7 +38,7 @@ class Request: session passes the required info and calls `make_request`. Args: - session (child of BaseSession): A refrence to the calling session. + session (child of BaseSession): A reference to the calling session. method (str): The HTTP method to be used in the request. @@ -71,7 +71,7 @@ class Request: max_redirects (int): The maximum number of redirects allowed. - persist_cookies (True or None): Passing True instanciates a + persist_cookies (True or None): Passing True instantiates a CookieTracker object to manage the return of cookies to the server under the relevant domains. @@ -348,7 +348,7 @@ class Request: async def _get_new_sock(self): ''' - On 'Connetcion: close' headers we've to create a new connection. + On 'Connection: close' headers we've to create a new connection. This reaches in to the parent session and pulls a switcheroo, dunking the current connection and requesting a new one. ''' @@ -359,7 +359,7 @@ class Request: async def _formulate_body(self): ''' - Takes user suppied data / files and forms it / them + Takes user supplied data / files and forms it / them appropriately, returning the contents type, len, and the request body its self. @@ -501,12 +501,12 @@ class Request: async def _catch_response(self, hconnection): ''' - Instanciates the parser which manages incoming data, first getting + Instantiates the parser which manages incoming data, first getting the headers, storing cookies, and then parsing the response's body, if any. This function also instances the Response class in which the response - satus line, headers, cookies, and body is stored. + status line, headers, cookies, and body is stored. It should be noted that in order to remain preformant, if the user wishes to do any file IO it should use async files or risk long wait @@ -530,7 +530,8 @@ class Request: 'headers': c_i_dict( [(str(name, 'utf-8'), str(value, 'utf-8')) for name, value in response.headers]), - 'body': b'' + 'body': b'', + 'url': self.uri } for header in response.headers: if header[0] == b'set-cookie': @@ -629,7 +630,7 @@ class Request: async def _auth_handler_post_check_retry(self, response_obj): ''' - The other half of _auth_handler_post_check_retry (what a mouthfull). + The other half of _auth_handler_post_check_retry (what a mouthful). If auth has not yet been attempted and the most recent response object is a 401, we store that response object and retry the request in exactly the same manner as before except with the correct auth. diff --git a/asks/response_objects.py b/asks/response_objects.py index <HASH>..<HASH> 100644 --- a/asks/response_objects.py +++ b/asks/response_objects.py @@ -22,7 +22,8 @@ class Response: reason_phrase, headers, body, - method): + method, + url): self.encoding = encoding self.http_version = http_version self.status_code = status_code @@ -30,6 +31,7 @@ class Response: self.headers = headers self.body = body self.method = method + self.url = url self.history = [] self.cookies = [] diff --git a/asks/sessions.py b/asks/sessions.py index <HASH>..<HASH> 100644 --- a/asks/sessions.py +++ b/asks/sessions.py @@ -276,4 +276,4 @@ class Session(BaseSession): ''' Puts together the hostloc and current endpoint for use in request uri. ''' - return self.base_location or '' + self.endpoint or '' + return (self.base_location or '') + (self.endpoint or '') diff --git a/docs/source/the-response-object.rst b/docs/source/the-response-object.rst index <HASH>..<HASH> 100644 --- a/docs/source/the-response-object.rst +++ b/docs/source/the-response-object.rst @@ -107,3 +107,15 @@ If any redirects or 401-requiring auth attempts were handled during the request, # [<Response 302 at 0xb6a807cc>, <Response 302 at 0xb... # 302 + +URL +___ + +Find the url that the request was made to.:: + + async def main(): + r = asks.get('http://example.com') + print(r.url) + + # Results in: + # 'http://example.com'
Fixed bug relating to usage of endpoint attrib. Added url attrib to response object.
theelous3_asks
train
34eca61d38b45c2b88bd12c6c7bd40b48ae53118
diff --git a/putio.py b/putio.py index <HASH>..<HASH> 100755 --- a/putio.py +++ b/putio.py @@ -7,6 +7,9 @@ import webbrowser from urllib import urlencode import requests +from requests.adapters import HTTPAdapter +from requests.packages.urllib3.util.retry import Retry + import iso8601 BASE_URL = 'https://api.put.io/v2' @@ -53,10 +56,21 @@ class AuthHelper(object): class Client(object): - def __init__(self, access_token): + def __init__(self, access_token, use_retry=False): self.access_token = access_token self.session = requests.session() + if use_retry: + # Retry maximum 10 times, backoff on each retry + # Sleeps 1s, 2s, 4s, 8s, etc to a maximum of 120s between retries + # Retries on HTTP status codes 500, 502, 503, 504 + retries = Retry(total=10, + backoff_factor=1, + status_forcelist=[ 500, 502, 503, 504 ]) + + # Use the retry strategy for all HTTPS requests + self.session.mount('https://', HTTPAdapter(max_retries=retries)) + # Keep resource classes as attributes of client. # Pass client to resource classes so resource object # can use the client.
Retry strategy: allow the client's requests to be retried via a simple boolean, with the strategy decided in the class. Defaults to NOT retrying so it conforms to current behaviour.
cenkalti_putio.py
train
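Since the diff above is already Python, the retry wiring can be lifted out almost verbatim; note the Retry import path is `requests.packages.urllib3` in the patch but plain `urllib3` in recent versions of requests:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry  # requests.packages.urllib3 on older requests

session = requests.session()
# Retry up to 10 times on 5xx responses, sleeping 1s, 2s, 4s, ... between tries.
retries = Retry(total=10,
                backoff_factor=1,
                status_forcelist=[500, 502, 503, 504])
session.mount('https://', HTTPAdapter(max_retries=retries))
# Every HTTPS request made through this session now uses the retry strategy.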
b78d118f5a86dd1a993083b7be482d7114c9fc32
diff --git a/includes/class-freemius.php b/includes/class-freemius.php index <HASH>..<HASH> 100755 --- a/includes/class-freemius.php +++ b/includes/class-freemius.php @@ -6047,6 +6047,7 @@ $this->_add_upgrade_action_link(); if ( ! $this->is_addon() && + ! ( ! $this->_is_network_active && fs_is_network_admin() ) && ( // Not registered nor anonymous. ( ! $this->is_registered() && ! $this->is_anonymous() ) ||
[opt-in-notice] [bug-fix] Don't add the activation notice when the plugin is not network-level integrated and the user is in the network-level admin.
Freemius_wordpress-sdk
train
707fa49ac63007a8f1f134b72e0c7786265e7e11
diff --git a/src/WellCommerce/Bundle/DistributionBundle/Loader/BundleLoader.php b/src/WellCommerce/Bundle/DistributionBundle/Loader/BundleLoader.php index <HASH>..<HASH> 100644 --- a/src/WellCommerce/Bundle/DistributionBundle/Loader/BundleLoader.php +++ b/src/WellCommerce/Bundle/DistributionBundle/Loader/BundleLoader.php @@ -91,7 +91,6 @@ class BundleLoader \FOS\JsRoutingBundle\FOSJsRoutingBundle::class, \Bazinga\Bundle\JsTranslationBundle\BazingaJsTranslationBundle::class, \Liip\ImagineBundle\LiipImagineBundle::class, - \Ivory\LuceneSearchBundle\IvoryLuceneSearchBundle::class, \Knp\DoctrineBehaviors\Bundle\DoctrineBehaviorsBundle::class, \WellCommerce\Bundle\AppBundle\WellCommerceAppBundle::class, ];
Removed IvoryLuceneSearchBundle from loader
WellCommerce_CouponBundle
train
7b52f8082a68f7d555dba50a40ce2d4e0eb8d1bb
diff --git a/buildbot/changes/changes.py b/buildbot/changes/changes.py index <HASH>..<HASH> 100644 --- a/buildbot/changes/changes.py +++ b/buildbot/changes/changes.py @@ -14,7 +14,7 @@ class Change: a change comment for the group as a whole. If the version control system supports sequential repository- (or - branch-) wide change numbers (like SVN, P4, and Arch), then revision= + branch-) wide change numbers (like SVN, P4, and Bzr), then revision= should be set to that number. The highest such number will be used at checkout time to get the correct set of files. diff --git a/buildbot/steps/source.py b/buildbot/steps/source.py index <HASH>..<HASH> 100644 --- a/buildbot/steps/source.py +++ b/buildbot/steps/source.py @@ -846,6 +846,7 @@ class Arch(Source): that comes from the repository. If not, the repository's default will be used. """ + warn("Support for Arch will be removed in 0.8.2", DeprecationWarning) self.branch = version self.url = url Source.__init__(self, **kwargs)
put in deprecation warning for Arch and Bazaar - refs #<I>
buildbot_buildbot
train
f003d97761b0b8732f6851789d6f965af32dc8dd
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index <HASH>..<HASH> 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -1,13 +1,6 @@ # Remove these configuration records # one by one as the offenses are removed from the code base. -# Offense count: 2 -# Configuration parameters: AllowSafeAssignment. -Lint/AssignmentInCondition: - Exclude: - - 'lib/graph_matching/algorithm/mcm_bipartite.rb' - - 'lib/graph_matching/algorithm/mwm_bipartite.rb' - # Offense count: 7 Lint/ShadowingOuterLocalVariable: Exclude: diff --git a/lib/graph_matching/algorithm/mcm_bipartite.rb b/lib/graph_matching/algorithm/mcm_bipartite.rb index <HASH>..<HASH> 100644 --- a/lib/graph_matching/algorithm/mcm_bipartite.rb +++ b/lib/graph_matching/algorithm/mcm_bipartite.rb @@ -30,7 +30,10 @@ module GraphMatching unmarked = r = u.select { |i| m[i] == nil } # While there are unmarked R-vertexes - while aug_path.nil? && start = unmarked.pop + loop do + break unless aug_path.nil? + start = unmarked.pop + break unless start # Follow the unmatched edges (if any) to vertexes in V # ignoring any V-vertexes already labeled T diff --git a/lib/graph_matching/algorithm/mwm_bipartite.rb b/lib/graph_matching/algorithm/mwm_bipartite.rb index <HASH>..<HASH> 100644 --- a/lib/graph_matching/algorithm/mwm_bipartite.rb +++ b/lib/graph_matching/algorithm/mwm_bipartite.rb @@ -38,7 +38,10 @@ module GraphMatching q = s.dup.to_a # While searching - while aug_path.nil? && i = q.pop + loop do + break unless aug_path.nil? + i = q.pop + break unless i # Follow the unmatched edges (if any) to free (unlabeled) # cats. Only consider edges with slack (π) of 0.
Lint: Fix Lint/AssignmentInCondition
jaredbeck_graph_matching
train
02c7164906c5a61f0a29b507d71ff76a5a95d6f0
diff --git a/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java b/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java index <HASH>..<HASH> 100644 --- a/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java +++ b/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java @@ -251,6 +251,7 @@ public abstract class AbstractConnectionAdapter extends AbstractUnsupportedOpera } }); } finally { + cachedConnections.clear(); rootInvokeHook.finish(connectionSize); } } diff --git a/sharding-jdbc/sharding-jdbc-core/src/test/java/io/shardingsphere/shardingjdbc/jdbc/adapter/ConnectionAdapterTest.java b/sharding-jdbc/sharding-jdbc-core/src/test/java/io/shardingsphere/shardingjdbc/jdbc/adapter/ConnectionAdapterTest.java index <HASH>..<HASH> 100644 --- a/sharding-jdbc/sharding-jdbc-core/src/test/java/io/shardingsphere/shardingjdbc/jdbc/adapter/ConnectionAdapterTest.java +++ b/sharding-jdbc/sharding-jdbc-core/src/test/java/io/shardingsphere/shardingjdbc/jdbc/adapter/ConnectionAdapterTest.java @@ -130,19 +130,15 @@ public final class ConnectionAdapterTest extends AbstractShardingJDBCDatabaseAnd public void assertClose() throws SQLException { try (ShardingConnection actual = getShardingDataSource().getConnection()) { actual.createStatement().executeQuery(sql); - assertClose(actual, false); actual.close(); - assertClose(actual, true); + assertClose(actual); } } - private void assertClose(final ShardingConnection actual, final boolean closed) throws SQLException { - assertThat(actual.isClosed(), is(closed)); + private void assertClose(final ShardingConnection actual) { + assertTrue(actual.isClosed()); Multimap<String, Connection> cachedConnections = getCachedConnections(actual); - assertThat(cachedConnections.size(), is(2)); - for (Connection each : cachedConnections.values()) { - assertThat(each.isClosed(), is(closed)); - } + assertTrue(cachedConnections.isEmpty()); } @Test
Clear cached connections when the close method is called
apache_incubator-shardingsphere
train
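The fix above clears the connection cache in a finally position so a closed adapter can never hand out stale connections afterwards. A generic sketch of the same discipline (class and field names are illustrative, not ShardingSphere's):

# Close every cached connection, then drop the references unconditionally.
class PooledConnection:
    def __init__(self):
        self.cached = {}  # data source name -> open connection

    def close(self):
        try:
            for conn in self.cached.values():
                conn.close()
        finally:
            # Mirror of cachedConnections.clear(): even if a close fails,
            # never keep references to connections that were shut down.
            self.cached.clear()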
aa876a8d10d1b599cd57f8c74facd8b59b773aa6
diff --git a/bibliopixel/drivers/SimPixel/driver.py b/bibliopixel/drivers/SimPixel/driver.py index <HASH>..<HASH> 100644 --- a/bibliopixel/drivers/SimPixel/driver.py +++ b/bibliopixel/drivers/SimPixel/driver.py @@ -1,7 +1,7 @@ import errno, struct, threading, uuid from ... util import log from .. driver_base import DriverBase -from . import websocket +from . websocket import Server ADDRESS_IN_USE_ERROR = """ @@ -29,34 +29,23 @@ class SimPixel(DriverBase): self.set_pixel_positions(pixel_positions) def start(self): - log.debug('Starting server...') - desc = dict(driver=self, - pixel_positions=self.pixel_positions, - selectInterval=0.001) try: - self.server = websocket.Server( - '', self.port, websocket.Client, **desc) + self.server = Server(self.port, driver=self, selectInterval=0.001) + except OSError as e: if e.errno == errno.EADDRINUSE: e.strerror += ADDRESS_IN_USE_ERROR.format(self.port) e.args = (e.errno, e.strerror) raise - self.thread = threading.Thread(target=self._serve_forever, daemon=True) - self.thread.start() + def set_pixel_positions(self, pixel_positions): + if self.server: + log.warning('set_pixel_positions after server has started') - def _serve_forever(self): - try: - self.server.serveforever() - except: - pass - log.debug('WebSocket Server closed') + # Flatten list of led positions. + pl = [c for p in pixel_positions for c in p] + self.pixel_positions = bytearray(struct.pack('<%sh' % len(pl), *pl)) - def set_pixel_positions(self, pixel_positions): - if not (self.thread and self.thread.is_alive()): - # Flatten list of led positions. - pl = [c for p in pixel_positions for c in p] - self.pixel_positions = bytearray(struct.pack('<%sh' % len(pl), *pl)) def add_websock(self, oid, send_pixels): self.websocks[oid] = send_pixels diff --git a/bibliopixel/drivers/SimPixel/websocket.py b/bibliopixel/drivers/SimPixel/websocket.py index <HASH>..<HASH> 100644 --- a/bibliopixel/drivers/SimPixel/websocket.py +++ b/bibliopixel/drivers/SimPixel/websocket.py @@ -1,17 +1,14 @@ -import uuid +import threading, uuid from ... util import log from . SimpleWebSocketServer import WebSocket, SimpleWebSocketServer -Server = SimpleWebSocketServer - class Client(WebSocket): - def __init__(self, *args, driver, pixel_positions): + def __init__(self, *args, driver): super().__init__(*args) self.driver = driver self.connected = False - self.pixel_positions = pixel_positions self.oid = None log.debug('Server started...') @@ -20,7 +17,7 @@ class Client(WebSocket): self.connected = True self.oid = uuid.uuid1() self.driver.add_websock(self.oid, self.send_pixels) - self.sendMessage(bytearray([0x00, 0x00]) + self.pixel_positions) + self.sendMessage(bytearray([0x00, 0x00]) + self.driver.pixel_positions) def handleClose(self): self.driver.remove_websock(self.oid) @@ -33,3 +30,28 @@ class Client(WebSocket): def send_pixels(self, pixels): if self.connected: self.sendMessage(bytearray([0x00, 0x01]) + pixels) + + +class Server: + + def __init__(self, port, **kwds): + self.ws_server = SimpleWebSocketServer('', port, Client, **kwds) + self.thread = threading.Thread(target=self.target, daemon=True) + self.thread.start() + + def stop(self): + self.ws_server.stop() + + def close(self): + self.ws_server.close() + + def is_alive(self): + return self.thread.is_alive() + + def target(self): + log.info('Starting WebSocket server thread...') + try: + self.ws_server.serveforever() + except: + pass + log.info('WebSocket server closed')
Move networking and threading out of the SimPixel driver, and into the websocket Server.
ManiacalLabs_BiblioPixel
train
b84a136cbf3081d4893179224e7c47e84c7035ac
diff --git a/go/vt/binlog/mysqlbinlog.go b/go/vt/binlog/mysqlbinlog.go index <HASH>..<HASH> 100644 --- a/go/vt/binlog/mysqlbinlog.go +++ b/go/vt/binlog/mysqlbinlog.go @@ -5,7 +5,6 @@ package binlog import ( - "bytes" "fmt" "io" "os/exec" @@ -61,10 +60,6 @@ type logWrapper struct { } func (lwp *logWrapper) Write(p []byte) (n int, err error) { - if bytes.HasPrefix(p, []byte("WARNING")) { - log.Warningf("%s", p) - } else { - log.Errorf("%s", p) - } + log.Infof("%s", p) return len(p), nil } diff --git a/go/vt/binlog/mysqlbinlog_test.go b/go/vt/binlog/mysqlbinlog_test.go index <HASH>..<HASH> 100644 --- a/go/vt/binlog/mysqlbinlog_test.go +++ b/go/vt/binlog/mysqlbinlog_test.go @@ -100,29 +100,6 @@ func TestExitFail(t *testing.T) { } } -func TestWarning(t *testing.T) { - env := setup("echo WARNING $* 1>&2", 1) - defer cleanup(env) - - mbl := &MysqlBinlog{} - out, err := mbl.Launch("db", "name", 10) - if err != nil { - panic(err) - } - ioutil.ReadAll(out) - mbl.Wait() - logutil.Flush() - warnbytes, err := ioutil.ReadFile(path.Join(os.TempDir(), "binlog.test.WARNING")) - if err != nil { - t.Error(err) - } - got := string(warnbytes) - want := "WARNING --database=db --start-position=10 name" - if !strings.Contains(got, want) { - t.Errorf("want '%s' in '%s'", want, got) - } -} - func TestError(t *testing.T) { env := setup("echo ERROR expected error $* 1>&2", 1) defer cleanup(env) @@ -135,7 +112,7 @@ func TestError(t *testing.T) { ioutil.ReadAll(out) mbl.Wait() logutil.Flush() - warnbytes, err := ioutil.ReadFile(path.Join(os.TempDir(), "binlog.test.ERROR")) + warnbytes, err := ioutil.ReadFile(path.Join(os.TempDir(), "binlog.test.INFO")) if err != nil { t.Error(err) }
mysqlbinlog info log change: redirect mysqlbinlog output to the info log instead of error or warning. Those 'error' messages are business as usual, not really errors.
vitessio_vitess
train
e7b5ee3b2694e0db8251e62066292af2df1600a2
diff --git a/fritzhome/__main__.py b/fritzhome/__main__.py index <HASH>..<HASH> 100644 --- a/fritzhome/__main__.py +++ b/fritzhome/__main__.py @@ -61,7 +61,7 @@ def energy(context, features): for actor in fritz.get_actors(): click.echo("{} ({}): {:.2f} Watt current, {:.3f} wH total, {:.2f} °C".format( - actor.name, + actor.name.encode('utf-8'), actor.actor_id, (actor.get_power() or 0.0) / 1000, (actor.get_energy() or 0.0) / 100, diff --git a/fritzhome/fritz.py b/fritzhome/fritz.py index <HASH>..<HASH> 100644 --- a/fritzhome/fritz.py +++ b/fritzhome/fritz.py @@ -135,7 +135,7 @@ class FritzBox(object): url = self.base_url + '/webservices/homeautoswitch.lua' response = self.session.get(url, params=params, timeout=10) response.raise_for_status() - return response.text.strip() + return response.text.strip().encode('utf-8') def get_switch_actors(self): """
Added support for UTF-8 encoded characters in actor names
DerMitch_fritzbox-smarthome
train
9ee171756bfef91ac7eb99132f78166da506badc
diff --git a/api.go b/api.go index <HASH>..<HASH> 100644 --- a/api.go +++ b/api.go @@ -20,11 +20,12 @@ import ( ) type Client struct { - Token string - SIOEndpoint url.URL - Http http.Client - Insecure string - ShowBody bool + Token string + SIOEndpoint url.URL + Http http.Client + Insecure string + ShowBody bool + configConnect *ConfigConnect } type Cluster struct { @@ -41,12 +42,8 @@ type ClientPersistent struct { client *Client } -var clientPersistentGlobal ClientPersistent - func (client *Client) Authenticate(configConnect *ConfigConnect) (Cluster, error) { - clientPersistentGlobal.configConnect = configConnect - clientPersistentGlobal.client = client - + client.configConnect = configConnect endpoint := client.SIOEndpoint endpoint.Path += "/login" @@ -134,7 +131,7 @@ func (client *Client) retryCheckResp(httpClient *http.Client, req *http.Request) } if resp.StatusCode == 401 && errBody.MajorErrorCode == 0 { - _, err := clientPersistentGlobal.client.Authenticate(clientPersistentGlobal.configConnect) + _, err := client.Authenticate(client.configConnect) if err != nil { return nil, fmt.Errorf("Error re-authenticating: %s", err) } @@ -142,7 +139,7 @@ func (client *Client) retryCheckResp(httpClient *http.Client, req *http.Request) ioutil.ReadAll(resp.Body) resp.Body.Close() - req2.SetBasicAuth("", clientPersistentGlobal.client.Token) + req2.SetBasicAuth("", client.Token) resp, errBody, err = client.checkResp(httpClient.Do(req2)) if err != nil { return &http.Response{}, errors.New(errBody.Message)
Fixed reauth problem with multiple SIO instances. This commit removes the global config parameter and replaces it with a client-specific parameter.
thecodeteam_goscaleio
train
655695c204ee3baf55bf1ec091144e5e973e0e83
diff --git a/danceschool/core/admin.py b/danceschool/core/admin.py index <HASH>..<HASH> 100644 --- a/danceschool/core/admin.py +++ b/danceschool/core/admin.py @@ -383,7 +383,7 @@ class EventChildAdmin(PolymorphicChildModelAdmin): def uuidLink(self,obj): address = reverse('singleClassRegistration', args=[obj.uuid,]) return mark_safe('<a href="%s">%s</a>' % (address, address)) - uuidLink.short_description = _('Individual Class Registration Link') + uuidLink.short_description = _('Direct Registration Link') uuidLink.allow_tags = True # This is needed so that when an event is created, the year and month @@ -464,15 +464,11 @@ class SeriesAdmin(FrontendEditableAdminMixin, EventChildAdmin): fieldsets = ( (None, { - 'fields': ('classDescription','location','pricingTier',('special','allowDropins')) + 'fields': ('classDescription','location','pricingTier',('special','allowDropins'),('uuidLink',)), }), - (_('Override Display/Registration'), { + (_('Override Display/Registration/Capacity'), { 'classes': ('collapse',), - 'fields': ('status','closeAfterDays','uuidLink',), - }), - (_('Override Default Capacity/Drop-In Price'), { - 'classes': ('collapse',), - 'fields': ('capacity','dropinPrice'), + 'fields': ('status','closeAfterDays','capacity',), }), )
Fixed SeriesAdmin issue with deleted dropinPrice field.
django-danceschool_django-danceschool
train
eaaa551739d9d33d241b1c13ca9bf44d7224c82e
diff --git a/Controller/ContactSourceController.php b/Controller/ContactSourceController.php index <HASH>..<HASH> 100644 --- a/Controller/ContactSourceController.php +++ b/Controller/ContactSourceController.php @@ -42,6 +42,17 @@ class ContactSourceController extends FormController */ public function indexAction($page = 1) { + // When the user inserts a numeric value, assume they want to find the entity by ID. + $session = $this->get('session'); + $search = $this->request->get('search', $session->get('mautic.'.$this->getSessionBase().'.filter', '')); + if (isset($search) && is_numeric(trim($search))) { + $search = 'ids:'.trim($search); + $query = $this->request->query->all(); + $query['search'] = $search; + $this->request = $this->request->duplicate($query); + $session->set('mautic.'.$this->getSessionBase().'.filter', $search); + } + return parent::indexStandard($page); }
[ENG-<I>] Improve search by ID.
TheDMSGroup_mautic-contact-source
train
9f231faf298ec898422aef43208acfa23e3c1d19
diff --git a/mpd/base.py b/mpd/base.py index <HASH>..<HASH> 100644 --- a/mpd/base.py +++ b/mpd/base.py @@ -288,7 +288,6 @@ class MPDClientBase(object): @mpd_commands('list', is_direct=True) def _parse_list_groups(self, lines): - lines = iter(lines) return self._parse_objects_direct(lines, lookup_delimiter=True) @mpd_commands('readmessages', is_direct=True)
Fix: error with command 'list' in asyncio. When the 'list' command is executed using the asyncio module, the following error is raised: TypeError: 'Queue' object is not iterable
Mic92_python-mpd2
train
24239317471f91015fa4d800d8cc1b450aad30e7
diff --git a/Pragma/ORM/Relation.php b/Pragma/ORM/Relation.php index <HASH>..<HASH> 100644 --- a/Pragma/ORM/Relation.php +++ b/Pragma/ORM/Relation.php @@ -255,6 +255,11 @@ class Relation{ } } + $loaders = isset($overriding['loaders']) ? $overriding['loaders'] : $this->conditionnal_loading; + if( ! empty($loaders) ){ + call_user_func($loaders, $qb); + } + return $this->type == 'has_one' || $this->type == 'belongs_to' ? $qb->first() : $qb->get_objects(); break; case 'has_many_through': @@ -263,6 +268,23 @@ class Relation{ throw \Exception("Missing part(s) of sub_relation ".$this->name); } + $loading_left = $loading_right = null; + $source = isset($overriding['loaders']) ? $overriding['loaders'] : $this->conditionnal_loading; + if( !empty($source) ){ + if( ! is_array($source) ){ + $loading_left = $loading_right = $source; + } + else{ + if( isset($source['left']) ){ + $loading_left = $source['left']; + } + + if( isset($source['right']) ){ + $loading_right = $source['right']; + } + } + } + $matchers = isset($this->sub_relation['matchers']) ? $this->sub_relation['matchers'] : []; if(isset($overriding['matchers'])){ $matchers = $overriding['matchers']; @@ -284,6 +306,10 @@ class Relation{ $qb1->where($to, '=', $model->$on); } + if( ! is_null($loading_left) ){ + call_user_func($loading_left, $qb1); + } + $remote_ids = $qb1->select([$this->sub_relation['right']['on']])->get_arrays($this->sub_relation['right']['on']); if(empty($remote_ids)){ @@ -309,6 +335,10 @@ class Relation{ $qb2->where($to, '=', $model->$on); } + if( ! is_null($loading_right) ){ + call_user_func($loading_right, $qb2); + } + return $qb2->get_objects(); break;
Relation: apply loaders in fetch() method
pragma-framework_core
train
dec2bab173ff867324a8d5c515fca7e0d1b8a465
diff --git a/pkg/image/registry/imagestreamimport/rest.go b/pkg/image/registry/imagestreamimport/rest.go index <HASH>..<HASH> 100644 --- a/pkg/image/registry/imagestreamimport/rest.go +++ b/pkg/image/registry/imagestreamimport/rest.go @@ -100,30 +100,28 @@ func (r *REST) Create(ctx apirequest.Context, obj runtime.Object) (runtime.Objec if !ok { return nil, kapierrors.NewBadRequest("unable to get user from context") } - isCreateImage, err := r.sarClient.Create( + isCreateImage, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user, &authorizationapi.SubjectAccessReview{ - User: user.GetName(), Action: authorizationapi.Action{ Verb: "create", Group: api.GroupName, Resource: "images", }, }, - ) + )) if err != nil { return nil, err } - isCreateImageStreamMapping, err := r.sarClient.Create( + isCreateImageStreamMapping, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user, &authorizationapi.SubjectAccessReview{ - User: user.GetName(), Action: authorizationapi.Action{ Verb: "create", Group: api.GroupName, Resource: "imagestreammapping", }, }, - ) + )) if err != nil { return nil, err }
Populate user in subject access review correctly
openshift_origin
train
e9b0477c6405b776abaabf8692af697cdb3eaac6
diff --git a/scratchapi.js b/scratchapi.js index <HASH>..<HASH> 100644 --- a/scratchapi.js +++ b/scratchapi.js @@ -206,11 +206,18 @@ Scratch.CloudSession.prototype.connect = function(cb) { Scratch.CloudSession.prototype.end = function() { this.connection.end(); }; +Scratch.CloudSession.prototype.get = function(name) { + return this._variables[name]; +}; +Scratch.CloudSession.prototype.set = function(name, value) { + this._variables[name] = value; + this._sendSet(name, value); +}; Scratch.CloudSession.prototype._handlePacket = function(packet) { switch (packet.method) { case 'set': if (!({}).hasOwnProperty.call(this.variables, packet.name)) { - this.addVariable(packet.name, packet.value) + this._addVariable(packet.name, packet.value) } this._variables[packet.name] = packet.value; this.emit('set', packet.name, packet.value); @@ -250,11 +257,10 @@ Scratch.CloudSession.prototype._addVariable = function(name, value) { this._variables[name] = value; Object.defineProperty(this.variables, name, { get: function() { - return self._variables; + return self.get(name); }, set: function(value) { - self._variables[name] = value; - self._sendSet(name, value); + self.set(name, value); } }); };
Add more uniform ways to access cloud variables
trumank_scratch-api
train
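Routing the defineProperty getter/setter through public get()/set() methods gives a single code path for reads and writes (and fixes the old getter, which returned the whole variables map instead of one value). A comparable accessor sketch in Python, names hypothetical:

class CloudSession:
    def __init__(self):
        self._variables = {}

    def get(self, name):
        return self._variables[name]

    def set(self, name, value):
        self._variables[name] = value
        self._send_set(name, value)   # every write also notifies the server

    def _send_set(self, name, value):
        print(f"send set {name}={value}")

session = CloudSession()
session.set("score", 10)
print(session.get("score"))   # 10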
cc51ef69ed7cbd384dcd7326b1d2d526390cb3ac
diff --git a/src/models/entities.js b/src/models/entities.js index <HASH>..<HASH> 100644 --- a/src/models/entities.js +++ b/src/models/entities.js @@ -185,7 +185,7 @@ var EntitiesModel = Model.extend({ utils.find(this.select, function(d) { return d[dimension] === value; - }).labelOffset = xy; + }).labelOffset = [Math.round(xy[0]*1000)/1000, Math.round(xy[1]*1000)/1000]; //force the model to trigger events even if value is the same this.set("select", this.select, true);
Round labelOffset[] so that numbers are not super long in the URL
vizabi_vizabi
train
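Math.round(xy * 1000) / 1000 truncates each offset to three decimal places so the serialized state (here, the URL) stays short. The same trick in Python, for illustration only:

def round3(value):
    # Keep three decimals; avoids long float tails in serialized output.
    return round(value * 1000) / 1000

offset = [0.12345678, -7.98765432]
print([round3(v) for v in offset])   # [0.123, -7.988]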
bfc900f128069b256afccb9fb7c8ec8a9b4c4db3
diff --git a/webpack.dev.config.js b/webpack.dev.config.js index <HASH>..<HASH> 100644 --- a/webpack.dev.config.js +++ b/webpack.dev.config.js @@ -32,7 +32,6 @@ module.exports = { plugins: [ new webpack.HotModuleReplacementPlugin(), - new webpack.NoErrorsPlugin(), new ExtractTextPlugin('app.css', { allChunks: true }) ] };
Don't use NoErrorsPlugin with React Hot Loader
moroshko_react-autosuggest
train
0f400b2b161c159980a0e8bf79193c16b458cc61
diff --git a/lntest/itest/lnd_payment_test.go b/lntest/itest/lnd_payment_test.go index <HASH>..<HASH> 100644 --- a/lntest/itest/lnd_payment_test.go +++ b/lntest/itest/lnd_payment_test.go @@ -581,24 +581,34 @@ func testBidirectionalAsyncPayments(net *lntest.NetworkHarness, t *harnessTest) // Next query for Bob's and Alice's channel states, in order to confirm // that all payment have been successful transmitted. - ctxt, _ = context.WithTimeout(ctxb, defaultTimeout) - bobInfo, err := getChanInfo(ctxt, net.Bob) - if err != nil { - t.Fatalf("unable to get bob's channel info: %v", err) - } + err = wait.NoError(func() error { + ctxt, _ = context.WithTimeout(ctxb, defaultTimeout) + bobInfo, err := getChanInfo(ctxt, net.Bob) + if err != nil { + t.Fatalf("unable to get bob's channel info: %v", err) + } - if bobInfo.LocalBalance != bobAmt { - t.Fatalf("bob's local balance is incorrect, got %v, expected"+ - " %v", bobInfo.LocalBalance, bobAmt) - } - if bobInfo.RemoteBalance != aliceAmt { - t.Fatalf("bob's remote balance is incorrect, got %v, "+ - "expected %v", bobInfo.RemoteBalance, aliceAmt) - } - if len(bobInfo.PendingHtlcs) != 0 { - t.Fatalf("bob's pending htlcs is incorrect, got %v, "+ - "expected %v", len(bobInfo.PendingHtlcs), 0) - } + if bobInfo.LocalBalance != bobAmt { + return fmt.Errorf("bob's local balance is incorrect, "+ + "got %v, expected %v", bobInfo.LocalBalance, + bobAmt) + } + + if bobInfo.RemoteBalance != aliceAmt { + return fmt.Errorf("bob's remote balance is incorrect, "+ + "got %v, expected %v", bobInfo.RemoteBalance, + aliceAmt) + } + + if len(bobInfo.PendingHtlcs) != 0 { + return fmt.Errorf("bob's pending htlcs is incorrect, "+ + "got %v, expected %v", + len(bobInfo.PendingHtlcs), 0) + } + + return nil + }, defaultTimeout) + require.NoError(t.t, err) // Finally, immediately close the channel. This function will also // block until the channel is closed and will additionally assert the
itests: payments test flake fix
lightningnetwork_lnd
train
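Wrapping the channel-state assertions in wait.NoError retries the whole check until a timeout instead of asserting once, the usual cure for timing flakes. A generic retry-until-timeout helper in Python — the pattern only, not lnd's Go harness:

import time

def wait_no_error(check, timeout=10.0, interval=0.2):
    # Retry check() until it returns None or the timeout expires.
    deadline = time.monotonic() + timeout
    last_err = "never ran"
    while time.monotonic() < deadline:
        last_err = check()
        if last_err is None:
            return
        time.sleep(interval)
    raise AssertionError(f"condition not met before timeout: {last_err}")

state = {"pending_htlcs": 3}

def check():
    state["pending_htlcs"] -= 1   # stand-in for HTLCs settling over time
    if state["pending_htlcs"] != 0:
        return f"still {state['pending_htlcs']} pending htlcs"
    return None

wait_no_error(check, timeout=2.0, interval=0.01)
print("settled")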
191d3eba446959436d9fc4ba521129d5e71d65fa
diff --git a/src/events/keyboard.js b/src/events/keyboard.js index <HASH>..<HASH> 100644 --- a/src/events/keyboard.js +++ b/src/events/keyboard.js @@ -61,9 +61,11 @@ let __keyboardHandler = function() { this.__addEvent(container, 'keydown', (evt) => { if (!isFocused) return; + const { options, parents, movementLocked } = this; + evt = getOriginalEvent(evt); - let delta = getKeyDelta(evt.keyCode || evt.which); + const delta = getKeyDelta(evt.keyCode || evt.which); if (!delta) return; @@ -79,13 +81,13 @@ let __keyboardHandler = function() { return this.__updateThrottle(); } - if (this.__isOntoEdge('x', x) || this.__isOntoEdge('y', y)) { - this.__autoLockMovement(); - } - evt.preventDefault(); - const { speed } = this.options; + this.__unlockMovement(); // handle for multi keypress + if (x && this.__isOntoEdge('x', x)) movementLocked.x = true; + if (y && this.__isOntoEdge('y', y)) movementLocked.y = true; + + const { speed } = options; this.__addMovement(x * speed, y * speed); });
fix(keypress): fix movement lock on multi keypress
idiotWu_smooth-scrollbar
train
ded625db04c51a305d52ef9f3cc5a604c682695a
diff --git a/flex/pkg/volume/delete.go b/flex/pkg/volume/delete.go index <HASH>..<HASH> 100644 --- a/flex/pkg/volume/delete.go +++ b/flex/pkg/volume/delete.go @@ -36,8 +36,11 @@ func (p *flexProvisioner) Delete(volume *v1.PersistentVolume) error { return &controller.IgnoredError{Reason: strerr} } + extraOptions := map[string]string{} + extraOptions[optionPVorVolumeName] = volume.Name + call := p.NewDriverCall(p.execCommand, deleteCmd) - call.AppendSpec(volume.Spec.FlexVolume.Options, nil) + call.AppendSpec(volume.Spec.FlexVolume.Options, extraOptions) output, err := call.Run() if err != nil {
flex: Add PV name to delete
kubernetes-incubator_external-storage
train
35e4ee331593cf3a24d5ae254381bc3ef9cdaf15
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -262,7 +262,7 @@ AsyncHelpers.prototype.resolve = function(key, cb) { try { res = stashed.fn.apply(stashed.thisArg, args); } catch (err) { - return next(err); + return next(formatError(err, stashed)); } if (!stashed.fn.async) { return next(null, res); @@ -288,3 +288,9 @@ function once (fn) { return fn.value; }; } + +function formatError(err, helper) { + err.message += ' [resolving `' + helper.name + '`]'; + err.helper = helper; + return err; +}
Extending Error information with .helper
doowb_async-helpers
train
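Catching the helper's exception and rethrowing it with the helper's name appended (plus a .helper attribute for programmatic access) keeps the original failure while adding context. A Python equivalent of that decoration, names invented:

def format_error(err, helper_name):
    # Append context to the message and attach the helper for inspection.
    err.args = (f"{err.args[0]} [resolving `{helper_name}`]",)
    err.helper = helper_name
    return err

def resolve(fn, name):
    try:
        return fn()
    except Exception as err:
        raise format_error(err, name)

try:
    resolve(lambda: 1 / 0, "divide")
except ZeroDivisionError as err:
    print(err, "/", err.helper)   # division by zero [resolving `divide`] / divide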
4379204d317f409d77656ea7895087e6f0dcdf00
diff --git a/lib/sad_panda/polarity.rb b/lib/sad_panda/polarity.rb index <HASH>..<HASH> 100644 --- a/lib/sad_panda/polarity.rb +++ b/lib/sad_panda/polarity.rb @@ -18,7 +18,7 @@ module SadPanda score_polarities_for(frequencies_for(words)) - polarities.empty? ? 5.0 : (polarities.sum / polarities.length) + polarities.empty? ? 5.0 : (polarities.inject(0){ |sum,x| sum + x } / polarities.length) end private diff --git a/lib/sad_panda/version.rb b/lib/sad_panda/version.rb index <HASH>..<HASH> 100644 --- a/lib/sad_panda/version.rb +++ b/lib/sad_panda/version.rb @@ -1,3 +1,3 @@ module SadPanda - VERSION = "1.0.1" + VERSION = '1.1.0' end diff --git a/spec/sad_panda/bank/emotions_spec.rb b/spec/sad_panda/bank/emotions_spec.rb index <HASH>..<HASH> 100644 --- a/spec/sad_panda/bank/emotions_spec.rb +++ b/spec/sad_panda/bank/emotions_spec.rb @@ -13,7 +13,7 @@ describe SadPanda::Bank do end it 'has all the emotions as keys' do - expect(emotions.keys).to eq %i[anger disgust joy surprise fear sadness] + expect(emotions.keys).to eq [:anger, :disgust, :joy, :surprise, :fear, :sadness] end context 'constants' do diff --git a/spec/sad_panda_spec.rb b/spec/sad_panda_spec.rb index <HASH>..<HASH> 100644 --- a/spec/sad_panda_spec.rb +++ b/spec/sad_panda_spec.rb @@ -89,7 +89,7 @@ describe SadPanda do end it 'returns 5.0 for a neutral' do - expect(SadPanda.polarity('This is surprising')).to be 5.0 + expect(SadPanda.polarity('This is surprising')).to eq 5.0 end end end
Why: Ruby <I> syntax needs to be reverted for other versions. What: Changed Array#sum to an inject block. Version updated.
mattThousand_sad_panda
train
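Array#sum only exists on newer Rubies, so the commit swaps it for an inject fold that also runs on older versions. The same fold spelled with Python's functools.reduce, purely to show the shape:

from functools import reduce

polarities = [4.0, 6.0, 5.0]

# Equivalent of Ruby's polarities.inject(0) { |sum, x| sum + x }
total = reduce(lambda acc, x: acc + x, polarities, 0)
mean = total / len(polarities) if polarities else 5.0
print(mean)   # 5.0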
3c5f3df8fc8071f1b6791ef251deacf62abf0cae
diff --git a/kafka_utils/kafka_cluster_manager/cmds/command.py b/kafka_utils/kafka_cluster_manager/cmds/command.py index <HASH>..<HASH> 100644 --- a/kafka_utils/kafka_cluster_manager/cmds/command.py +++ b/kafka_utils/kafka_cluster_manager/cmds/command.py @@ -19,6 +19,7 @@ import logging import sys from collections import defaultdict +import humanfriendly import six from six.moves import input @@ -227,8 +228,8 @@ class ClusterManagerCmd(object): self.log.warning( '--max-movement-size={max_movement_size} is too small, using smallest size' ' in set of partitions to move, {smallest_size} instead to force progress'.format( - max_movement_size=max_movement_size, - smallest_size=smallest_size, + max_movement_size=humanfriendly.format_size(max_movement_size), + smallest_size=humanfriendly.format_size(smallest_size), ) ) max_movement_size = smallest_size diff --git a/kafka_utils/kafka_cluster_manager/cmds/decommission.py b/kafka_utils/kafka_cluster_manager/cmds/decommission.py index <HASH>..<HASH> 100644 --- a/kafka_utils/kafka_cluster_manager/cmds/decommission.py +++ b/kafka_utils/kafka_cluster_manager/cmds/decommission.py @@ -18,6 +18,8 @@ from __future__ import print_function import logging import sys +import humanfriendly + from .command import ClusterManagerCmd from .command import DEFAULT_MAX_MOVEMENT_SIZE from kafka_utils.util import positive_float @@ -118,8 +120,9 @@ class DecommissionCmd(ClusterManagerCmd): if self.args.auto_max_movement_size: self.args.max_movement_size = largest_size self.log.info( - 'Auto-max-movement-size: using {max_movement_size} as' + 'Auto-max-movement-size: using {human_max_movement_size} ({max_movement_size}) as' ' max-movement-size.'.format( + human_max_movement_size=humanfriendly.format_size(self.args.max_movement_size), max_movement_size=self.args.max_movement_size, ) ) @@ -130,9 +133,9 @@ class DecommissionCmd(ClusterManagerCmd): 'Max partition movement size is only {max_movement_size},' ' but remaining partitions to move range from {smallest_size} to' ' {largest_size}. The decommission will not make progress'.format( - max_movement_size=self.args.max_movement_size, - smallest_size=smallest_size, - largest_size=largest_size, + max_movement_size=humanfriendly.format_size(self.args.max_movement_size), + smallest_size=humanfriendly.format_size(smallest_size), + largest_size=humanfriendly.format_size(largest_size), ) ) sys.exit(1) @@ -141,9 +144,9 @@ class DecommissionCmd(ClusterManagerCmd): 'Max partition movement size is only {max_movement_size},' ' but remaining partitions to move range from {smallest_size} to' ' {largest_size}. The decommission may be slower than expected'.format( - max_movement_size=self.args.max_movement_size, - smallest_size=smallest_size, - largest_size=largest_size, + max_movement_size=humanfriendly.format_size(self.args.max_movement_size), + smallest_size=humanfriendly.format_size(smallest_size), + largest_size=humanfriendly.format_size(largest_size), ) ) diff --git a/requirements.txt b/requirements.txt index <HASH>..<HASH> 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,6 +6,7 @@ cffi==1.12.2 ecdsa==0.13 enum34==1.1.6 futures==3.2.0 +humanfriendly==4.8 jsonschema==3.0.1 kafka-python==1.4.4 kazoo==2.6.1 diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -49,6 +49,7 @@ setup( "scripts/kafka-corruption-check", ], install_requires=[ + "humanfriendly>=4.8", "kafka-python>=1.3.2,<1.5.0", "kazoo>=2.0,<3.0.0", "PyYAML>3.10",
Human friendly output for bytes in decommission
Yelp_kafka-utils
train
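humanfriendly.format_size turns raw byte counts into readable strings for the log messages. The library is a real dependency added by this diff; the sizes below are made up:

import humanfriendly

print(humanfriendly.format_size(1000000))   # '1 MB' (decimal units by default)
smallest = 157286400                        # hypothetical smallest partition, 150 MiB
print(humanfriendly.format_size(smallest))  # e.g. '157.29 MB'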
25035ecbc9a86160eb232263e608c558dc2e82e4
diff --git a/source/Internal/Domain/Review/Bridge/UserReviewBridgeInterface.php b/source/Internal/Domain/Review/Bridge/UserReviewBridgeInterface.php index <HASH>..<HASH> 100644 --- a/source/Internal/Domain/Review/Bridge/UserReviewBridgeInterface.php +++ b/source/Internal/Domain/Review/Bridge/UserReviewBridgeInterface.php @@ -11,7 +11,7 @@ use OxidEsales\EshopCommunity\Internal\Framework\Dao\EntryDoesNotExistDaoExcepti /** * @stable - * @see StableAnnotation + * @see OxidEsales/EshopCommunity/Internal/README.md */ interface UserReviewBridgeInterface {
OXDEV-<I> Fix annotation for UserReviewBridgeInterface
OXID-eSales_oxideshop_ce
train
60ee2e334977ad8ec6be1458db2b3723b9eafea5
diff --git a/lib/Auth/Driver/DatabaseDriver.php b/lib/Auth/Driver/DatabaseDriver.php index <HASH>..<HASH> 100644 --- a/lib/Auth/Driver/DatabaseDriver.php +++ b/lib/Auth/Driver/DatabaseDriver.php @@ -190,7 +190,39 @@ class DatabaseDriver $this->reset(self::STATUS_SIGNED_OUT); } - public function updateSession() + protected function startSession() + { + if ($this->session !== null) + $this->session->destroy(); + + $this->session = $this->session_service->get(); + + $this->updateSessionData(); + + $this->token = $this->session->getId(); + + $token = new Token(); + $token->setToken($this->token); + $token->setUser($this->user); + $token->save(); + + // Clear old tokens in the database + $old_date = (new \DateTime())->modify('-' . $this->token_handler->getTokenLifetime() . ' second'); + + TokenQuery::create()->filterByUpdatedAt($old_date, '<')->delete(); + } + + protected function updateSession() + { + $token = TokenQuery::create()->findOneByToken($this->token); + + if ($token) + $token->setUpdatedAt(new \DateTime())->save(); + + $this->updateSessionData(); + } + + protected function updateSessionData() { $this->user->revealRoles(); @@ -201,21 +233,25 @@ class DatabaseDriver ->set('_user_delegations', $this->user->getDelegations()); } - protected function startSession() + public function invalidateSessions($user = null) { - if ($this->session !== null) - $this->session->destroy(); + if ($user === null) + $user = $this->user; - $this->session = $this->session_service->get(); + $old_date = (new \DateTime())->modify('-' . $this->token_handler->getTokenLifetime() . ' second'); - $this->updateSession(); + $tokens = TokenQuery::create() + ->filterByUser($user) + ->filterByUpdatedAt($old_date, '>=') + ->find(); - $this->token = $this->session->getId(); + foreach ($tokens as $token) + { + $session = $this->session_service->get($token->getToken()); - $token = new Token(); - $token->setToken($this->token); - $token->setUser($this->user); - $token->save(); + if ($session->has('_last_updated')) + $session->set('_last_updated', 0); + } } protected function reset($status) diff --git a/lib/Auth/TokenHandler/AbstractHandler.php b/lib/Auth/TokenHandler/AbstractHandler.php index <HASH>..<HASH> 100644 --- a/lib/Auth/TokenHandler/AbstractHandler.php +++ b/lib/Auth/TokenHandler/AbstractHandler.php @@ -9,4 +9,6 @@ abstract class AbstractHandler abstract public function setToken($token); abstract public function deleteToken(); + + abstract public function getTokenLifetime(); } \ No newline at end of file diff --git a/lib/Auth/TokenHandler/CookieHandler.php b/lib/Auth/TokenHandler/CookieHandler.php index <HASH>..<HASH> 100644 --- a/lib/Auth/TokenHandler/CookieHandler.php +++ b/lib/Auth/TokenHandler/CookieHandler.php @@ -30,4 +30,9 @@ class CookieHandler extends AbstractHandler { $this->cookie->delete('_session'); } + + public function getTokenLifetime() + { + return $this->cookie_lifetime; + } } \ No newline at end of file
Handle token clearing and updated-at refreshing; add invalidation of user sessions
perfumer_framework
train
751d8f6874248f7243698c6f8377983879d744e9
diff --git a/rabbithole_test.go b/rabbithole_test.go index <HASH>..<HASH> 100644 --- a/rabbithole_test.go +++ b/rabbithole_test.go @@ -269,7 +269,6 @@ var _ = Describe("Rabbithole", func() { It("Set cluster name", func() { previousClusterName, err := rmqc.GetClusterName() Ω(err).Should(BeNil()) - Ω(previousClusterName.Name).Should(Equal("rabbitmq@localhost")) cnStr := "rabbitmq@rabbit-hole-test" cn := ClusterName{Name: cnStr} resp, err := rmqc.SetClusterName(cn)
Don't make any assumptions about what the initial cluster name is. References #<I>.
michaelklishin_rabbit-hole
train
744e4a78f0fb372c4c54e0aa13fc0fd16a49a931
diff --git a/docs/components/TimeBasedLineChartDocs.js b/docs/components/TimeBasedLineChartDocs.js index <HASH>..<HASH> 100644 --- a/docs/components/TimeBasedLineChartDocs.js +++ b/docs/components/TimeBasedLineChartDocs.js @@ -36,6 +36,7 @@ const TimeBasedLineChartDocs = React.createClass({ breakPointLabel={'This Month'} data={lineChartData} height={400} + margin={{ top: 30, right: 0, bottom: 30, left: 75 }} rangeType={'month'} shadeBelowZero={true} showZeroLine={true} @@ -145,6 +146,7 @@ const TimeBasedLineChartDocs = React.createClass({ breakPointLabel={'This Month'} data={lineChartData} height={400} + margin={{ top: 30, right: 0, bottom: 30, left: 75 }} rangeType={'month'} shadeBelowZero={true} showZeroLine={true}
decided not to change docs in this PR
mxenabled_mx-react-components
train
81144423c93c59009dcf98efa9283363ae3b8e76
diff --git a/lib/modules/apostrophe-workflow-permissions/index.js b/lib/modules/apostrophe-workflow-permissions/index.js index <HASH>..<HASH> 100644 --- a/lib/modules/apostrophe-workflow-permissions/index.js +++ b/lib/modules/apostrophe-workflow-permissions/index.js @@ -29,6 +29,11 @@ module.exports = { return; } + // Types excluded from workflow should not be subject to any of this + if (!workflow.includeType(info.type)) { + return; + } + // Flunk any access to a nonexistent locale or, if // we don't have the private-locale permission, // any access to a private locale @@ -56,9 +61,6 @@ module.exports = { if (!manager) { return; } - if (!workflow.includeType(info.type)) { - return; - } var verb = info.verb; // publish is not a separate verb in workflow since we already control whether you can edit // in draft vs. live locales
if a type is exempt from workflow, do not complain that the locale is inaccessible to the editor
apostrophecms_apostrophe-workflow
train
562321e32b41c97e01c00d8588cb7bbcb1462d3b
diff --git a/core/ViewDataTable.php b/core/ViewDataTable.php index <HASH>..<HASH> 100644 --- a/core/ViewDataTable.php +++ b/core/ViewDataTable.php @@ -135,6 +135,8 @@ abstract class Piwik_ViewDataTable * @var array */ protected $columnsToDisplay = array(); + + protected $uniqIdTable = null; /** * Method to be implemented by the ViewDataTable_*. @@ -442,7 +444,7 @@ abstract class Piwik_ViewDataTable * @see datatable.js * @return string */ - protected function getUniqueIdViewDataTable() + protected function loadUniqueIdViewDataTable() { // if we request a subDataTable the $this->currentControllerAction DIV ID is already there in the page // we make the DIV ID really unique by appending the ID of the subtable requested @@ -461,6 +463,27 @@ abstract class Piwik_ViewDataTable } return $uniqIdTable; } + + /** + * Sets the $uniqIdTable variable that is used as the DIV ID in the rendered HTML + */ + public function setUniqueIdViewDataTable($uniqIdTable) + { + $this->viewProperties['uniqueId'] = $uniqIdTable; + $this->uniqIdTable = $uniqIdTable; + } + + /** + * Returns current value of $uniqIdTable variable that is used as the DIV ID in the rendered HTML + */ + public function getUniqueIdViewDataTable() + { + if( $this->uniqIdTable == null ) + { + $this->uniqIdTable = $this->loadUniqueIdViewDataTable(); + } + return $this->uniqIdTable; + } /** * Returns array of properties, eg. "show_footer", "show_search", etc.
Added methods to get the current value of, and to override, the $uniqIdTable variable that is used as the DIV ID in the rendered HTML. git-svn-id: <URL>
matomo-org_matomo
train
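The new getter computes the unique ID lazily on first access and caches it, while the setter lets callers override it before rendering. The same pattern reduced to Python:

class View:
    def __init__(self):
        self._uniq_id = None

    def set_unique_id(self, uniq_id):
        self._uniq_id = uniq_id

    def get_unique_id(self):
        if self._uniq_id is None:            # compute only on first access
            self._uniq_id = self._load_unique_id()
        return self._uniq_id

    def _load_unique_id(self):
        return "dataTable_42"                # stand-in for the real ID builder

view = View()
print(view.get_unique_id())    # lazily computed default
view.set_unique_id("custom")   # explicit override wins from now on
print(view.get_unique_id())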
2900cc4e2d9ef8e6889f23dd01bfcd2e4c4c991e
diff --git a/lib/with_model/base.rb b/lib/with_model/base.rb index <HASH>..<HASH> 100644 --- a/lib/with_model/base.rb +++ b/lib/with_model/base.rb @@ -5,6 +5,7 @@ end module WithModel class Base < ActiveRecord::Base + self.abstract_class = true class << self def with_model? true diff --git a/spec/active_record_behaviors_spec.rb b/spec/active_record_behaviors_spec.rb index <HASH>..<HASH> 100644 --- a/spec/active_record_behaviors_spec.rb +++ b/spec/active_record_behaviors_spec.rb @@ -1,60 +1,103 @@ require 'spec_helper' describe "ActiveRecord behaviors" do - describe "a temporary ActiveRecord model created with with_model that has a named_scope" do - before do - class RegularModel < ActiveRecord::Base - scope_method = - if respond_to?(:scope) && !protected_methods.include?('scope') - :scope # ActiveRecord 3.x - else - :named_scope # ActiveRecord 2.x - end - - send scope_method, :title_is_foo, :conditions => {:title => 'foo'} - end - RegularModel.connection.drop_table(RegularModel.table_name) rescue nil - RegularModel.connection.create_table(RegularModel.table_name) do |t| - t.string 'title' - t.text 'content' - t.timestamps + describe "a temporary ActiveRecord model created with with_model" do + context "that has a named_scope" do + before do + class RegularModel < ActiveRecord::Base + scope_method = + if respond_to?(:scope) && !protected_methods.include?('scope') + :scope # ActiveRecord 3.x + else + :named_scope # ActiveRecord 2.x + end + + send scope_method, :title_is_foo, :conditions => {:title => 'foo'} + end + RegularModel.connection.drop_table(RegularModel.table_name) rescue nil + RegularModel.connection.create_table(RegularModel.table_name) do |t| + t.string 'title' + t.text 'content' + t.timestamps + end end - end - after do - RegularModel.connection.drop_table(@model.table_name) rescue nil - end + after do + RegularModel.connection.drop_table(@model.table_name) rescue nil + end - with_model :blog_post do - table do |t| - t.string 'title' - t.text 'content' - t.timestamps + with_model :blog_post do + table do |t| + t.string 'title' + t.text 'content' + t.timestamps + end + + model do + scope_method = + if respond_to?(:scope) && !protected_methods.include?('scope') + :scope # ActiveRecord 3.x + else + :named_scope # ActiveRecord 2.x + end + + send scope_method, :title_is_foo, :conditions => {:title => 'foo'} + end end - model do - scope_method = - if respond_to?(:scope) && !protected_methods.include?('scope') - :scope # ActiveRecord 3.x - else - :named_scope # ActiveRecord 2.x - end + describe "the named scope" do + it "should work like a regular named scope" do + included = RegularModel.create!(:title => 'foo', :content => "Include me!") + excluded = RegularModel.create!(:title => 'bar', :content => "Include me!") + + RegularModel.title_is_foo.should == [included] - send scope_method, :title_is_foo, :conditions => {:title => 'foo'} + included = BlogPost.create!(:title => 'foo', :content => "Include me!") + excluded = BlogPost.create!(:title => 'bar', :content => "Include me!") + + BlogPost.title_is_foo.should == [included] + end end end - describe "the named scope" do - it "should work like a regular named scope" do - included = RegularModel.create!(:title => 'foo', :content => "Include me!") - excluded = RegularModel.create!(:title => 'bar', :content => "Include me!") + context "that has a polymorphic belongs_to" do + before do + class Animal < ActiveRecord::Base + has_many :tea_cups, :as => :pet + end + end + + with_model :tea_cup do + table do |t| + t.belongs_to :pet, :polymorphic => true + end + model do + belongs_to :pet, :polymorphic => true + end + end + + with_table :animals + + with_model :stuffed_animal do + table + model do + has_many :tea_cups, :as => :pet + end + end - RegularModel.title_is_foo.should == [included] + describe "the polymorphic belongs_to" do + it "should work like a regular polymorphic belongs_to" do + animal = Animal.create! + stuffed_animal = StuffedAnimal.create! - included = BlogPost.create!(:title => 'foo', :content => "Include me!") - excluded = BlogPost.create!(:title => 'bar', :content => "Include me!") + tea_cup_for_animal = TeaCup.create!(:pet => animal) + tea_cup_for_animal.pet_type.should == "Animal" + animal.tea_cups.should include(tea_cup_for_animal) - BlogPost.title_is_foo.should == [included] + tea_cup_for_stuffed_animal = TeaCup.create!(:pet => stuffed_animal) + tea_cup_for_stuffed_animal.pet_type.should == "StuffedAnimal" + stuffed_animal.tea_cups.should include(tea_cup_for_stuffed_animal) + end end end end diff --git a/spec/with_model_spec.rb b/spec/with_model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/with_model_spec.rb +++ b/spec/with_model_spec.rb @@ -58,6 +58,10 @@ describe "a temporary ActiveRecord model created with with_model" do BlogPost.with_model?.should be_true end end + + it "should have a base_class of itself" do + BlogPost.base_class.should == BlogPost + end end context "after an example which uses with_model without shadowing an existing constant" do
WithModel::Base should have abstract_class = true.
Casecommons_with_model
train
78420e0c89c31ae9c8c6c46c3c48781accef94ff
diff --git a/src/Jira/Api/Client/MemcacheProxyClient.php b/src/Jira/Api/Client/MemcacheProxyClient.php index <HASH>..<HASH> 100644 --- a/src/Jira/Api/Client/MemcacheProxyClient.php +++ b/src/Jira/Api/Client/MemcacheProxyClient.php @@ -82,7 +82,9 @@ class MemcacheProxyClient implements ClientInterface $debug = false ) { if ( $method == 'GET' ) { - if ( $result = $this->getFromCache($url, $data, $endpoint) ) { + $result = $this->getFromCache($url, $data, $endpoint); + + if ( $result ) { // $this->setCache($url, $data, $endpoint, $result); return $result; } diff --git a/src/Jira/Api/Client/PHPClient.php b/src/Jira/Api/Client/PHPClient.php index <HASH>..<HASH> 100644 --- a/src/Jira/Api/Client/PHPClient.php +++ b/src/Jira/Api/Client/PHPClient.php @@ -99,7 +99,7 @@ class PHPClient implements ClientInterface $context = array( 'http' => array( 'method' => $method, - 'header' => join("\r\n", $header), + 'header' => implode("\r\n", $header), ), ); @@ -126,7 +126,7 @@ class PHPClient implements ClientInterface $header[] = sprintf('Content-Length: %d', strlen($__data)); - $context['http']['header'] = join("\r\n", $header); + $context['http']['header'] = implode("\r\n", $header); $context['http']['content'] = $__data; } else {
Stage 4: misc (variable assignment in loops and conditionals, join usage instead of implode, ...)
chobie_jira-api-restclient
train
e630ceaf2eb1bf57cdf297aa3464b34c2eb88829
diff --git a/lib/fn/InterpolatedFn.js b/lib/fn/InterpolatedFn.js index <HASH>..<HASH> 100644 --- a/lib/fn/InterpolatedFn.js +++ b/lib/fn/InterpolatedFn.js @@ -48,6 +48,7 @@ function InterpolatedFn( controlPoints , params = {} , noInit = false ) { PiecewiseFn.call( this ) ; this.controlPoints = controlPoints ; + this.order = params.order ?? 2 ; this.preserveExtrema = !! params.preserveExtrema ; this.atanMeanDfx = !! params.atanMeanDfx ; @@ -81,9 +82,6 @@ InterpolatedFn.prototype.dup = InterpolatedFn.prototype.clone = function() { InterpolatedFn.prototype.updateFromControlPoints = function() { - var i , cp , nextCp , previousCp , beginDfx , midDfx , endDfx , a , b , c , - cpLen = this.controlPoints.length ; - // Always remove all intervals this.intervals.length = 0 ; if ( ! this.controlPoints?.length ) { return ; } @@ -91,6 +89,39 @@ InterpolatedFn.prototype.updateFromControlPoints = function() { // First, sort controlPoints in ascending order this.controlPoints.sort( ( a_ , b_ ) => a_.x - b_.x ) ; + if ( this.order === 1 ) { this.updateFromControlPointsOrder1() ; } + if ( this.order >= 2 ) { this.updateFromControlPointsOrder2() ; } +} ; + + + +InterpolatedFn.prototype.updateFromControlPointsOrder1 = function() { + var i , cp , nextCp , a , b , + cpLen = this.controlPoints.length ; + + for ( i = 0 ; i < cpLen - 1 ; i ++ ) { + cp = this.controlPoints[ i ] ; + nextCp = this.controlPoints[ i + 1 ] ; + + // Derivative/slope for that interval + a = ( nextCp.fx - cp.fx ) / ( nextCp.x - cp.x ) ; + // ax+b => a * cp.x + b = cp.fx <=> b = cp.fx - a * cp.x + b = cp.fx - a * cp.x ; + + this.intervals.push( { + min: cp.x , + max: nextCp.x , + fn: new PolynomialFn( b , a ) + } ) ; + } +} ; + + + +InterpolatedFn.prototype.updateFromControlPointsOrder2 = function() { + var i , cp , nextCp , previousCp , beginDfx , midDfx , endDfx , a , b , c , + cpLen = this.controlPoints.length ; + // Phase 1: compute median points and mean slope (dfx) for ( i = 0 ; i < cpLen - 1 ; i ++ ) { diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "math-kit", - "version": "0.16.13", + "version": "0.16.14", "description": "Math utilities (random, stats, and more)", "main": "lib/math.js", "engines": { diff --git a/test/gfx-test/fn-test.js b/test/gfx-test/fn-test.js index <HASH>..<HASH> 100644 --- a/test/gfx-test/fn-test.js +++ b/test/gfx-test/fn-test.js @@ -60,7 +60,7 @@ describe( "InterpolatedFn" , () => { { x: 1 , fx: 1 } , { x: 3 , fx: 5 } , { x: 5 , fx: 1 } - ] ) ; + ] , { order: 2 } ) ; tracer.createImage() ; tracer.drawAxis() ; @@ -183,7 +183,7 @@ describe( "InterpolatedFn" , () => { lastFx = fx ; } - var fn = new math.fn.InterpolatedFn( array , { preserveExtrema: false , atanMeanDfx: true } ) ; + var fn = new math.fn.InterpolatedFn( array , { preserveExtrema: false , atanMeanDfx: true , order: 2 } ) ; tracer.createImage() ; tracer.drawAxis() ; @@ -225,7 +225,7 @@ describe( "Const2ndOrdDifferentialEquationFn" , () => { await tracer.saveImage( __dirname + "/differential-equation-fn.png" ) ; } ) ; - it( "zzz Spring-Damper-Mass system" , async () => { + it( "Spring-Damper-Mass system" , async () => { var tracer = new GmTracer( { height: 600 , width: 800 ,
New: InterpolatedFn order 1
cronvel_math-kit
train
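For order 1, each interval between control points gets a straight line: slope a = (f(x2) - f(x1)) / (x2 - x1) and intercept b = f(x1) - a * x1, exactly the algebra in the diff. A small self-contained check in Python:

def linear_pieces(points):
    # points: sorted (x, fx) pairs -> (xmin, xmax, a, b) per interval, f(x) = a*x + b
    pieces = []
    for (x1, f1), (x2, f2) in zip(points, points[1:]):
        a = (f2 - f1) / (x2 - x1)   # slope over the interval
        b = f1 - a * x1             # from a*x1 + b = f1
        pieces.append((x1, x2, a, b))
    return pieces

for xmin, xmax, a, b in linear_pieces([(1, 1), (3, 5), (5, 1)]):
    print(xmin, xmax, a, b, a * xmin + b, a * xmax + b)  # endpoints reproduced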
eac1fa06ce9a17db93e5bb77b1bf4cdf5df920d4
diff --git a/tests/Opauth/OpauthStrategyTest.php b/tests/Opauth/OpauthStrategyTest.php index <HASH>..<HASH> 100644 --- a/tests/Opauth/OpauthStrategyTest.php +++ b/tests/Opauth/OpauthStrategyTest.php @@ -67,7 +67,6 @@ class OpauthStrategyTest extends OpauthTest{ $Opauth->run(); } - /** * @runInSeparateProcess */ @@ -107,4 +106,24 @@ class OpauthStrategyTest extends OpauthTest{ $this->assertContains("Location: $fullUrl", $headers_list); } + /** + * Instantiate OpauthStrategy with test config suitable for testing + * + * @param array $config Config changes to be merged with the default + * @param boolean $autoRun Should Opauth be run right after instantiation, defaulted to false + * @return object Opauth instance + */ + protected static function instantiateSampleStrategyForTesting($config = array(), $autoRun = false){ + $Opauth = new Opauth(self::configForTest($config), $autoRun); + + // From Opauth.php + $strategy = $Opauth->env['Strategy']['Sample']; + $safeEnv = $Opauth->env; + unset($safeEnv['Strategy']); + + require_once './tests/Opauth//Strategy/Sample/SampleStrategy.php'; + $OpauthStrategy = new SampleStrategy($strategy, $safeEnv); + return $OpauthStrategy; + } + }
instantiateSampleStrategyForTesting()
opauth_opauth
train
4daeb2395db6c985a6390e070fae708ad19c160f
diff --git a/f5/__init__.py b/f5/__init__.py index <HASH>..<HASH> 100644 --- a/f5/__init__.py +++ b/f5/__init__.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = '3.0.4' +__version__ = '3.0.5'
Bumps release to <I> (#<I>)
F5Networks_f5-common-python
train
e892dfe9f2f54a713e5b0946f5b8864541196c3d
diff --git a/pygerduty/__init__.py b/pygerduty/__init__.py index <HASH>..<HASH> 100755 --- a/pygerduty/__init__.py +++ b/pygerduty/__init__.py @@ -264,7 +264,7 @@ class Restrictions(Collection): class NotificationRules(Collection): - pass + paginated = False class ContactMethods(Collection): @@ -367,8 +367,7 @@ class Override(Container): class NotificationRule(Container): - paginated = False - + pass class ContactMethod(Container): pass
Fix bug with pagination on notification rules.
dropbox_pygerduty
train
bfd8e85d6bc09d23de383232873ed99e37464c5a
diff --git a/src/Support/Content/MimeTypeHelper.php b/src/Support/Content/MimeTypeHelper.php index <HASH>..<HASH> 100644 --- a/src/Support/Content/MimeTypeHelper.php +++ b/src/Support/Content/MimeTypeHelper.php @@ -3,6 +3,7 @@ namespace Czim\FileHandling\Support\Content; use Czim\FileHandling\Contracts\Support\MimeTypeHelperInterface; use finfo; +use RuntimeException; use Symfony\Component\HttpFoundation\File\MimeType\ExtensionGuesserInterface; use Symfony\Component\HttpFoundation\File\MimeType\MimeTypeExtensionGuesser; use Symfony\Component\HttpFoundation\File\MimeType\MimeTypeGuesser; @@ -78,7 +79,21 @@ class MimeTypeHelper implements MimeTypeHelperInterface */ public function guessExtensionForMimeType($type) { - return static::getMimeTypeExtensionGuesserInstance()->guess($type); + if (class_exists(MimeTypes::class)) { + $extensions = (new MimeTypes())->getExtensions($type); + + if (count($extensions)) { + return head($extensions); + } + + return ''; + } + + if (static::$mimeTypeExtensionGuesser !== null) { + return static::getMimeTypeExtensionGuesserInstance()->guess($type); + } + + throw new RuntimeException('Unable to guess, no extension guessin strategy available'); } /** @@ -88,11 +103,10 @@ class MimeTypeHelper implements MimeTypeHelperInterface */ public static function getMimeTypeExtensionGuesserInstance() { - if ( ! static::$mimeTypeExtensionGuesser) { + if ( ! static::$mimeTypeExtensionGuesser && class_exists(MimeTypeExtensionGuesser::class)) { static::$mimeTypeExtensionGuesser = new MimeTypeExtensionGuesser; } return static::$mimeTypeExtensionGuesser; } - }
Fixed support for extension by mime type for Symfony 4 vs 5
czim_file-handling
train
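The fix prefers Symfony's newer MimeTypes class when it exists and falls back to the legacy guesser otherwise, returning an empty string when nothing matches. Python's standard library offers a similar lookup; a sketch of the same mime-type-to-extension contract:

import mimetypes

def guess_extension_for_mime_type(mime):
    ext = mimetypes.guess_extension(mime)
    # Mirror the PHP behaviour: empty string when the type is unknown.
    return (ext or "").lstrip(".")

print(guess_extension_for_mime_type("image/png"))           # 'png'
print(guess_extension_for_mime_type("application/x-nope"))  # ''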
30d7f28c58def1592b84e9ba4c2c3a1c6afecc4c
diff --git a/enoslib/infra/enos_g5k/g5k_api_utils.py b/enoslib/infra/enos_g5k/g5k_api_utils.py index <HASH>..<HASH> 100644 --- a/enoslib/infra/enos_g5k/g5k_api_utils.py +++ b/enoslib/infra/enos_g5k/g5k_api_utils.py @@ -161,7 +161,10 @@ def grid_reload_from_ids(oargrid_jobids): jobs = [] for site, job_id in oargrid_jobids: jobs.append(gk.sites[site].jobs[job_id]) - return jobs + + wait_for_jobs(jobs) + + return build_resources(jobs) def build_resources(jobs: List[Job]) -> Tuple[List[str], List[OarNetwork]]:
G5k: fix static oargrid driver; resources weren't built.
BeyondTheClouds_enoslib
train
488f6852263b5e893ac354c8f37f639a1ae05db4
diff --git a/src/main/java/com/wrapper/spotify/SpotifyApiThreading.java b/src/main/java/com/wrapper/spotify/SpotifyApiThreading.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/wrapper/spotify/SpotifyApiThreading.java +++ b/src/main/java/com/wrapper/spotify/SpotifyApiThreading.java @@ -1,16 +1,26 @@ package com.wrapper.spotify; import java.util.concurrent.Callable; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; public class SpotifyApiThreading { public static final ExecutorService THREADPOOL = Executors.newCachedThreadPool(); - public static <T> Future<T> executeAsync(final Callable<T> callable) { - return SpotifyApiThreading.THREADPOOL.submit(callable); + public static <T> CompletableFuture<T> executeAsync(final Callable<T> callable) { + CompletableFuture<T> future = new CompletableFuture<>(); + + SpotifyApiThreading.THREADPOOL.execute(() -> { + try { + future.complete(callable.call()); + } catch (Exception e) { + future.completeExceptionally(e); + } + }); + + return future; } }
Updated SpotifyApiThreading.executeAsync to return a CompletableFuture.
thelinmichael_spotify-web-api-java
train
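Completing a CompletableFuture by hand (rather than returning ExecutorService.submit's plain Future) lets callers chain on the result, with exceptions routed through completeExceptionally. Python's concurrent.futures.Future supports the same manual completion; a sketch only, not the library's API:

from concurrent.futures import Future, ThreadPoolExecutor

pool = ThreadPoolExecutor()

def execute_async(callable_):
    future = Future()

    def run():
        try:
            future.set_result(callable_())    # like future.complete(...)
        except Exception as exc:
            future.set_exception(exc)         # like future.completeExceptionally(...)

    pool.submit(run)
    return future

print(execute_async(lambda: 21 * 2).result())    # 42
print(execute_async(lambda: 1 / 0).exception())  # division by zero, captured
pool.shutdown()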
bb43d43310f01998e2aa9ddb397bdb1f37a3086e
diff --git a/test/test-usemin.js b/test/test-usemin.js index <HASH>..<HASH> 100644 --- a/test/test-usemin.js +++ b/test/test-usemin.js @@ -285,11 +285,11 @@ describe('usemin', function () { grunt.file.copy(path.join(__dirname, 'fixtures/usemin.html'), 'index.html'); grunt.task.run('usemin'); grunt.task.start(); + grunt.filerev = null; var changed = grunt.file.read('index.html'); // Check replace has performed its duty assert.ok(changed.match('<img src="images/test.2134.png">')); - grunt.filerev = null; }); });
Fixes test to ensure a clean state in case the test fails
yeoman_grunt-usemin
train
a20f7f780ae9d2894f2af75bf5bc83ba503cae13
diff --git a/tensorflow_datasets/image/imagewang.py b/tensorflow_datasets/image/imagewang.py index <HASH>..<HASH> 100644 --- a/tensorflow_datasets/image/imagewang.py +++ b/tensorflow_datasets/image/imagewang.py @@ -66,7 +66,7 @@ class ImagewangConfig(tfds.core.BuilderConfig): def __init__(self, size, **kwargs): super(ImagewangConfig, self).__init__( - version=tfds.core.Version("0.1.0"), **kwargs) + version=tfds.core.Version("2.0.0"), **kwargs) self.size = size @@ -83,8 +83,8 @@ def _make_builder_configs(): class Imagewang(tfds.core.GeneratorBasedBuilder): """ Imagewang contains Imagenette and Imagewoof combined. """ - - VERSION = tfds.core.Version("0.1.0") + + VERSION = tfds.core.Version("2.0.0") BUILDER_CONFIGS = _make_builder_configs() def _info(self): @@ -107,14 +107,12 @@ class Imagewang(tfds.core.GeneratorBasedBuilder): if size in _SIZES: size_str = "" if size == "full-size" else "-" + size[:-2] url = os.path.join(_URL_PREFIX, "imagewang%s.tgz" % size_str) - print(url) path = dl_manager.download_and_extract(url) train_path = os.path.join(path, _SIZE_TO_DIRNAME[size], "train") val_path = os.path.join(path, _SIZE_TO_DIRNAME[size], "val") else: - raise ValueError("Size not implemented!") + raise ValueError("size must be one of %s" % _SIZES) - print(train_path, 'train_path') return [ tfds.core.SplitGenerator( name=tfds.Split.TRAIN,
clean up for PR#<I>
tensorflow_datasets
train
55d79b9389cdbe07f942ab6e4fde6ab42f36a8ff
diff --git a/js/base/Exchange.js b/js/base/Exchange.js index <HASH>..<HASH> 100644 --- a/js/base/Exchange.js +++ b/js/base/Exchange.js @@ -2089,13 +2089,4 @@ module.exports = class Exchange { params = this.omit (params, 'type'); return [ type, params ]; } - - timestampWithinXMonths (timestamp, months) { - const sinceDate = new Date (timestamp); - const xMonthsAgo = new Date (); - xMonthsAgo.setMonth (xMonthsAgo.getMonth () - months); - xMonthsAgo.setHours (0, 0, 0); - return sinceDate > xMonthsAgo; - } - } diff --git a/php/Exchange.php b/php/Exchange.php index <HASH>..<HASH> 100644 --- a/php/Exchange.php +++ b/php/Exchange.php @@ -3537,13 +3537,4 @@ class Exchange { $params = $this->omit($params, $type); return array($type, $params); } - - public function timestamp_within_x_months($timestamp, $months) { - $utc = new DateTimeZone("UTC"); - $since_date = (new DateTime('@'.$timestamp / 1000))->setTimezone($utc); - $x_months_ago = new DateTime('today midnight', $utc); - $x_months_ago->modify('-'.$months.' months'); - return $since_date > $x_months_ago; - } - } diff --git a/python/ccxt/base/exchange.py b/python/ccxt/base/exchange.py index <HASH>..<HASH> 100644 --- a/python/ccxt/base/exchange.py +++ b/python/ccxt/base/exchange.py @@ -2658,14 +2658,3 @@ class Exchange(object): type = self.safe_string(params, 'type', market_type) params = self.omit(params, 'type') return [type, params] - - def timestamp_within_x_months(self, timestamp, months): - since_date = datetime.datetime.utcfromtimestamp(timestamp / 1000) - now = datetime.datetime.now() - x_months_ago = datetime.datetime( - year=now.year, - month=now.month - months, - day=now.day, - hour=0, - ) - return since_date > x_months_ago
Removed base.Exchange timestampWithinXMonths method
ccxt_ccxt
train
93a8bf95787bab022ef84f4b790206fafb0df4d2
diff --git a/src/FrontendModule/NewsModule.php b/src/FrontendModule/NewsModule.php index <HASH>..<HASH> 100644 --- a/src/FrontendModule/NewsModule.php +++ b/src/FrontendModule/NewsModule.php @@ -49,6 +49,11 @@ abstract class NewsModule extends ModuleNews protected $manager; /** + * @var PageModel|null + */ + protected $targetPage; + + /** * Display a wildcard in the back end. * * @return string @@ -261,20 +266,18 @@ abstract class NewsModule extends ModuleNews */ protected function getTargetPage() { - static $page; - - if (null === $page) { + if (null === $this->targetPage) { if ($this->jumpTo > 0 && (int) $GLOBALS['objPage']->id !== (int) $this->jumpTo && null !== ($target = PageModel::findPublishedById($this->jumpTo)) ) { - $page = $target; + $this->targetPage = $target; } else { - $page = $GLOBALS['objPage']; + $this->targetPage = $GLOBALS['objPage']; } } - return $page; + return $this->targetPage; } /**
Fix the target page when there are multiple news category modules on one page (closes #<I>)
codefog_contao-news_categories
train
d18e31d5b0ee82092f322c39bc53812733320e5d
diff --git a/container/test-impl-base/src/main/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGenerator.java b/container/test-impl-base/src/main/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGenerator.java index <HASH>..<HASH> 100644 --- a/container/test-impl-base/src/main/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGenerator.java +++ b/container/test-impl-base/src/main/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGenerator.java @@ -19,6 +19,8 @@ package org.jboss.arquillian.container.test.impl.client.deployment; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.logging.Logger; @@ -61,7 +63,9 @@ public class AnnotationDeploymentScenarioGenerator implements DeploymentScenario validate(deploymentMethod); deployments.add(generateDeployment(deploymentMethod)); } - + + sortByDeploymentOrder(deployments); + return deployments; } @@ -177,4 +181,15 @@ public class AnnotationDeploymentScenarioGenerator implements DeploymentScenario throw new RuntimeException("Could not invoke deployment method: " + deploymentMethod, e); } } + + private void sortByDeploymentOrder(List<DeploymentDescription> deploymentDescriptions) { + // sort them by order + Collections.sort(deploymentDescriptions, new Comparator<DeploymentDescription>() + { + public int compare(DeploymentDescription d1, DeploymentDescription d2) + { + return new Integer(d1.getOrder()).compareTo(d2.getOrder()); + } + }); + } } diff --git a/container/test-impl-base/src/test/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGeneratorTestCase.java b/container/test-impl-base/src/test/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGeneratorTestCase.java index <HASH>..<HASH> 100644 --- a/container/test-impl-base/src/test/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGeneratorTestCase.java +++ b/container/test-impl-base/src/test/java/org/jboss/arquillian/container/test/impl/client/deployment/AnnotationDeploymentScenarioGeneratorTestCase.java @@ -163,6 +163,27 @@ public class AnnotationDeploymentScenarioGeneratorTestCase } @Test + public void shouldSortDeploymentsByOrder() throws Exception + { + List<DeploymentDescription> scenario = generate(MultiDeploymentsInReverseOrder.class); + + Assert.assertNotNull(scenario); + Assert.assertEquals( + "Verify all deployments were found", + 3, scenario.size()); + + Assert.assertTrue( + "Deployments are not sorted by order", + scenario.get(0).getOrder() < scenario.get(1).getOrder() + ); + Assert.assertTrue( + "Deployments are not sorted by order", + scenario.get(1).getOrder() < scenario.get(2).getOrder() + ); + } + + + @Test public void shouldReadExpectedAndOverrideDeployment() { List<DeploymentDescription> scenario = generate(ExpectedDeploymentExceptionSet.class); @@ -295,6 +316,28 @@ public class AnnotationDeploymentScenarioGeneratorTestCase } @SuppressWarnings("unused") + private static class MultiDeploymentsInReverseOrder + { + @Deployment(name = "second", order = 2) + public static Archive<?> deploymentOne() + { + return ShrinkWrap.create(JavaArchive.class); + } + + @Deployment(name = "third", order = 3) + public static Archive<?> deploymentThree() + { + return ShrinkWrap.create(JavaArchive.class); + } + + @Deployment(name = "first", order = 1) + public static Archive<?> deploymentTwo() + { + return ShrinkWrap.create(JavaArchive.class); + } + } + + @SuppressWarnings("unused") private static class ExpectedDeploymentExceptionSet { @Deployment(name = "second", testable = true) // testable should be overwritten by @Expected
ARQ-<I> Sort deployments in order so that extensions are called in deployment order
arquillian_arquillian-core
train
e4bb9a832051cb69adefeee217279ee290fbbede
diff --git a/src/Unosquare.Tubular/Javascript/tubular/services/translate.spec.js b/src/Unosquare.Tubular/Javascript/tubular/services/translate.spec.js index <HASH>..<HASH> 100644 --- a/src/Unosquare.Tubular/Javascript/tubular/services/translate.spec.js +++ b/src/Unosquare.Tubular/Javascript/tubular/services/translate.spec.js @@ -16,5 +16,34 @@ describe('Module: tubular.services', function () { it('should be defined', function () { expect(tubularTranslate).toBeDefined(); }); + + it('should have default and current language to be "en"', function () { + expect(tubularTranslate.currentLanguage && tubular.defaultLanguage).toBe('en'); + }); + + it('should have translation tables to have same keys', function () { + tubularTranslate.translationTable.forEach(value, key); + }); + + it('should change the language', function () { + tubularTranslate.setLanguage('es'); + expect(tubularTranslate.currentLanguage).toBe('es'); + + }); + + it('should translate', function () { + tubularTranslate.setLanguage('es'); + var result = tubularTranslate.translate('OP_EQUALS'); + expect(result).toBe('Equals'); + + }); + + it('should add new translation', function () { + + tubularTranslate.addTranslation('it', 'OP_STARTSWITH', 'Inizia con'); + expect(translationTable[2]).toBe('it'); + + }); + }); }); \ No newline at end of file
Adding some test cases to translate.spec
unosquare_tubular
train
1741ed15787163bb6650a38fff086812a03e5872
diff --git a/src/adapters/common.js b/src/adapters/common.js index <HASH>..<HASH> 100644 --- a/src/adapters/common.js +++ b/src/adapters/common.js @@ -10,6 +10,8 @@ import { sanitizedRaw, type DirtyRaw } from '../RawRecord' export type DirtyFindResult = RecordId | ?DirtyRaw export type DirtyQueryResult = Array<RecordId | DirtyRaw> +const shouldLogAdapterTimes = false + export function validateAdapter(adapter: DatabaseAdapter): void { if (process.env.NODE_ENV !== 'production') { const { schema, migrations } = adapter @@ -69,7 +71,7 @@ export async function devLogFind( table: string, ): Promise<CachedFindResult> { const [data, time] = await devMeasureTimeAsync(executeBlock) - logger.log(`[DB] Found ${table}#${id} in ${time}ms`) + shouldLogAdapterTimes && logger.log(`[DB] Found ${table}#${id} in ${time}ms`) return data } @@ -78,7 +80,8 @@ export async function devLogQuery( query: SerializedQuery, ): Promise<CachedQueryResult> { const [dirtyRecords, time] = await devMeasureTimeAsync(executeBlock) - logger.log(`[DB] Loaded ${dirtyRecords.length} ${query.table} in ${time}ms`) + shouldLogAdapterTimes && + logger.log(`[DB] Loaded ${dirtyRecords.length} ${query.table} in ${time}ms`) return dirtyRecords } @@ -87,7 +90,7 @@ export async function devLogCount( query: SerializedQuery, ): Promise<number> { const [count, time] = await devMeasureTimeAsync(executeBlock) - logger.log(`[DB] Counted ${count} ${query.table} in ${time}ms`) + shouldLogAdapterTimes && logger.log(`[DB] Counted ${count} ${query.table} in ${time}ms`) return count } @@ -100,7 +103,8 @@ export async function devLogBatch<T>( } const [, time] = await devMeasureTimeAsync(executeBlock) const [type, table] = operations[0] - logger.log( - `[DB] Executed batch of ${operations.length} operations (first: ${type} on ${table}) in ${time}ms`, - ) + shouldLogAdapterTimes && + logger.log( + `[DB] Executed batch of ${operations.length} operations (first: ${type} on ${table}) in ${time}ms`, + ) } diff --git a/src/observation/simpleObserver/index.js b/src/observation/simpleObserver/index.js index <HASH>..<HASH> 100644 --- a/src/observation/simpleObserver/index.js +++ b/src/observation/simpleObserver/index.js @@ -61,7 +61,7 @@ export default function simpleObserver<Record: Model>( // Note: it would be cleaner to do defer->switchMap, but that makes profiles really hard to read // hence the mutability return Observable.create(observer => { - logger.log(`Subscribing to changes in a ${query.table} query`) + // logger.log(`Subscribing to changes in a ${query.table} query`) const matcher: Matcher<Record> = encodeMatcher(query.description) let unsubscribed = false @@ -93,7 +93,7 @@ export default function simpleObserver<Record: Model>( return () => { unsubscribed = true subscription && subscription.unsubscribe() - logger.log(`Unsubscribed from changes in a ${query.table} query`) + // logger.log(`Unsubscribed from changes in a ${query.table} query`) } }) }
Disable most logging because it's annoying and the perf measurements are totally wrong
Nozbe_WatermelonDB
train
004e1d303fd7f14fb68678d9377f06e70b49107a
diff --git a/server/server.go b/server/server.go index <HASH>..<HASH> 100644 --- a/server/server.go +++ b/server/server.go @@ -196,12 +196,12 @@ func (s *Server) restore(ctx context.Context) []string { // Go through all the pods and check if it can be restored. If an error occurs, delete the pod and any containers // associated with it. Release the pod and container names as well. - for sbID, metadata := range pods { + for sbID := range pods { sb, err := s.LoadSandbox(ctx, sbID) if err == nil { continue } - log.Warnf(ctx, "could not restore sandbox %s container %s: %v", metadata.PodID, sbID, err) + log.Warnf(ctx, "could not restore sandbox %s; deleting it and containers underneath it: %v", sbID, err) for _, n := range names[sbID] { if err := s.Store().DeleteContainer(n); err != nil && err != storageTypes.ErrNotAContainer { log.Warnf(ctx, "unable to delete container %s: %v", n, err) @@ -214,17 +214,20 @@ func (s *Server) restore(ctx context.Context) []string { } } // Go through the containers and delete any container that was under the deleted pod - log.Warnf(ctx, "deleting all containers under sandbox %s since it could not be restored", sbID) for k, v := range podContainers { - if v.PodID == sbID { - for _, n := range names[k] { - if err := s.Store().DeleteContainer(n); err != nil && err != storageTypes.ErrNotAContainer { - log.Warnf(ctx, "unable to delete container %s: %v", n, err) - } - // Release the container name for future use - s.ReleaseContainerName(n) + if v.PodID != sbID { + continue + } + for _, n := range names[k] { + if err := s.Store().DeleteContainer(n); err != nil && err != storageTypes.ErrNotAContainer { + log.Warnf(ctx, "unable to delete container %s: %v", n, err) } + // Release the container name for future use + s.ReleaseContainerName(n) } + // Remove the container from the list of podContainers, or else we'll retry the delete later, + // causing a useless debug message. + delete(podContainers, k) } // Add the pod id to the list of deletedPods, to be able to call CNI DEL on the sandbox network. // Unfortunately, if we weren't able to restore a sandbox, then there's little that can be done
server: reduce log verbosity on restore
cri-o_cri-o
train
8e863cef4cadc29bdf64fbacfabaff8d597ff040
diff --git a/huff0/decompress_test.go b/huff0/decompress_test.go index <HASH>..<HASH> 100644 --- a/huff0/decompress_test.go +++ b/huff0/decompress_test.go @@ -421,30 +421,43 @@ func BenchmarkDecompress1XNoTable(b *testing.B) { continue } b.Run(test.name, func(b *testing.B) { - var s = &Scratch{} - s.Reuse = ReusePolicyNone - buf0, err := test.fn() - if err != nil { - b.Fatal(err) - } - if len(buf0) > BlockSizeMax { - buf0 = buf0[:BlockSizeMax] - } - compressed, _, err := Compress1X(buf0, s) - if err != test.err1X { - b.Fatal("unexpected error:", err) - } - s.Out = nil - s, remain, _ := ReadTable(compressed, s) - s.Decompress1X(remain) - b.ResetTimer() - b.ReportAllocs() - b.SetBytes(int64(len(buf0))) - for i := 0; i < b.N; i++ { - _, err = s.Decompress1X(remain) - if err != nil { - b.Fatal(err) - } + for _, sz := range []int{1e2, 1e4, BlockSizeMax} { + b.Run(fmt.Sprintf("%d", sz), func(b *testing.B) { + var s = &Scratch{} + s.Reuse = ReusePolicyNone + buf0, err := test.fn() + if err != nil { + b.Fatal(err) + } + for len(buf0) < sz { + buf0 = append(buf0, buf0...) + } + if len(buf0) > sz { + buf0 = buf0[:sz] + } + compressed, _, err := Compress1X(buf0, s) + if err != test.err1X { + if err == ErrUseRLE { + b.Skip("RLE") + return + } + b.Fatal("unexpected error:", err) + } + s.Out = nil + s, remain, _ := ReadTable(compressed, s) + s.Decompress1X(remain) + b.ResetTimer() + b.ReportAllocs() + b.SetBytes(int64(len(buf0))) + for i := 0; i < b.N; i++ { + _, err = s.Decompress1X(remain) + if err != nil { + b.Fatal(err) + } + } + b.ReportMetric(float64(s.actualTableLog), "log") + b.ReportMetric(100*float64(len(compressed))/float64(len(buf0)), "pct") + }) } }) } @@ -457,30 +470,44 @@ func BenchmarkDecompress4XNoTable(b *testing.B) { continue } b.Run(test.name, func(b *testing.B) { - var s = &Scratch{} - s.Reuse = ReusePolicyNone - buf0, err := test.fn() - if err != nil { - b.Fatal(err) - } - if len(buf0) > BlockSizeMax { - buf0 = buf0[:BlockSizeMax] - } - compressed, _, err := Compress4X(buf0, s) - if err != test.err1X { - b.Fatal("unexpected error:", err) - } - s.Out = nil - s, remain, _ := ReadTable(compressed, s) - s.Decompress4X(remain, len(buf0)) - b.ResetTimer() - b.ReportAllocs() - b.SetBytes(int64(len(buf0))) - for i := 0; i < b.N; i++ { - _, err = s.Decompress4X(remain, len(buf0)) - if err != nil { - b.Fatal(err) - } + for _, sz := range []int{1e2, 1e4, BlockSizeMax} { + b.Run(fmt.Sprintf("%d", sz), func(b *testing.B) { + var s = &Scratch{} + s.Reuse = ReusePolicyNone + buf0, err := test.fn() + if err != nil { + b.Fatal(err) + } + for len(buf0) < sz { + buf0 = append(buf0, buf0...) + } + if len(buf0) > sz { + buf0 = buf0[:sz] + } + compressed, _, err := Compress4X(buf0, s) + if err != test.err4X { + if err == ErrUseRLE { + b.Skip("RLE") + return + } + b.Fatal("unexpected error:", err) + } + s.Out = nil + s, remain, _ := ReadTable(compressed, s) + s.Decompress4X(remain, len(buf0)) + b.ResetTimer() + b.ReportAllocs() + b.SetBytes(int64(len(buf0))) + for i := 0; i < b.N; i++ { + _, err = s.Decompress4X(remain, len(buf0)) + if err != nil { + b.Fatal(err) + } + } + b.ReportMetric(float64(s.actualTableLog), "log") + b.ReportMetric(100*float64(len(compressed))/float64(len(buf0)), "pct") + + }) } }) }
tests: Extend huff0 NoTable benchmarks. (#<I>)
klauspost_compress
train
5ead15b07597e9cdb887cfe6aa74de4a14140bb1
diff --git a/actionpack/lib/action_dispatch/routing.rb b/actionpack/lib/action_dispatch/routing.rb index <HASH>..<HASH> 100644 --- a/actionpack/lib/action_dispatch/routing.rb +++ b/actionpack/lib/action_dispatch/routing.rb @@ -2,31 +2,11 @@ require 'active_support/core_ext/object/to_param' require 'active_support/core_ext/regexp' module ActionDispatch - # = Routing - # # The routing module provides URL rewriting in native Ruby. It's a way to # redirect incoming requests to controllers and actions. This replaces - # mod_rewrite rules. Best of all, Rails' Routing works with any web server. + # mod_rewrite rules. Best of all, Rails' \Routing works with any web server. # Routes are defined in <tt>config/routes.rb</tt>. # - # Consider the following route, which you will find commented out at the - # bottom of your generated <tt>config/routes.rb</tt>: - # - # match ':controller(/:action(/:id(.:format)))' - # - # This route states that it expects requests to consist of a - # <tt>:controller</tt> followed optionally by an <tt>:action</tt> that in - # turn is followed optionally by an <tt>:id</tt>, which in turn is followed - # optionally by a <tt>:format</tt> - # - # Suppose you get an incoming request for <tt>/blog/edit/22</tt>, you'll end - # up with: - # - # params = { :controller => 'blog', - # :action => 'edit', - # :id => '22' - # } - # # Think of creating routes as drawing a map for your requests. The map tells # them where to go based on some predefined pattern: # @@ -43,6 +23,39 @@ module ActionDispatch # # Other names simply map to a parameter as in the case of <tt>:id</tt>. # + # == Resources + # + # Resource routing allows you to quickly declare all of the common routes + # for a given resourceful controller. Instead of declaring separate routes + # for your +index+, +show+, +new+, +edit+, +create+, +update+ and +destroy+ + # actions, a resourceful route declares them in a single line of code: + # + # resources :photos + # + # Sometimes, you have a resource that clients always look up without + # referencing an ID. A common example, /profile always shows the profile of + # the currently logged in user. In this case, you can use a singular resource + # to map /profile (rather than /profile/:id) to the show action. + # + # resource :profile + # + # It's common to have resources that are logically children of other + # resources: + # + # resources :magazines do + # resources :ads + # end + # + # You may wish to organize groups of controllers under a namespace. Most + # commonly, you might group a number of administrative controllers under + # an +admin+ namespace. You would place these controllers under the + # app/controllers/admin directory, and you can group them together in your + # router: + # + # namespace "admin" do + # resources :posts, :comments + # end + # # == Named routes # # Routes can be named by passing an <tt>:as</tt> option, @@ -131,6 +144,30 @@ module ActionDispatch # Encoding regular expression modifiers are silently ignored. The # match will always use the default encoding or ASCII. # + # == Default route + # + # Consider the following route, which you will find commented out at the + # bottom of your generated <tt>config/routes.rb</tt>: + # + # match ':controller(/:action(/:id(.:format)))' + # + # This route states that it expects requests to consist of a + # <tt>:controller</tt> followed optionally by an <tt>:action</tt> that in + # turn is followed optionally by an <tt>:id</tt>, which in turn is followed + # optionally by a <tt>:format</tt>. + # + # Suppose you get an incoming request for <tt>/blog/edit/22</tt>, you'll end + # up with: + # + # params = { :controller => 'blog', + # :action => 'edit', + # :id => '22' + # } + # + # By not relying on default routes, you improve the security of your + # application since not all controller actions, which includes actions you + # might add at a later time, are exposed by default. + # # == HTTP Methods # # Using the <tt>:via</tt> option when specifying a route allows you to restrict it to a specific HTTP method. @@ -160,6 +197,20 @@ module ActionDispatch # however if your route needs to respond to more than one HTTP method (or all methods) then using the # <tt>:via</tt> option on <tt>match</tt> is preferable. # + # == External redirects + # + # You can redirect any path to another path using the redirect helper in your router: + # + # match "/stories" => redirect("/posts") + # + # == Routing to Rack Applications + # + # Instead of a String, like <tt>posts#index</tt>, which corresponds to the + # index action in the PostsController, you can specify any Rack application + # as the endpoint for a matcher: + # + # match "/application.js" => Sprockets + # # == Reloading routes # # You can reload routes if you feel you must: @@ -208,7 +259,9 @@ module ActionDispatch # # == View a list of all your routes # - # Run <tt>rake routes</tt>. + # rake routes + # + # Target specific controllers by prefixing the command with <tt>CONTROLLER=x</tt>. # module Routing autoload :DeprecatedMapper, 'action_dispatch/routing/deprecated_mapper'
Rework the routing documentation. Move the default route to the bottom, as this practise should be discouraged. Add documentation for resources, external redirects and Rack applications.
rails_rails
train
4c28d87fd0180ded8656b8e30b96d4790ed1793c
diff --git a/graphenecommon/transactionbuilder.py b/graphenecommon/transactionbuilder.py index <HASH>..<HASH> 100644 --- a/graphenecommon/transactionbuilder.py +++ b/graphenecommon/transactionbuilder.py @@ -265,6 +265,7 @@ class TransactionBuilder(dict, AbstractBlockchainInstanceProvider): auth_account = self.account_class( authority[0], blockchain_instance=self.blockchain ) + required_treshold = auth_account[perm]["weight_threshold"] r.extend( self._fetchkeys(auth_account, perm, level + 1, required_treshold) )
Nested accounts may come with a different required_threshold
xeroc_python-graphenelib
train
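The point of the one-line fix above is easy to miss: when key collection recurses into an account listed under `account_auths`, that nested account's authority defines its own `weight_threshold`, which must be re-read per nesting level rather than inherited from the caller. A minimal Python sketch of the idea (the dict shape and function name are hypothetical, not the library's API):

```python
def fetch_keys(account, perm, level=0, max_depth=2):
    # Each authority carries its own weight_threshold; re-read it at every
    # nesting level instead of passing the outer account's value down.
    auth = account[perm]
    required_threshold = auth["weight_threshold"]
    triples = [(key, weight, required_threshold) for key, weight in auth["key_auths"]]
    if level < max_depth:
        for nested_account, _weight in auth["account_auths"]:
            triples.extend(fetch_keys(nested_account, perm, level + 1))
    return triples

inner = {"active": {"weight_threshold": 1,
                    "key_auths": [("KEY-B", 1)], "account_auths": []}}
outer = {"active": {"weight_threshold": 2,
                    "key_auths": [("KEY-A", 1)],
                    "account_auths": [(inner, 1)]}}
print(fetch_keys(outer, "active"))
# [('KEY-A', 1, 2), ('KEY-B', 1, 1)] -- the nested key reports threshold 1, not 2
```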
459aefd5fc2e4b07932d2cad94bdcddfc05e9cb8
diff --git a/etc/shinken-specific.cfg b/etc/shinken-specific.cfg index <HASH>..<HASH> 100644 --- a/etc/shinken-specific.cfg +++ b/etc/shinken-specific.cfg @@ -363,12 +363,7 @@ define receiver{ # The main goal of the receiver is to load # Passive modules, like the NSCA one -# modules NSCA - - #optionnal -# manage_sub_realms 1 ; optionnal, like for poller -# manage_arbiters 1 ; optionnal : take data from Arbiter. There should be -# ;only one broker for the arbiter + #modules NSCA timeout 3 ; 'ping' timeout data_timeout 120 ; 'data send' timeout diff --git a/shinken/daemons/receiverdaemon.py b/shinken/daemons/receiverdaemon.py index <HASH>..<HASH> 100644 --- a/shinken/daemons/receiverdaemon.py +++ b/shinken/daemons/receiverdaemon.py @@ -186,6 +186,7 @@ class Receiver(BaseSatellite): def do_loop_turn(self): + print "." # Begin to clean modules self.check_and_del_zombie_modules() diff --git a/shinken/modules/nsca.py b/shinken/modules/nsca.py index <HASH>..<HASH> 100644 --- a/shinken/modules/nsca.py +++ b/shinken/modules/nsca.py @@ -161,7 +161,6 @@ class NSCA_arbiter(BaseModule): while not self.interrupted: inputready,outputready,exceptready = select.select(input,[],[], 1) - for s in inputready: if s == server: # handle the server socket
Clean: receiver sample conf.
Alignak-monitoring_alignak
train
981efaa7071bd436a1b55078456bc44420435970
diff --git a/src/reap/commands.py b/src/reap/commands.py index <HASH>..<HASH> 100644 --- a/src/reap/commands.py +++ b/src/reap/commands.py @@ -32,6 +32,7 @@ def login(args): request.add_header('Content-Type', 'application/json') request.add_header('Accept', 'application/json') request.add_header('Authorization', 'Basic ' + auth) + request.add_header('User-Agent', 'reap') try: response = urlopen(request) set_password(base_uri, args.username, password) diff --git a/src/reap/support.py b/src/reap/support.py index <HASH>..<HASH> 100644 --- a/src/reap/support.py +++ b/src/reap/support.py @@ -57,6 +57,7 @@ def get_request(path): request.add_header('Content-Type', 'application/json') request.add_header('Accept', 'application/json') request.add_header('Authorization', 'Basic ' + auth) + request.add_header('User-Agent', 'reap') return request else: print 'Login first!'
Added User-Agent header to all requests.
jakebasile_reap
train
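Some HTTP APIs reject or throttle clients that send no `User-Agent`, which is presumably why the header is added to every request above. A rough Python 3 equivalent of the request builder; the original targets Python 2's `urllib2`, and the URI and credentials here are placeholders:

```python
import base64
from urllib.request import Request

def get_request(path, base_uri, username, password):
    auth = base64.b64encode(f"{username}:{password}".encode()).decode()
    request = Request(base_uri + path)
    request.add_header("Content-Type", "application/json")
    request.add_header("Accept", "application/json")
    request.add_header("Authorization", "Basic " + auth)
    request.add_header("User-Agent", "reap")  # the fix: identify the client
    return request

req = get_request("account/who_am_i", "https://example.harvestapp.com/", "user", "pass")
print(req.get_header("User-agent"))  # urllib normalizes the header name -> 'reap'
```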
4a34e4037223d45642b2481bdfd3ca41155c034f
diff --git a/src/main/java/org/codehaus/plexus/archiver/zip/AbstractZipArchiver.java b/src/main/java/org/codehaus/plexus/archiver/zip/AbstractZipArchiver.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/codehaus/plexus/archiver/zip/AbstractZipArchiver.java +++ b/src/main/java/org/codehaus/plexus/archiver/zip/AbstractZipArchiver.java @@ -475,6 +475,7 @@ public abstract class AbstractZipArchiver extends AbstractArchiver { continue; } + if ( entry.getFile().isDirectory() && !name.endsWith("/") ) { name = name + "/"; @@ -503,15 +504,20 @@ public abstract class AbstractZipArchiver extends AbstractArchiver if ( !doFilesonly ) { Stack directories = new Stack(); - int slashPos = entry.length(); + + // Don't include the last entry itself if it's + // a dir; it will be added on its own. + int slashPos = entry.length() - ( entry.endsWith( "/" ) ? 1 : 0 ); while ( ( slashPos = entry.lastIndexOf( '/', slashPos - 1 ) ) != -1 ) { String dir = entry.substring( 0, slashPos + 1 ); - if ( addedDirs.get( prefix + dir ) != null) + + if ( addedDirs.contains( prefix + dir ) ) { break; } + directories.push( dir ); } diff --git a/src/test/java/org/codehaus/plexus/archiver/zip/ZipArchiverTest.java b/src/test/java/org/codehaus/plexus/archiver/zip/ZipArchiverTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/codehaus/plexus/archiver/zip/ZipArchiverTest.java +++ b/src/test/java/org/codehaus/plexus/archiver/zip/ZipArchiverTest.java @@ -48,6 +48,13 @@ public class ZipArchiverTest archiver.setDefaultFileMode( 0640 ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "one.txt" ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "two.txt", 0664 ); + archiver.setDefaultDirectoryMode( 0777 ); + + // reset default file mode for files included from now on + archiver.setDefaultFileMode( 0400 ); + archiver.addDirectory( getTestFile( "src/test/resources/manifests/" ), "worldwritable" ); + archiver.setDefaultDirectoryMode( 0070 ); + archiver.addDirectory( getTestFile( "src/test/resources/manifests/" ), "groupwritable" ); archiver.setDestFile( getTestFile( "target/output/archive.zip" ) ); archiver.createArchive(); @@ -59,7 +66,18 @@ public class ZipArchiverTest ZipEntry ze = (ZipEntry) e.nextElement(); if ( ze.isDirectory() ) { - assertEquals( 0500, UnixStat.PERM_MASK & ze.getUnixMode() ); + if ( ze.getName().equals( "worldwritable") ) + { + assertEquals( 0777, UnixStat.PERM_MASK & ze.getUnixMode() ); + } + if ( ze.getName().equals( "groupwritable") ) + { + assertEquals( 0070, UnixStat.PERM_MASK & ze.getUnixMode() ); + } + else + { + //assertEquals( 0500, UnixStat.PERM_MASK & ze.getUnixMode() ); + } } else {
Fixed 'addParentDirs' in ZipArchiver; it added the actual directory itself too, which messed with the file permissions if they were set for that specific directory. Enhanced JUnit test for this case (setting perms on dirs). TODO: enhance TarArchiver JUnit test too (although it works fine).
sonatype_plexus-archiver
train
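The subtle part of the fix is the starting position of the backwards search: by beginning before any trailing slash, an entry that is itself a directory no longer appears in its own parent-directory list, so its auto-created entry can no longer clobber explicitly set permissions. A hedged Python rendering of the corrected loop (names and call shape are illustrative only):

```python
def parent_dirs(entry, prefix="", added=frozenset()):
    # Start the backwards search before any trailing slash so the entry
    # itself is excluded when it already names a directory.
    dirs = []
    slash = len(entry) - (1 if entry.endswith("/") else 0)
    while (slash := entry.rfind("/", 0, slash)) != -1:
        d = entry[:slash + 1]
        if prefix + d in added:  # stop at the first parent already emitted
            break
        dirs.append(d)
    return dirs[::-1]  # outermost directory first

print(parent_dirs("a/b/c/"))  # ['a/', 'a/b/'] -- 'a/b/c/' itself is not listed
```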
8d778ef12153e95f8a5985075f646be0e08eba83
diff --git a/ELiDE/statgrid.py b/ELiDE/statgrid.py index <HASH>..<HASH> 100644 --- a/ELiDE/statgrid.py +++ b/ELiDE/statgrid.py @@ -180,7 +180,22 @@ class StatListView(ListView, MirrorMapping): selected_remote=self.setter('remote') ) + def init_control_config(self, key): + if key not in self.control: + self.set_control(key, 'readout') + if key not in self.config: + cfgd = dict(self.config) + cfgd[key] = default_cfg + self.remote['_config'] = cfgd + else: + cfgd = dict(self.config) + for option in default_cfg: + if option not in cfgd[key]: + cfgd[key][option] = default_cfg[option] + self.remote['_config'] = cfgd + def set_value(self, k, v): + self.init_control_config(k) self.layout.set_remote_value(self.remote, k, v) def get_adapter(self): @@ -199,20 +214,6 @@ class StatListView(ListView, MirrorMapping): allow_empty_selection=True ) - def init_control_config(self, key): - if key not in self.control: - self.set_control(key, 'readout') - if key not in self.config: - cfgd = dict(self.config) - cfgd[key] = default_cfg - self.remote['_config'] = cfgd - else: - cfgd = dict(self.config) - for option in default_cfg: - if option not in cfgd[key]: - cfgd[key][option] = default_cfg[option] - self.remote['_config'] = cfgd - def set_control(self, key, control): if '_control' not in self.mirror: ctrld = {key: control} @@ -235,7 +236,6 @@ class StatListView(ListView, MirrorMapping): self.remote['_config'][key] = newcfg def get_cls_dicts(self, key, value): - self.init_control_config(key) control_type = self.control.get(key, 'readout') cfg = self.config.get(key, default_cfg) keydict = { @@ -425,7 +425,6 @@ class StatListViewConfigurator(StatListView): ) def get_cls_dicts(self, key, value): - self.init_control_config(key) control_type = self.control.get(key, 'readout') cfg = self.config.get(key, default_cfg) deldict = {
Put init_control_config in a more logical place.
LogicalDash_LiSE
train
a02d2874c0d6e70ba7efb1e91ae8a4a48ac4ae09
diff --git a/lib/eye/process/commands.rb b/lib/eye/process/commands.rb index <HASH>..<HASH> 100644 --- a/lib/eye/process/commands.rb +++ b/lib/eye/process/commands.rb @@ -135,7 +135,7 @@ private sleep self[:stop_grace].to_f - # if process not die here, by default we kill it force + # if process not die here, by default we force kill it if process_realy_running? warn "process not die after TERM and stop_grace #{self[:stop_grace].to_f}s, so send KILL" send_signal(:KILL) diff --git a/lib/eye/process/system.rb b/lib/eye/process/system.rb index <HASH>..<HASH> 100644 --- a/lib/eye/process/system.rb +++ b/lib/eye/process/system.rb @@ -49,12 +49,13 @@ module Eye::Process::System end def send_signal(code) - info "send signal #{code} to #{self.pid}" - res = Eye::System.send_signal(self.pid, code) - error(res[:message]) if res[:status] != :ok - res[:status] == :ok + msg = "send_signal #{code} to #{self.pid}" + msg += ", error<#{res[:error]}>" if res[:error] + info msg + + res[:result] == :ok end # non blocking actor timeout diff --git a/lib/eye/system.rb b/lib/eye/system.rb index <HASH>..<HASH> 100644 --- a/lib/eye/system.rb +++ b/lib/eye/system.rb @@ -13,12 +13,9 @@ module Eye::System else false end + {:result => res} - rescue Errno::ESRCH => ex - {:error => ex} - rescue Errno::EPERM => ex - {:error => ex} - rescue Exception => ex + rescue => ex {:error => ex} end @@ -40,17 +37,15 @@ module Eye::System end code = code.to_s.upcase if code.is_a?(String) || code.is_a?(Symbol) - ::Process.kill(code, pid) if pid - {:status => :ok} - - rescue Errno::ESRCH - {:status => :error, :message => 'process not found'} - - rescue Errno::EPERM - {:status => :error, :message => 'wrong permissions to kill'} + if pid + ::Process.kill(code, pid) + {:result => :ok} + else + {:error => Exception.new('no_pid')} + end - rescue => e - {:status => :error, :message => "failed signal #{code}: #{e.message}"} + rescue => ex + {:error => ex} end # Daemonize cmd, and detach diff --git a/spec/process/system_spec.rb b/spec/process/system_spec.rb index <HASH>..<HASH> 100644 --- a/spec/process/system_spec.rb +++ b/spec/process/system_spec.rb @@ -73,12 +73,12 @@ describe "Eye::Process::System" do end it "send_signal ok" do - mock(Eye::System).send_signal(@process.pid, :TERM){ {:status => :ok} } + mock(Eye::System).send_signal(@process.pid, :TERM){ {:result => :ok} } @process.send_signal(:TERM).should == true end it "send_signal not ok" do - mock(Eye::System).send_signal(@process.pid, :TERM){ {:status => :error} } + mock(Eye::System).send_signal(@process.pid, :TERM){ {:error => Exception.new('bla')} } @process.send_signal(:TERM).should == false end diff --git a/spec/system_spec.rb b/spec/system_spec.rb index <HASH>..<HASH> 100644 --- a/spec/system_spec.rb +++ b/spec/system_spec.rb @@ -93,13 +93,13 @@ describe "Eye::System" do describe "send signal" do it "send_signal to spec" do - Eye::System.send_signal($$, 0)[:status].should == :ok + Eye::System.send_signal($$, 0)[:result].should == :ok end it "send_signal to unexisted process" do res = Eye::System.send_signal(-12234) - res[:status].should == :error - res[:message].should include("not found") + res[:status].should == nil + res[:error].message.should include("No such process") end it "send_signal to daemon" do @@ -108,7 +108,7 @@ describe "Eye::System" do # process should be alive Eye::System.pid_alive?(@pid).should == true - Eye::System.send_signal(@pid, :term)[:status].should == :ok + Eye::System.send_signal(@pid, :term)[:result].should == :ok sleep 0.2 
Eye::System.pid_alive?(@pid).should == false @@ -120,7 +120,7 @@ describe "Eye::System" do sleep 4 - Eye::System.send_signal(@pid, :usr1)[:status].should == :ok + Eye::System.send_signal(@pid, :usr1)[:result].should == :ok sleep 0.5
refactor system send_signal
kostya_eye
train
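The refactor changes the system layer's return contract from `{:status, :message}` to a result-or-error hash. A Python analogue of the new shape built on `os.kill`; the names mirror the Ruby code, not any real Python API:

```python
import os
import signal

def send_signal(pid, sig=signal.SIGTERM):
    """Return {'result': 'ok'} on success or {'error': exc} on any failure."""
    try:
        if not pid:
            return {"error": RuntimeError("no_pid")}
        os.kill(pid, sig)
        return {"result": "ok"}
    except OSError as exc:  # ESRCH (no such process), EPERM (not permitted), ...
        return {"error": exc}

res = send_signal(os.getpid(), 0)  # signal 0 only checks that the pid is alive
print(res)  # {'result': 'ok'}
```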
02659fa9828a816c4f000adf7a48e537be5af02b
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -177,7 +177,9 @@ function getPieceList (files, pieceLength, cb) { function onFiles (files, opts, cb) { var announceList = opts.announceList !== undefined ? opts.announceList - : module.exports.announceList // default + : opts.announce !== undefined + ? opts.announce.map(function (u) { return [ u ] }) + : module.exports.announceList // default var torrent = { info: {
support `announce` key
webtorrent_create-torrent
train
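The fallback chain the patch introduces is: explicit `announceList`, else `announce` wrapped one tracker per tier, else the module default. The same logic in Python for clarity (names are illustrative):

```python
def resolve_announce_list(opts, default=None):
    if opts.get("announceList") is not None:
        return opts["announceList"]
    if opts.get("announce") is not None:
        # each URL becomes its own tier, matching `announce.map(u => [u])`
        return [[url] for url in opts["announce"]]
    return default

print(resolve_announce_list({"announce": ["udp://tr.example:1337"]}))
# [['udp://tr.example:1337']]
```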
5b4d28d57892e1d7bf0db897e528de003cda635a
diff --git a/electionnight/views/states.py b/electionnight/views/states.py index <HASH>..<HASH> 100644 --- a/electionnight/views/states.py +++ b/electionnight/views/states.py @@ -8,6 +8,7 @@ import json from django.shortcuts import get_object_or_404 from django.urls import reverse +from almanac.models import ElectionEvent from election.models import ElectionDay from electionnight.conf import settings from electionnight.models import PageContent @@ -70,6 +71,9 @@ class StatePage(BaseView): return template def get_nav_links(self, subpath=''): + todays_events = ElectionEvent.objects.filter( + election_day__date=self.election_date + ).values_list('division__label', flat=True) state_level = DivisionLevel.objects.get(name=DivisionLevel.STATE) # All states except DC states = Division.objects.filter( @@ -89,7 +93,7 @@ class StatePage(BaseView): state.slug ), 'name': state.label, - 'live': state.has_elections(self.election_date) + 'live': state.label in todays_events } for state in states ], }
rewrite live query to use almanac, not division
The-Politico_politico-civic-election-night
train
2d96c1a4ba6c4599dbaf4b500f05e9f8ae344162
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -933,6 +933,37 @@ class SuperORCAHess(unittest.TestCase): [ 1.49837000e+01], [ 1.61995000e+01]]) + polders = np.matrix([[ -5.94341300e+00, 2.97188100e+00, 2.97161800e+00, + -2.47000000e-04, 0.00000000e+00, 0.00000000e+00], + [ -1.68000000e-04, -4.20303600e+00, 4.20264600e+00, + 2.96567900e+00, 0.00000000e+00, -0.00000000e+00], + [ -3.00000000e-06, -5.00000000e-06, 2.00000000e-06, + 2.00000000e-06, 2.96532200e+00, 4.19346600e+00], + [ 9.07849800e+00, 2.17638000e-01, 2.17719000e-01, + -5.10000000e-05, 1.00000000e-06, 0.00000000e+00], + [ 2.23000000e-04, 6.46863000e-01, -6.46912000e-01, + 1.99675500e+00, -0.00000000e+00, 1.00000000e-06], + [ 6.00000000e-06, -7.00000000e-06, 1.00000000e-06, + 3.00000000e-06, 1.99668100e+00, -6.40485000e-01], + [ -1.04399600e+00, -1.44161300e+00, -6.85449000e-01, + 2.58596400e+00, 0.00000000e+00, -1.00000000e-06], + [ 9.05935000e-01, 8.07488700e+00, -1.13410000e-02, + -2.03614200e+00, -1.00000000e-06, -2.00000000e-06], + [ -2.30000000e-05, 1.00000000e-06, -3.00000000e-06, + -2.00000000e-06, -1.26977800e+00, 1.66877300e+00], + [ -1.04451200e+00, -8.74492000e-01, -1.25246500e+00, + -1.29282500e+00, -2.23936600e+00, -3.30511000e-01], + [ -4.53132000e-01, -2.25978900e+00, -1.77146400e+00, + -1.46218500e+00, -3.30621000e-01, -2.47206900e+00], + [ -7.84977000e-01, -1.01697600e+00, -5.96557900e+00, + -3.30327000e-01, -1.84400900e+00, -2.60853300e+00], + [ -1.04451700e+00, -8.74493000e-01, -1.25246600e+00, + -1.29282600e+00, 2.23935900e+00, 3.30504000e-01], + [ -4.53141000e-01, -2.25978900e+00, -1.77145900e+00, + -1.46217900e+00, 3.30622000e-01, 2.47207600e+00], + [ 7.84977000e-01, 1.01696600e+00, 5.96557600e+00, + 3.30327000e-01, -1.84401000e+00, -2.60853100e+00]]) + #=== Defining ways to break the .hess ===# class names(object): @@ -945,12 +976,13 @@ class SuperORCAHess(unittest.TestCase): modes = 'modes' dipders = 'dipders' ir = 'ir' + polders = 'polders' suffix_dim2 = '_dim2' suffix_badfreq = '_badfreq' E = frozenset([hess, geom, atsym, energy, temp, freqs, modes, \ - dipders, ir]) + dipders, ir, polders]) suffixes = frozenset([suffix_dim2, suffix_badfreq])
tests: Added polders known-good matrix & name code Still have to write the test suite for the polarizability derivatives, though.
bskinn_opan
train
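With the known-good `polders` matrix in place, the companion test (still unwritten, per the commit message) usually reduces to a shape check plus a tolerance comparison. A plausible sketch of such a check; how the matrix is parsed out of the .hess file is left abstract here:

```python
import numpy as np

def check_polders(parsed, reference, atol=1e-6):
    # Known-good comparison: shape first, then elementwise within tolerance.
    parsed = np.asarray(parsed)
    reference = np.asarray(reference)
    assert parsed.shape == reference.shape, "polders dimension mismatch"
    assert np.allclose(parsed, reference, atol=atol), "polarizability derivatives differ"

reference = np.array([[-5.943413e+00, 2.971881e+00, 2.971618e+00,
                       -2.470000e-04, 0.0, 0.0]])
check_polders(reference.copy(), reference)  # passes; a perturbed copy would not
```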
b47dd9292a44061615540f64bd1b76c063ed6f43
diff --git a/faq-bundle/CHANGELOG.md b/faq-bundle/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/faq-bundle/CHANGELOG.md +++ b/faq-bundle/CHANGELOG.md @@ -1,5 +1,9 @@ # Contao FAQ bundle change log +### 4.2.0-RC1 (2016-05-XX) + + * Add listeners to subscribe to the core hooks (see #2). + ### 4.2.0-beta1 (2016-04-25) * Add an SVG icon and adjust to the new back end theme (see contao/core#4608). diff --git a/faq-bundle/src/DependencyInjection/ContaoFaqExtension.php b/faq-bundle/src/DependencyInjection/ContaoFaqExtension.php index <HASH>..<HASH> 100644 --- a/faq-bundle/src/DependencyInjection/ContaoFaqExtension.php +++ b/faq-bundle/src/DependencyInjection/ContaoFaqExtension.php @@ -29,7 +29,7 @@ class ContaoFaqExtension extends Extension { $loader = new YamlFileLoader( $container, - new FileLocator(__DIR__ . '/../Resources/config') + new FileLocator(__DIR__.'/../Resources/config') ); $loader->load('listener.yml'); diff --git a/faq-bundle/src/EventListener/FileMetaInformationListener.php b/faq-bundle/src/EventListener/FileMetaInformationListener.php index <HASH>..<HASH> 100644 --- a/faq-bundle/src/EventListener/FileMetaInformationListener.php +++ b/faq-bundle/src/EventListener/FileMetaInformationListener.php @@ -57,7 +57,7 @@ class FileMetaInformationListener } /** - * Fetches result from database. + * Fetches the result from the database. * * @param string $query * @param mixed $params @@ -69,7 +69,7 @@ class FileMetaInformationListener $this->framework->initialize(); /** @var Database $database */ - $database = $this->framework->getAdapter('Contao\Database')->getInstance(); + $database = $this->framework->createInstance('Contao\Database'); return $database->prepare($query)->execute($params); } diff --git a/faq-bundle/src/EventListener/InsertTagsListener.php b/faq-bundle/src/EventListener/InsertTagsListener.php index <HASH>..<HASH> 100644 --- a/faq-bundle/src/EventListener/InsertTagsListener.php +++ b/faq-bundle/src/EventListener/InsertTagsListener.php @@ -48,7 +48,7 @@ class InsertTagsListener public function onReplaceInsertTags($tag) { $elements = explode('::', $tag); - $key = strtolower($elements[0]); + $key = strtolower($elements[0]); if (!in_array($key, ['faq', 'faq_open', 'faq_url', 'faq_title'], true)) { return false; @@ -107,6 +107,6 @@ class InsertTagsListener /** @var Config $config */ $config = $this->framework->getAdapter('Contao\Config'); - return $jumpTo->getFrontendUrl(($config->get('useAutoItem') ? '/' : '/items/') . ($faq->alias ?: $faq->id)); + return $jumpTo->getFrontendUrl(($config->get('useAutoItem') ? '/' : '/items/').($faq->alias ?: $faq->id)); } }
[Faq] Fix the coding style.
contao_contao
train
f596bab9677a3447cb7a149603ddde92729fd0f7
diff --git a/executor/opensubmit/executor/__init__.py b/executor/opensubmit/executor/__init__.py index <HASH>..<HASH> 100755 --- a/executor/opensubmit/executor/__init__.py +++ b/executor/opensubmit/executor/__init__.py @@ -248,7 +248,7 @@ def _fetch_job(config): headers = result.info() if headers["Action"] == "get_config": # The server does not know us, so it demands registration before hand. - logger.info("Machine unknown on server, perform registration first.") + logger.info("Machine unknown on server, perform 'opensubmit-exec configure' first.") return [None]*5 submid = headers["SubmissionFileId"] action = headers["Action"] @@ -282,6 +282,9 @@ def _unpack_job(config, fname, submid, action): Returns on success, or terminates the executor after notifying the OpenSubmit server about the problem. + If decompression fails, we leave the file as it is. This is intended to support + uncompressed file submission, e.g. for reports that are validity-scanned. + Returns None on error, or path were the compressed data now lives ''' basepath = config.get("Execution","directory") @@ -291,10 +294,10 @@ def _unpack_job(config, fname, submid, action): except Exception as e: logger.error("ERROR could not create temp dir: " + str(e)) return None - + if not finalpath.endswith(os.sep): finalpath += os.sep - + if zipfile.is_zipfile(fname): logger.debug("Valid ZIP file") try: @@ -312,15 +315,9 @@ def _unpack_job(config, fname, submid, action): tar.extractall(finalpath) os.remove(fname) except Exception as e: - logger.error("ERROR extracting TAR: " + str(e)) + logger.error("ERROR extracting TAR: " + str(e)) else: - try: - os.remove(fname) - except Exception as e: - logger.error("ERROR could not remove: " + str(e)) - _send_result(config, "This is not a valid compressed file.",-1, submid, action) - _cleanup_files(config, finalpath) - return None + return finalpath dircontent = os.listdir(finalpath) logger.debug("Content after decompression: "+str(dircontent)) if len(dircontent) == 0:
Accept non-archive files on executor side
troeger_opensubmit
train
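The behavioural change is the removed `else` branch: an upload that is neither zip nor tar is no longer rejected but left in place, so uncompressed single-file submissions (e.g. reports) reach the validator. A condensed Python sketch of the new flow, minus the logging and error paths:

```python
import os
import tarfile
import zipfile

def unpack_job(fname, finalpath):
    if zipfile.is_zipfile(fname):
        with zipfile.ZipFile(fname) as zf:
            zf.extractall(finalpath)
        os.remove(fname)
    elif tarfile.is_tarfile(fname):
        with tarfile.open(fname) as tar:
            tar.extractall(finalpath)
        os.remove(fname)
    # Anything else is intentionally left untouched: plain files are now
    # valid submissions instead of triggering "not a valid compressed file".
    return finalpath
```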
ea22520573671ad2a5467f8b5c68672befba10d5
diff --git a/src/Type/MessageType.php b/src/Type/MessageType.php index <HASH>..<HASH> 100644 --- a/src/Type/MessageType.php +++ b/src/Type/MessageType.php @@ -131,9 +131,7 @@ class MessageType extends ItemType * * @since Exchange 2007 * - * @var string - * - * @see \jamesiarmes\PhpEws\Enumeration\SensitivityChoicesType + * @var \jamesiarmes\PhpEws\Type\SingleRecipientType */ public $Sender;
Documentation of "Sender" corrected
jamesiarmes_php-ews
train
580cc5677c0ff2a8e76ec91b54cdd27d6d0f9e45
diff --git a/src/Assets/Package/Cluster.php b/src/Assets/Package/Cluster.php index <HASH>..<HASH> 100755 --- a/src/Assets/Package/Cluster.php +++ b/src/Assets/Package/Cluster.php @@ -439,7 +439,9 @@ class Cluster extends AbstractAssetsPackage $this->setName($package->getPrettyName()); $package_dir = $main_package ? '' : str_replace( - DirectoryHelper::slashDirname($this->getRootDirectory()) . DirectoryHelper::slashDirname($this->getAssetsDirectory()), + DirectoryHelper::slashDirname($this->getRootDirectory()) . + DirectoryHelper::slashDirname($this->getAssetsDirectory()) . + DirectoryHelper::slashDirname($this->getAssetsVendorDirectory()), '', $installer->getInstallPath($package) );
Correction to the relative path of clusters
atelierspierrot_templatengine
train
79f02ec2a1a3299464112e141bdccd8316169679
diff --git a/docs/javascript/samples/compute/RESTHopper/DelaunayMesh/app.js b/docs/javascript/samples/compute/RESTHopper/DelaunayMesh/app.js index <HASH>..<HASH> 100644 --- a/docs/javascript/samples/compute/RESTHopper/DelaunayMesh/app.js +++ b/docs/javascript/samples/compute/RESTHopper/DelaunayMesh/app.js @@ -72,7 +72,7 @@ function compute(){ RhinoCompute.computeFetch("grasshopper", args).then(result => { console.log(result); - let data = JSON.parse(result.values[0].Values[0][0].data); + let data = JSON.parse(result.values[0].InnerTree['{ 0; }'][0].data); let mesh = rhino.CommonObject.decode(data); let material = new THREE.MeshBasicMaterial( {wireframe: true, color: 0x00ff00 } ); @@ -90,15 +90,17 @@ var scene, camera, renderer, controls; function init(){ scene = new THREE.Scene(); scene.background = new THREE.Color(1,1,1); - camera = new THREE.PerspectiveCamera( 75, window.innerWidth/window.innerHeight, 1, 10000 ); - controls = new THREE.OrbitControls( camera ); + camera = new THREE.PerspectiveCamera( 45, window.innerWidth/window.innerHeight, 1, 1000 ); + renderer = new THREE.WebGLRenderer({antialias: true}); renderer.setPixelRatio( window.devicePixelRatio ); renderer.setSize( window.innerWidth, window.innerHeight ); document.body.appendChild( renderer.domElement ); - camera.position.z = 50; + controls = new THREE.OrbitControls( camera, renderer.domElement ); + + camera.position.z = 300; window.addEventListener( 'resize', onWindowResize, false );
Tweaked DMesh example to get it working again
mcneel_rhino3dm
train
41cd81310c65cf1958b039d92615ddcc126c3744
diff --git a/src/parser/Scanner.js b/src/parser/Scanner.js index <HASH>..<HASH> 100644 --- a/src/parser/Scanner.js +++ b/src/parser/Scanner.js @@ -4,7 +4,7 @@ function Scanner(source) { let _lastIndex = source.length - 1; let _currentIndex = -1; let _source = source; - + let _makeChar = (c, line, col, idx) => { return { c: c, @@ -13,37 +13,35 @@ function Scanner(source) { index: idx || _currentIndex }; }; - + this.get = function get() { - ++_currentIndex; - - - if (_currentIndex > 0 && _source[_currentIndex - 1] == "\n") { - ++_line; + _currentIndex += 1; + + if (_currentIndex > 0 && _source[_currentIndex - 1] === '\n') { + _line += 1; _col = -1; } - ++_col; - - + _col += 1; + let char = null; if (_currentIndex <= _lastIndex) { char = _source[_currentIndex]; } - + return _makeChar(char); - } - + }; + this.lookahead = function lookahead() { let lookaheadIdx = _currentIndex + 1; let lookaheadLine = _line; let lookaheadCol = _col; - - if (lookaheadIdx > 0 && _source[lookaheadIdx - 1] == "\n") { - ++lookaheadLine; + + if (lookaheadIdx > 0 && _source[lookaheadIdx - 1] === '\n') { + lookaheadLine += 1; lookaheadCol = -1; } - ++lookaheadCol; - + lookaheadCol += 1; + let char = null; if (lookaheadIdx <= _lastIndex) { char = _source[lookaheadIdx];
lint fixes for Scanner
lps-js_lps.js
train
352d99292defaee6b704084d2cea45b8836a90fd
diff --git a/tests/OtpTest.php b/tests/OtpTest.php index <HASH>..<HASH> 100644 --- a/tests/OtpTest.php +++ b/tests/OtpTest.php @@ -120,4 +120,45 @@ class OtpTest extends \PHPUnit_Framework_TestCase $this->Otp->hotp($this->secret, 'a'); } + /** + * Tests Otp->checkHotpResync() + */ + public function testHotpResync() + { + $secret = $this->secret; + + // test default counter window + $this->assertEquals(0, $this->Otp->checkHotpResync($secret, 0, '755224')); + $this->assertEquals(1, $this->Otp->checkHotpResync($secret, 0, '287082')); + $this->assertEquals(2, $this->Otp->checkHotpResync($secret, 0, '359152')); + + // test provided counter window + $this->assertEquals(3, $this->Otp->checkHotpResync($secret, 0, '969429', 3)); + $this->assertEquals(4, $this->Otp->checkHotpResync($secret, 0, '338314', 4)); + $this->assertEquals(5, $this->Otp->checkHotpResync($secret, 0, '254676', 5)); + + // test failures + $this->assertFalse($this->Otp->checkHotpResync($secret, 7, '287922')); + $this->assertFalse($this->Otp->checkHotpResync($secret, 8, '162583')); + $this->assertFalse($this->Otp->checkHotpResync($secret, 9, '399871')); + } + + /** + * @expectedException InvalidArgumentException + * @expectedExceptionMessage Counter must be integer + */ + public function testHotpResyncInvalidCounter() + { + $this->Otp->checkHotpResync($this->secret, 'a', '755224'); + } + + /** + * @expectedException InvalidArgumentException + * @expectedExceptionMessage Invalid counterwindow supplied + */ + public function testHotpResyncInvalidCounterWindow() + { + $this->Otp->checkHotpResync($this->secret, 0, '755224', 'a'); + } + }
Add Unit Tests for HotpResync
ChristianRiesen_otp
train
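A resync check walks a window of counters forward from the server's stored value and reports which counter matched, letting the server catch up with a client that generated codes offline. A minimal Python sketch of HOTP (RFC 4226) plus that window search; the test vectors above (`755224`, `287082`, `359152`, ...) are the RFC 4226 vectors for the standard test secret:

```python
import hashlib
import hmac
import struct

def hotp(secret: bytes, counter: int, digits: int = 6) -> str:
    digest = hmac.new(secret, struct.pack(">Q", counter), hashlib.sha1).digest()
    offset = digest[-1] & 0x0F
    code = struct.unpack(">I", digest[offset:offset + 4])[0] & 0x7FFFFFFF
    return str(code % 10 ** digits).zfill(digits)

def check_hotp_resync(secret: bytes, counter: int, key: str, window: int = 2):
    # Accept a code up to `window` steps ahead; return the counter that
    # matched so the caller can resynchronise, or False on no match.
    for c in range(counter, counter + window + 1):
        if hmac.compare_digest(hotp(secret, c), key):
            return c
    return False

secret = b"12345678901234567890"  # RFC 4226 test secret
assert check_hotp_resync(secret, 0, "287082") == 1
assert check_hotp_resync(secret, 0, "969429", window=3) == 3
assert check_hotp_resync(secret, 7, "287922") is False
```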
a7b0bc608e0baba765d385aea0cd7c245dd922ac
diff --git a/graylog2-server/src/main/java/org/graylog2/periodical/IndexerClusterCheckerThread.java b/graylog2-server/src/main/java/org/graylog2/periodical/IndexerClusterCheckerThread.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/main/java/org/graylog2/periodical/IndexerClusterCheckerThread.java +++ b/graylog2-server/src/main/java/org/graylog2/periodical/IndexerClusterCheckerThread.java @@ -49,8 +49,15 @@ public class IndexerClusterCheckerThread extends Periodical { @Override public void doRun() { - if (!notificationService.isFirst(Notification.Type.ES_OPEN_FILES)) + if (!notificationService.isFirst(Notification.Type.ES_OPEN_FILES)) { return; + } + + if(null == indexer.cluster()) { + LOG.info("Indexer not fully initialized yet. Skipping periodic cluster check."); + return; + } + boolean allHigher = true; for (NodeInfo node : indexer.cluster().getDataNodes()) { // Check number of maximum open files. @@ -70,6 +77,7 @@ public class IndexerClusterCheckerThread extends Periodical { allHigher = false; } } + if (allHigher) { Notification notification = notificationService.build().addType(Notification.Type.ES_OPEN_FILES); notificationService.fixed(notification);
Don't run periodic cluster check if ES client hasn't been initialized completely, fixes #<I>
Graylog2_graylog2-server
train
783e1ffe6931ee749b05f29bd82848c716ef5fdf
diff --git a/nobot/widgets.py b/nobot/widgets.py index <HASH>..<HASH> 100644 --- a/nobot/widgets.py +++ b/nobot/widgets.py @@ -7,8 +7,8 @@ from .client import ReCaptchaClient, HumanCaptchaClient class ReCaptchaWidget(forms.widgets.Widget): client_class = ReCaptchaClient - def __init__(self, attrs={}, *args, **kwargs): - self.js_attrs = attrs + def __init__(self, attrs=None, *args, **kwargs): + self.js_attrs = {} if attrs is None else attrs self.client = self.client_class() # TODO: move to client?
Fix mutable attrs, fixes #1
EnTeQuAk_nobot
train
9cef26c8a360be9b4a10cecd43f092a128f5599a
diff --git a/src/DoctrineModule/Validator/UniqueObject.php b/src/DoctrineModule/Validator/UniqueObject.php index <HASH>..<HASH> 100644 --- a/src/DoctrineModule/Validator/UniqueObject.php +++ b/src/DoctrineModule/Validator/UniqueObject.php @@ -161,7 +161,7 @@ class UniqueObject extends ObjectExists $result = array(); foreach ($this->getIdentifiers() as $identifierField) { - if (!isset($context[$identifierField])) { + if (!array_key_exists($identifierField, $context)) { throw new Exception\RuntimeException(\sprintf('Expected context to contain %s', $identifierField)); }
Changed isset for array_key_exists in context check
doctrine_DoctrineModule
train
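The reason the swap matters: PHP's `isset()` returns false when a key exists but holds `null`, so a legitimately-null identifier tripped the 'Expected context to contain' exception. The same distinction in Python, as a quick illustration:

```python
context = {"id": None}  # key present, value deliberately null

# isset($context['id'])            ~ value-based check: false for None/null
print(context.get("id") is not None)  # False -- the old check wrongly failed

# array_key_exists('id', $context) ~ key-based check: true even for None
print("id" in context)                # True  -- the fixed check passes
```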
94eb23196b1d3f5f4c4886098e5846082d5fa93f
diff --git a/lib/libnmap.js b/lib/libnmap.js index <HASH>..<HASH> 100644 --- a/lib/libnmap.js +++ b/lib/libnmap.js @@ -93,7 +93,7 @@ var version = 'v0.2.24' opts.flags = [ '-oX -', '-sn', - '--disable-arp-ping' + '-PR' ]; }
Applied fix for IPv6 & IPv4 discover mode by enabling ARP ping for IPv4 & Neighbor Discovery (RFC <I>) for IPv6
jas-_node-libnmap
train
1d79066826557ef3efcea9b1d3bb47134a489c20
diff --git a/includes/diffop/diff/Diff.php b/includes/diffop/diff/Diff.php index <HASH>..<HASH> 100644 --- a/includes/diffop/diff/Diff.php +++ b/includes/diffop/diff/Diff.php @@ -178,7 +178,20 @@ class Diff extends \GenericArrayObject implements IDiff { $serializationData = parent::unserialize( $serialization ); $this->typePointers = $serializationData['typePointers']; - $this->isAssociative = $serializationData['assoc'] === 'n' ? null : $serializationData['assoc'] === 't'; + + if ( array_key_exists( 'assoc', $serializationData ) ) { + $this->isAssociative = $serializationData['assoc'] === 'n' ? null : $serializationData['assoc'] === 't'; + } // The below cases are compat with < 0.4. + elseif ( $this instanceof MapDiff ) { + $this->isAssociative = true; + } + elseif ( $this instanceof ListDiff ) { + $this->isAssociative = false; + } + else { + $this->isAssociative = null; + } + return $serializationData; }
Added compatibility with Diff < <I> in Diff::unserialize Change-Id: I<I>d9fc<I>eb1f3ab<I>effb<I>f<I>c<I>eb
wmde_Diff
train
87baee8998fea50b72b2faaec13f16ede9bf17b8
diff --git a/tests/pyexample/example/example.py b/tests/pyexample/example/example.py index <HASH>..<HASH> 100644 --- a/tests/pyexample/example/example.py +++ b/tests/pyexample/example/example.py @@ -38,3 +38,28 @@ class Foo(object): We parse naively on commas, so the nested dictionary will throw this off """ return True + + def method_sphinx_docs(self, foo, bar=0): + """This method is documented with sphinx style docstrings. + + :param foo: The first argument. + :type foo: int + + :param int bar: The second argument. + + :returns: The sum of foo and bar. + :rtype: int + """ + return foo + bar + + def method_google_docs(self, foo, bar=0): + """This method is documented with google style docstrings. + + Args: + foo (int): The first argument. + bar (int): The second argument. + + Returns: + int: The sum of foo and bar. + """ + return foo + bar diff --git a/tests/test_integration.py b/tests/test_integration.py index <HASH>..<HASH> 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -7,12 +7,14 @@ import unittest from contextlib import contextmanager from mock import patch +import pytest +import sphinx from sphinx.application import Sphinx @contextmanager -def sphinx_build(test_dir): +def sphinx_build(test_dir, confoverrides=None): os.chdir('tests/{0}'.format(test_dir)) try: app = Sphinx( @@ -21,6 +23,7 @@ def sphinx_build(test_dir): outdir='_build/text', doctreedir='_build/.doctrees', buildername='text', + confoverrides=confoverrides, ) app.build(force_all=True) yield @@ -104,6 +107,53 @@ class PythonTests(LanguageIntegrationTests): index_file ) + @pytest.mark.skipif(sphinx.version_info < (1, 4), + reason="Cannot override extensions in Sphinx 1.3") + def test_napoleon_integration(self): + with sphinx_build('pyexample'): + example_path = '_build/text/autoapi/example/index.txt' + with io.open(example_path, encoding='utf8') as example_handle: + example_file = example_handle.read() + + # Check that docstrings are not transformed without napoleon loaded + self.assertIn( + 'Args', + example_file + ) + + self.assertIn( + 'Returns', + example_file + ) + + confoverrides={ + 'extensions': [ + 'autoapi.extension', + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + ], + } + + with sphinx_build('pyexample', confoverrides=confoverrides): + example_path = '_build/text/autoapi/example/index.txt' + with io.open(example_path, encoding='utf8') as example_handle: + example_file = example_handle.read() + + self.assertIn( + 'Parameters', + example_file + ) + + self.assertIn( + 'Return type', + example_file + ) + + self.assertNotIn( + 'Args', + example_file + ) + class DotNetTests(LanguageIntegrationTests):
Added integration tests for napoleon conversions
rtfd_sphinx-autoapi
train
bf2e4adc4975bfe2b366318b5f5bb523b4f5d31d
diff --git a/tests/test_general.py b/tests/test_general.py index <HASH>..<HASH> 100644 --- a/tests/test_general.py +++ b/tests/test_general.py @@ -1,10 +1,18 @@ """ Tests for general matchers. """ +import platform + +from taipan.testing import skipIf, skipUnless + +from callee._compat import IS_PY3 import callee.general as __unit__ from tests import MatcherTestCase +IS_PYPY3 = IS_PY3 and platform.python_implementation() == 'PyPy' + + class Any(MatcherTestCase): test_none = lambda self: self.assert_match(None) test_zero = lambda self: self.assert_match(0) @@ -121,7 +129,15 @@ class Function(MatcherTestCase): pass self.assert_match(func) - test_method = lambda self: self.assert_no_match(str.upper) + @skipIf(IS_PYPY3, "requires non-PyPy3 interpreter") + def test_method__non_pypy3(self): + self.assert_no_match(str.upper) + # TODO: accept unbound methods as functions + + @skipUnless(IS_PYPY3, "requires PyPy3") + def test_method__pypy3(self): + self.assert_match(str.upper) + test_type = lambda self: self.assert_no_match(object) def test_callable_object(self):
Skip a test that fails on pypy3
Xion_callee
train
d10ef64876174b80d1a9b8bae4c1be22c27a2312
diff --git a/src/Drupal/DrupalExtension/Context/DrupalContext.php b/src/Drupal/DrupalExtension/Context/DrupalContext.php index <HASH>..<HASH> 100644 --- a/src/Drupal/DrupalExtension/Context/DrupalContext.php +++ b/src/Drupal/DrupalExtension/Context/DrupalContext.php @@ -1027,4 +1027,25 @@ class DrupalContext extends MinkContext implements DrupalAwareInterface { /** * @} End of defgroup "drupal extensions" */ + /** + * @defgroup "debugging steps" + * @{ + */ + + /** + * Pauses the scenario until the user presses a key. Useful when debugging a scenario. + * + * @Then /^(?:|I )break$/ + */ + public function iPutABreakpoint() + { + fwrite(STDOUT, "\033[s \033[93m[Breakpoint] Press \033[1;93m[RETURN]\033[0;93m to continue...\033[0m"); + while (fgets(STDIN, 1024) == '') {} + fwrite(STDOUT, "\033[u"); + return; + } + + /** + * @} End of defgroup "debugging steps" + */ }
Issue #<I> by eliza<I>: Added debugging breakpoint.
jhedstrom_drupalextension
train
3337d2811975fc24ea7836ca9b82ec72e3a4dd59
diff --git a/ptypes/duration.go b/ptypes/duration.go index <HASH>..<HASH> 100644 --- a/ptypes/duration.go +++ b/ptypes/duration.go @@ -82,7 +82,7 @@ func Duration(p *durpb.Duration) (time.Duration, error) { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) } if p.Nanos != 0 { - d += time.Duration(p.Nanos) + d += time.Duration(p.Nanos) * time.Nanosecond if (d < 0) != (p.Nanos < 0) { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) }
ptypes: Avoid assuming time.Duration is nanoseconds (#<I>) It is spec'd that way, but best practice is to avoid it.
golang_protobuf
train
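`time.Duration`'s tick is specified to be one nanosecond, so the old code was not wrong; the change just stops baking that assumption in. The same discipline matters more in Python, where `timedelta` ticks are microseconds, so as a sketch (sub-microsecond precision is necessarily dropped):

```python
from datetime import timedelta

def duration_from_proto(seconds: int, nanos: int) -> timedelta:
    # Spell the units out instead of assuming the native tick size.
    return timedelta(seconds=seconds, microseconds=nanos / 1000)

print(duration_from_proto(3, 250_000_000))  # 0:00:03.250000
```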
9d123cd0ecc9959eca08c88f2acc68d5b2018bcb
diff --git a/src/Graviton/I18nBundle/Command/LoadMissingCommand.php b/src/Graviton/I18nBundle/Command/LoadMissingCommand.php index <HASH>..<HASH> 100644 --- a/src/Graviton/I18nBundle/Command/LoadMissingCommand.php +++ b/src/Graviton/I18nBundle/Command/LoadMissingCommand.php @@ -34,7 +34,7 @@ class LoadMissingCommand extends ContainerAwareCommand protected function configure() { $this - ->setName('graviton:i118n:load:missing') + ->setName('graviton:i18n:load:missing') ->setDescription('Generate translatables for strings in en.'); }
Fix typo in name of command I recently stumbled upon this and it's still itching. I don't think anyone is actually using the command so this change should not have any impacts.
libgraviton_graviton
train
39b98c6379611c7f871be6e7d5f5484a26084492
diff --git a/api/v1/cmd/example-scheduler/app/app.go b/api/v1/cmd/example-scheduler/app/app.go index <HASH>..<HASH> 100644 --- a/api/v1/cmd/example-scheduler/app/app.go +++ b/api/v1/cmd/example-scheduler/app/app.go @@ -53,7 +53,7 @@ func buildControllerConfig(state *internalState, shutdown <-chan struct{}) contr var ( frameworkIDStore = store.NewInMemorySingleton() controlContext = &controller.ContextAdapter{ - DoneFunc: state.isDone, + DoneFunc: state.done.Closed, FrameworkIDFunc: frameworkIDStore.Get, ErrorFunc: func(err error) { if err != nil { @@ -61,7 +61,7 @@ func buildControllerConfig(state *internalState, shutdown <-chan struct{}) contr log.Println(err) } if _, ok := err.(StateError); ok { - state.markDone() + state.done.Close() } return } @@ -257,7 +257,7 @@ func statusUpdate(state *internalState, s mesos.TaskStatus) { if state.tasksFinished == state.totalTasks { log.Println("mission accomplished, terminating") - state.markDone() + state.done.Close() } else { tryReviveOffers(state) } @@ -268,7 +268,7 @@ func statusUpdate(state *internalState, s mesos.TaskStatus) { " with reason " + s.GetReason().String() + " from source " + s.GetSource().String() + " with message '" + s.GetMessage() + "'") - state.markDone() + state.done.Close() } } diff --git a/api/v1/cmd/example-scheduler/app/state.go b/api/v1/cmd/example-scheduler/app/state.go index <HASH>..<HASH> 100644 --- a/api/v1/cmd/example-scheduler/app/state.go +++ b/api/v1/cmd/example-scheduler/app/state.go @@ -8,12 +8,12 @@ import ( "math/rand" "net/http" "os" - "sync" "time" proto "github.com/gogo/protobuf/proto" "github.com/mesos/mesos-go/api/v1/lib" "github.com/mesos/mesos-go/api/v1/lib/backoff" + "github.com/mesos/mesos-go/api/v1/lib/extras/latch" "github.com/mesos/mesos-go/api/v1/lib/httpcli" "github.com/mesos/mesos-go/api/v1/lib/httpcli/httpsched" "github.com/mesos/mesos-go/api/v1/lib/scheduler/calls" @@ -209,26 +209,11 @@ func newInternalState(cfg Config) (*internalState, error) { metricsAPI: metricsAPI, cli: buildHTTPSched(cfg, creds), random: rand.New(rand.NewSource(time.Now().Unix())), - done: make(chan struct{}), + done: latch.New(), } return state, nil } -func (state *internalState) markDone() { - state.doneOnce.Do(func() { - close(state.done) - }) -} - -func (state *internalState) isDone() bool { - select { - case <-state.done: - return true - default: - return false - } -} - type internalState struct { tasksLaunched int tasksFinished int @@ -242,7 +227,6 @@ type internalState struct { reviveTokens <-chan struct{} metricsAPI *metricsAPI err error - done chan struct{} - doneOnce sync.Once + done latch.Interface random *rand.Rand } diff --git a/api/v1/cmd/msh/msh.go b/api/v1/cmd/msh/msh.go index <HASH>..<HASH> 100644 --- a/api/v1/cmd/msh/msh.go +++ b/api/v1/cmd/msh/msh.go @@ -45,7 +45,7 @@ var ( frameworkIDStore store.Singleton shouldDecline bool - refuseSeconds = calls.RefuseSeconds(8 * time.Hour) + refuseSeconds = calls.RefuseSeconds(5 * time.Second) stop func() exitCode int wantsResources mesos.Resources @@ -94,7 +94,7 @@ func main() { func buildControllerConfig(user string) controller.Config { var ( - done = new(latch.L).Reset() + done = latch.New() caller = calls.Decorators{ calls.SubscribedCaller(frameworkIDStore.Get), }.Apply(buildClient()) diff --git a/api/v1/lib/extras/latch/latch.go b/api/v1/lib/extras/latch/latch.go index <HASH>..<HASH> 100644 --- a/api/v1/lib/extras/latch/latch.go +++ b/api/v1/lib/extras/latch/latch.go @@ -2,12 +2,22 @@ package latch import "sync" +type Interface interface { + Done() <-chan 
struct{} + Close() + Closed() bool +} + // Latch is Closed by default and should be Reset() in order to be useful. type L struct { sync.Once line chan struct{} } +func New() Interface { + return new(L).Reset() +} + func (l *L) Done() <-chan struct{} { return l.line } func (l *L) Close() {
latch: API cleanup; examples: use new latch API; msh: better default for refuseSeconds
mesos_mesos-go
train
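`latch.New()` packages the close-once-and-poll pattern, a channel closed under `sync.Once`, behind a small interface. A Python analogue built on `threading.Event`, purely to show the semantics; `wait()` stands in for receiving on `Done()`:

```python
import threading

class Latch:
    """One-shot latch: close() is idempotent, closed() is a non-blocking poll."""

    def __init__(self):
        self._event = threading.Event()

    def close(self):
        self._event.set()          # like sync.Once guarding close(chan)

    def closed(self) -> bool:
        return self._event.is_set()

    def wait(self, timeout=None) -> bool:  # stands in for `<-Done()`
        return self._event.wait(timeout)

done = Latch()
assert not done.closed()
done.close()
done.close()  # calling twice is safe, unlike closing a Go channel twice
assert done.closed()
```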
d48979465f47dd486d8fa9caa2bd870baed352a2
diff --git a/lib/express-handlebars.js b/lib/express-handlebars.js index <HASH>..<HASH> 100644 --- a/lib/express-handlebars.js +++ b/lib/express-handlebars.js @@ -26,7 +26,7 @@ function ExpressHandlebars(config) { extname : '.handlebars', layoutsDir : undefined, // Default layouts directory is relative to `express settings.view` + `layouts/` partialsDir : undefined, // Default partials directory is relative to `express settings.view` + `partials/` - defaultLayout : undefined, + defaultLayout : 'default', helpers : undefined, compilerOptions: undefined, }, config);
Update express-handlebars.js
ericf_express-handlebars
train
74d721845e41283fbc74b6dc0bb9387d605d15bd
diff --git a/core/src/main/java/hudson/model/Run.java b/core/src/main/java/hudson/model/Run.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/model/Run.java +++ b/core/src/main/java/hudson/model/Run.java @@ -171,6 +171,15 @@ public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,Run protected transient final long timestamp; /** + * When the build has started running. + * + * For historical reasons, 0 means no value is recorded. + * + * @see #getStartTime() + */ + private long startTime; + + /** * The build result. * This value may change while the state is in {@link State#BUILDING}. */ @@ -520,6 +529,8 @@ public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,Run /** * When the build is scheduled. + * + * @see #getStartTimeInMillis() */ @Exported public Calendar getTimestamp() { @@ -542,6 +553,19 @@ public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,Run return timestamp; } + /** + * When the build has started running in an executor. + * + * For example, if a build is scheduled 1pm, and stayed in the queue for 1 hour (say, no idle slaves), + * then this method returns 2pm, which is the time the job moved from the queue to the building state. + * + * @see #getTimestamp() + */ + public final long getStartTimeInMillis() { + if (startTime==0) return timestamp; // fallback: approximate by the queuing time + return startTime; + } + @Exported public String getDescription() { return description; @@ -1620,6 +1644,7 @@ public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,Run */ protected void onStartBuilding() { state = State.BUILDING; + startTime = System.currentTimeMillis(); if (runner!=null) RunnerStack.INSTANCE.push(runner); }
keep track of the time when the build has actually started
jenkinsci_jenkins
train
646ae61c055fc26eff5cce2e45c02075eb6f6ce2
diff --git a/integration/integration_test.go b/integration/integration_test.go index <HASH>..<HASH> 100644 --- a/integration/integration_test.go +++ b/integration/integration_test.go @@ -7,6 +7,7 @@ import ( "github.com/cloudfoundry-incubator/app-manager/integration/app_manager_runner" Bbs "github.com/cloudfoundry-incubator/runtime-schema/bbs" "github.com/cloudfoundry-incubator/runtime-schema/models" + steno "github.com/cloudfoundry/gosteno" "github.com/cloudfoundry/gunk/timeprovider" "github.com/cloudfoundry/yagnats" @@ -22,7 +23,17 @@ var _ = Describe("Starting apps", func() { BeforeEach(func() { natsClient = natsRunner.MessageBus - bbs = Bbs.NewBBS(etcdRunner.Adapter(), timeprovider.NewTimeProvider()) + + logSink := steno.NewTestingSink() + + steno.Init(&steno.Config{ + Sinks: []steno.Sink{logSink}, + }) + + logger := steno.NewLogger("the-logger") + steno.EnterTestMode() + + bbs = Bbs.NewBBS(etcdRunner.Adapter(), timeprovider.NewTimeProvider(), logger) var err error var presenceStatus <-chan bool diff --git a/main.go b/main.go index <HASH>..<HASH> 100644 --- a/main.go +++ b/main.go @@ -140,5 +140,5 @@ func initializeBbs(logger *steno.Logger) Bbs.AppManagerBBS { logger.Fatalf("Error connecting to etcd: %s\n", err) } - return Bbs.NewAppManagerBBS(etcdAdapter, timeprovider.NewTimeProvider()) + return Bbs.NewAppManagerBBS(etcdAdapter, timeprovider.NewTimeProvider(), logger) }
Provide logger to bbs [#<I>]
cloudfoundry-attic_app-manager
train
0f2d9c91b5b2b4f32ca4f1298c2549c2b03bc967
diff --git a/GPflow/sgpr.py b/GPflow/sgpr.py index <HASH>..<HASH> 100644 --- a/GPflow/sgpr.py +++ b/GPflow/sgpr.py @@ -209,11 +209,12 @@ class GPRFITC(GPModel): tmp = tf.matrix_triangular_solve(tf.transpose(L), gamma, lower=False) mean = tf.matmul( tf.transpose(w), tmp ) + self.mean_function(Xnew) + intermediateA = tf.matrix_triangular_solve(L, w, lower=True) if full_cov: - raise NotImplementedError + var = self.kern.K(Xnew) - tf.matmul(tf.transpose(w), w) + tf.matmul(tf.transpose(intermediateA),intermediateA) + var = tf.tile(tf.expand_dims(var, 2), tf.pack([1,1, tf.shape(self.Y)[1]])) else: - intermediateA = tf.matrix_triangular_solve(L, w, lower=True) var = self.kern.Kdiag(Xnew) - tf.reduce_sum(tf.square(w), 0) + tf.reduce_sum(tf.square(intermediateA), 0) # size( Xnew, ) var = tf.tile(tf.expand_dims(var, 1), tf.pack([1, tf.shape(self.Y)[1]]))
Adding full covariance predictions to FITC.
GPflow_GPflow
train
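The new `full_cov` branch computes the FITC predictive covariance as K(X*, X*) - w^T w + A^T A with A = L^{-1} w, rather than only its diagonal. A NumPy transcription of the formula, useful for checking shapes and symmetry (variable names follow the diff; this is a sketch, not GPflow's API):

```python
import numpy as np

def fitc_full_cov(Knn, w, L):
    # Knn: (n, n) prior covariance at test points
    # w:   (m, n) cross-term, L: (m, m) lower-triangular Cholesky factor
    A = np.linalg.solve(L, w)          # the triangular solve in the TF code
    return Knn - w.T @ w + A.T @ A     # (n, n), symmetric by construction

m, n = 3, 4
rng = np.random.default_rng(0)
w = rng.standard_normal((m, n))
L = np.tril(rng.standard_normal((m, m))) + 3.0 * np.eye(m)
var = fitc_full_cov(np.eye(n), w, L)
print(var.shape, np.allclose(var, var.T))  # (4, 4) True
```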