hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses 1) |
---|---|---|---|---|
0a5632c529ce1f3f0c6988db7e9d9337a1377982 | diff --git a/test/tests/revwalk.js b/test/tests/revwalk.js
index <HASH>..<HASH> 100644
--- a/test/tests/revwalk.js
+++ b/test/tests/revwalk.js
@@ -9,7 +9,7 @@ describe("Revwalk", function() {
var Oid = require("../../lib/oid");
// Set a reasonable timeout here now that our repository has grown.
- this.timeout(150000);
+ this.timeout(60000);
before(function(done) {
var test = this;
@@ -99,11 +99,13 @@ describe("Revwalk", function() {
return done();
}
- for (var i = 0; i < 1000; i++) {
+ for (var i = 0; i < 500; i++) {
commit.author().name();
commit.author().email();
- global.gc();
+ if ( i % 250 === 0) {
+ global.gc();
+ }
}
});
}); | stop thrashing cpu with GCs | nodegit_nodegit | train |
2248f7540686661aa17a7a7768e5935689fb71d2 | diff --git a/lib/ruote/exp/ro_variables.rb b/lib/ruote/exp/ro_variables.rb
index <HASH>..<HASH> 100644
--- a/lib/ruote/exp/ro_variables.rb
+++ b/lib/ruote/exp/ro_variables.rb
@@ -148,14 +148,12 @@ module Ruote::Exp
Ruote.unset(h.variables, var)
end
- return unless should_persist
-
- if r = try_persist # persist failed, have to retry
+ if should_persist && r = try_persist # persist failed, have to retry
@h = r
un_set_variable(op, var, val, true)
- else # success
+ else # success (even when should_persist == false)
@context.storage.put_msg("variable_#{op}", 'var' => var, 'fei' => h.fei)
end | made sure "variable_[un]set" is always emitted | jmettraux_ruote | train |
dc9cbca9bdfbb5278aacaefe4e895be094f8eebc | diff --git a/lib/json_api_resource/version.rb b/lib/json_api_resource/version.rb
index <HASH>..<HASH> 100644
--- a/lib/json_api_resource/version.rb
+++ b/lib/json_api_resource/version.rb
@@ -1,3 +1,3 @@
module JsonApiResource
- VERSION = "0.4.5"
+ VERSION = "1.0.0"
 end | revving version to 1, due to breaking interface changes | avvo_json_api_resource | train |
b82867be46ca5e2812169cfe97f4e9a34f1fbe2f | diff --git a/boundary/api_cli.py b/boundary/api_cli.py
index <HASH>..<HASH> 100755
--- a/boundary/api_cli.py
+++ b/boundary/api_cli.py
@@ -18,6 +18,7 @@
import argparse
import logging
import os
+import sys
import requests
import urllib
from pygments import highlight, lexers, formatters
@@ -300,7 +301,10 @@ class ApiCli(object):
self.handleResults(result)
def colorize_json(self, json):
- return highlight(json, lexers.JsonLexer(), formatters.TerminalFormatter())
+ if sys.stdout.isatty():
+ return highlight(json, lexers.JsonLexer(), formatters.TerminalFormatter())
+ else:
+ return json
def handleResults(self, result):
""" | Make colorized output conditional based whether standard out is a tty or not | boundary_pulse-api-cli | train |
531752696fef5b2c2cc5fb44efab1a633ac906c1 | diff --git a/peyotl/phylesystem/git_actions.py b/peyotl/phylesystem/git_actions.py
index <HASH>..<HASH> 100644
--- a/peyotl/phylesystem/git_actions.py
+++ b/peyotl/phylesystem/git_actions.py
@@ -153,20 +153,21 @@ class GitAction(object):
pat = re.compile(r'.*_study_{i}_[0-9]+'.format(i=study_id))
head_shas = git(self.gitdir, self.gitwd, "show-ref", "--heads")
ret = {}
- for lin in head_shas:
+ _LOG.debug('find_WIP_branches head_shas = "{}"'.format(head_shas.split('\n')))
+ for lin in head_shas.split('\n'):
try:
local_branch_split = lin.split(' refs/heads/')
if len(local_branch_split) == 2:
sha, branch = local_branch_split
- if pat.match(branch):
+ if pat.match(branch) or branch == 'master':
ret[branch] = sha
except:
- pass
+ raise
return ret
def _find_head_sha(self, frag, parent_sha):
head_shas = git(self.gitdir, self.gitwd, "show-ref", "--heads")
- for lin in head_shas:
+ for lin in head_shas.split('\n'):
#_LOG.debug("lin = '{l}'".format(l=lin))
if lin.startswith(parent_sha):
local_branch_split = lin.split(' refs/heads/')
diff --git a/test_git_workflows.py b/test_git_workflows.py
index <HASH>..<HASH> 100755
--- a/test_git_workflows.py
+++ b/test_git_workflows.py
@@ -47,6 +47,7 @@ class TestPhylesystem(unittest.TestCase):
finally:
ga.release_lock()
_LOG.debug('test sha = "{}"'.format(sha))
+ self.assertEquals(wip_map.keys(), ['master'])
acurr_obj = json.loads(curr)
zcurr_obj = copy.deepcopy(acurr_obj)
ac = acurr_obj['nexml'].get("^acount", 0)
@@ -66,6 +67,25 @@ class TestPhylesystem(unittest.TestCase):
self.assertNotEqual(v1b['sha'], v2b['sha'])
self.assertEqual(v1b['sha'], ga.get_master_sha()) # not locked!
self.assertTrue(v2b['merge_needed'])
+
+ # fetching studies (GETs in the API) should report
+ # the existence of multiple branches for this study...
+ ga.acquire_lock()
+ try:
+ t, ts, wip_map = ga.return_study(_SID)
+ finally:
+ ga.release_lock()
+ self.assertEquals(wip_map['master'], v1b['sha'])
+ self.assertEquals(wip_map['test_gh_login_study_9_0'], v2b['sha'])
+
+ # but not for other studies...
+ ga.acquire_lock()
+ try:
+ t, ts, wip_map = ga.return_study('10')
+ finally:
+ ga.release_lock()
+ self.assertEquals(wip_map['master'], v1b['sha'])
+ self.assertEquals(wip_map.keys(), ['master'])
# add a fourth commit onto commit 2. This should merge to master
ac += 1
@@ -89,6 +109,7 @@ class TestPhylesystem(unittest.TestCase):
self.assertEqual(mblob["error"], 0)
self.assertEqual(mblob["resource_id"], _SID)
+
# add a 7th commit onto commit 6. This should NOT merge to master because we don't give it the secret arg.
zc += 1
zcurr_obj['nexml']["^zcount"] = zc
@@ -96,7 +117,7 @@ class TestPhylesystem(unittest.TestCase):
self.assertNotEqual(v3b['sha'], v5b['sha'])
self.assertTrue(v5b['merge_needed'])
- # add a 7th commit onto commit 6. This should NOT merge to master because we don't give it the secret arg.
+ # add a 7th commit onto commit 6. This should merge to master because we don't give it the secret arg.
zc += 1
zcurr_obj['nexml']["^zcount"] = zc
v6b = commit_and_try_merge2master(ga, zcurr_obj, _SID, _AUTH, v5b['sha'], merged_sha=mblob['merged_sha'])
@@ -105,6 +126,14 @@ class TestPhylesystem(unittest.TestCase):
self.assertFalse(v6b['merge_needed'])
self.assertEqual(v6b['branch_name'], 'master')
+ # after the merge we should be back down to 1 branch for this study
+ ga.acquire_lock()
+ try:
+ t, ts, wip_map = ga.return_study(_SID)
+ finally:
+ ga.release_lock()
+ self.assertEquals(wip_map.keys(), ['master'])
+
if __name__ == "__main__":
unittest.main()
\ No newline at end of file | returning map of WIPs for return_study. Tested. | OpenTreeOfLife_peyotl | train |
e33abedb67fd9233c2d869fe42c80ed37952ccde | diff --git a/media/boom/js/boom.assets.js b/media/boom/js/boom.assets.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.assets.js
+++ b/media/boom/js/boom.assets.js
@@ -518,8 +518,6 @@ $.extend($.boom.asset, {
this.rid = rid;
- $.boom.events.register('asset.clickBefore', 'browser', { rid: rid });
-
var url = '/cms/assets/view/' + this.rid;
return $.get( url );
@@ -556,8 +554,6 @@ $.extend($.boom.asset, {
var self = this;
var rids = $.boom.history.getHash().split('/')[1].split('-');
- $.boom.events.register('asset.clickAfter', 'browser', { rid: this.rid });
-
// Make the tag editor work.
$('#b-tags').tagger({
type: 'asset',
@@ -760,7 +756,6 @@ $.extend($.boom.assets.tag, {
},
bind : function() {
- $.boom.events.register('tag.clickAfter', 'browser');
$('.b-items-thumbs .thumb').captions($.boom.config.captions);
}
diff --git a/media/boom/js/boom.core.js b/media/boom/js/boom.core.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.core.js
+++ b/media/boom/js/boom.core.js
@@ -468,8 +468,6 @@ $.ajaxSetup({
$.boom.loader.hide().hide('dialog');
- $.boom.events.register('page.saveError', 'sites');
-
if ( showError ) {
var errString;
diff --git a/media/boom/js/boom.page.js b/media/boom/js/boom.page.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.page.js
+++ b/media/boom/js/boom.page.js
@@ -229,8 +229,6 @@ $.extend($.boom, {
self.settings.bind();
- $.boom.hooks.register('page.init');
-
return this;
},
@@ -300,15 +298,6 @@ $.extend($.boom, {
});
- $.boom.hooks.register('page.saveError', 'sites', function(vars){
-
- if (window.console) {
-
- console.debug('POST data debug:', data);
- }
-
- }, true);
-
requestdata = $.extend({
data: JSON.stringify(data)
diff --git a/media/boom/js/boom.people.js b/media/boom/js/boom.people.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.people.js
+++ b/media/boom/js/boom.people.js
@@ -140,8 +140,6 @@ $.extend($.boom.person, {
this.rid = rid;
- $.boom.events.register('person.clickBefore', 'browser', { rid: rid });
-
var segments = [
rid
].join('/'),
@@ -155,8 +153,6 @@ $.extend($.boom.person, {
bind: function(){
var self = this;
-
- $.boom.events.register('person.clickAfter', 'browser', { rid: this.rid });
$.boom.dialog.bind({
image: $('.boom-asset-preview')
diff --git a/media/boom/js/boom.tagmanager.js b/media/boom/js/boom.tagmanager.js
index <HASH>..<HASH> 100755
--- a/media/boom/js/boom.tagmanager.js
+++ b/media/boom/js/boom.tagmanager.js
@@ -173,16 +173,6 @@ $.widget( 'boom.browser', {
$('.b-tags-tree')
.tree(editableTreeConfig);
- $.boom.hooks.register('tag.clickAfter', 'browser', function(){
-
- $( '#boom-tagmanager-sortby-select' ).change(function(event){
-
- self.options.sortby = this.value;
-
- $.boom.history.refresh();
- });
- });
-
this.route();
}, | delete references to $.boom.hooks and $.boom.events | boomcms_boom-core | train |
556ad95eaf52550cae4b2cc9ce0d35bba6cc5f1b | diff --git a/chemlab/core/attributes.py b/chemlab/core/attributes.py
index <HASH>..<HASH> 100644
--- a/chemlab/core/attributes.py
+++ b/chemlab/core/attributes.py
@@ -168,16 +168,36 @@ class NDArrayAttr(AtomicArrayAttr):
class BondsAttr(object):
def __init__(self):
self.name = 'bonds'
+ self.fieldname = 'bonds'
def from_array(self, sys, arr):
if arr == None:
- self.on_empty()
+ self.on_empty(sys)
else:
sys.bonds = arr
-
+
+ def assign(self, sys, arr):
+ getattr(sys, self.name)[:] = arr
+
def on_add_molecule(self, sys, mol):
- sys.bonds = np.concatenate((sys.bonds, mol.bonds + sys._at_counter))
+ if sys.bonds.size == 0:
+ sys.bonds = mol.bonds.copy()
+ else:
+ sys.bonds = np.concatenate((sys.bonds, mol.bonds.copy() + sys._at_counter))
+ def on_get_molecule_entry(self, sys, index):
+ bond_mask = np.zeros(sys.n_bonds, 'bool')
+ for i, (s, e) in enumerate(sys.bonds):
+ sel_ind_start = sys.mol_indices[index]
+ sel_ind_end = sel_ind_start + sys.mol_n_atoms[index]
+
+ is_start = (s >= sel_ind_start) & (s <= sel_ind_end)
+ is_end = (e >= sel_ind_start) & (e <= sel_ind_end)
+
+ bond_mask[i] = is_start and is_end
+
+ return getattr(sys, self.name)[bond_mask] - sys.mol_indices[index]
+
def on_empty(self, sys):
# No idea how many bonds
sys.bonds = np.zeros((0, 2), 'int')
@@ -239,6 +259,13 @@ class BondsAttr(object):
return new_b
+ def concatenate(self, sys, othersys):
+ # Need to add an offset when concatenating
+ if sys.bonds.size == 0:
+ return othersys.bonds + sys.n_atoms
+ else:
+ return np.concatenate((sys.bonds, othersys.bonds + sys.n_atoms))
+
class MArrayAttr(object):
def __init__(self, name, fieldname, dtype, default=None):
         self.name = name | Also worked out concatenations | chemlab_chemlab | train |
5b127f887949a69dc3d401b214aad6e825e737a1 | diff --git a/resources/lang/he-IL/forms.php b/resources/lang/he-IL/forms.php
index <HASH>..<HASH> 100644
--- a/resources/lang/he-IL/forms.php
+++ b/resources/lang/he-IL/forms.php
@@ -154,6 +154,7 @@ return [
'about-this-page' => 'About this page',
'days-of-incidents' => 'ืืื ืืืื ืฉื ืืืจืืขืื ืืืจืืืช?',
'time_before_refresh' => 'Status page refresh rate (in seconds)',
+ 'major_outage_rate' => 'Major outage threshold (in %)',
'banner' => 'Banner Image',
'banner-help' => "It's recommended that you upload files no bigger than 930px wide",
'subscribers' => 'Allow people to signup to email notifications?',
@@ -177,8 +178,10 @@ return [
'incident-date-format' => 'Incident timestamp format',
],
'security' => [
- 'allowed-domains' => 'Allowed domains',
- 'allowed-domains-help' => 'Comma separated. The domain set above is automatically allowed by default.',
+ 'allowed-domains' => 'Allowed domains',
+ 'allowed-domains-help' => 'Comma separated. The domain set above is automatically allowed by default.',
+ 'always-authenticate' => 'Always authenticate',
+ 'always-authenticate-help' => 'Require login to view any Cachet page',
],
'stylesheet' => [
'custom-css' => 'Custom Stylesheet', | New translations forms.php (Hebrew) | CachetHQ_Cachet | train |
fec81da49ee125c3810bac92d472b27a544bef36 | diff --git a/core/core.js b/core/core.js
index <HASH>..<HASH> 100644
--- a/core/core.js
+++ b/core/core.js
@@ -822,6 +822,18 @@ exports._setup = function() {
_globals.core.GamepadManager.prototype._gpButtonsPollInterval
+ _globals.core.GamepadManager.prototype._gpPollInterval
+
+ _globals.core.GamepadManager.prototype._pollGamepads = function(self) {
+ clearInterval(self._gpPollInterval)
+ var gamepads = navigator.getGamepads ? navigator.getGamepads() : (navigator.webkitGetGamepads ? navigator.webkitGetGamepads : []);
+ for (var i = 0; i < gamepads.length; ++i) {
+ var gamepad = gamepads[i]
+ if (gamepad)
+ self._onGamepadConnected({ 'gamepad': gamepad })
+ }
+ }
+
_globals.core.GamepadManager.prototype._gpButtonCheckLoop = function(self) {
clearInterval(self._gpButtonsPollInterval);
var gamepads = navigator.getGamepads ? navigator.getGamepads() : (navigator.webkitGetGamepads ? navigator.webkitGetGamepads : []);
@@ -2036,19 +2048,9 @@ exports._bootstrap = function(self, name) {
break;
case 'core.GamepadManager':
- var gpPollInterval
if (!('ongamepadconnected' in window))
- gpPollInterval = setInterval(pollGamepads, 1000)
-
- function pollGamepads() {
- clearInterval(gpPollInterval)
- var gamepads = navigator.getGamepads ? navigator.getGamepads() : (navigator.webkitGetGamepads ? navigator.webkitGetGamepads : []);
- for (var i = 0; i < gamepads.length; ++i) {
- var gamepad = gamepads[i]
- if (gamepad)
- self._onGamepadConnected({ 'gamepad': gamepad })
- }
- }
+ self._gpPollInterval = setInterval(function() { self._pollGamepads(self) }, 1000)
+
window.addEventListener('gamepadconnected', self._onGamepadConnected.bind(self));
window.addEventListener('gamepaddisconnected', self._onGamepadDisconnected.bind(self));
break; | Fix warning: move gamepad polling code to considered prototype. | pureqml_qmlcore | train |
6c38dffd1792f653ff2d8bb85bf35528ed707537 | diff --git a/commands/remote.go b/commands/remote.go
index <HASH>..<HASH> 100644
--- a/commands/remote.go
+++ b/commands/remote.go
@@ -89,7 +89,6 @@ func transformRemoteArgs(args *Args) {
}
host = hostConfig.Host
- isPrivate := parseRemotePrivateFlag(args)
numWord := 0
for i, p := range args.Params {
if !looksLikeFlag(p) && (i < 1 || args.Params[i-1] != "-t") {
@@ -107,10 +106,19 @@ func transformRemoteArgs(args *Args) {
if strings.ToLower(owner) == strings.ToLower(hostConfig.User) {
owner = hostConfig.User
- isPrivate = true
}
project := github.NewProject(owner, name, host)
+ gh := github.NewClient(project.Host)
+ repo, err := gh.Repository(project)
+ if err != nil {
+ if strings.Contains(err.Error(), "HTTP 404") {
+ err = fmt.Errorf("Error: repository %s/%s doesn't exist", project.Owner, project.Name)
+ }
+ utils.Check(err)
+ }
+
+ isPrivate := repo.Private || repo.Permissions.Push || parseRemotePrivateFlag(args)
url := project.GitURL("", "", isPrivate || project.Host != github.GitHubHost)
args.AppendParams(url)
} | Add repo visibility and permission check for remote add command | github_hub | train |
4b5038eda2f6238333a3f54706f031f5999e4dc3 | diff --git a/src/Forms/Fields/AbstractField.php b/src/Forms/Fields/AbstractField.php
index <HASH>..<HASH> 100644
--- a/src/Forms/Fields/AbstractField.php
+++ b/src/Forms/Fields/AbstractField.php
@@ -149,6 +149,14 @@ abstract class AbstractField extends HtmlContainer
}
/**
+ * @return string
+ */
+ public function getId()
+ {
+ return $this->field->getAttribute('id');
+ }
+
+ /**
* {@inheritdoc}
*/
public function setId(string $value) : parent
@@ -157,7 +165,9 @@ abstract class AbstractField extends HtmlContainer
$this->label->setFor($value);
}
- return parent::setId($value);
+ $this->field->setId($value);
+
+ return $this;
}
     /** | bugfix: Field setId must be done on field, not container | cawaphp_html | train |
7285ac43990ebdb2f6de0aa428053a779d135412 | diff --git a/src/org/joml/Quaterniond.java b/src/org/joml/Quaterniond.java
index <HASH>..<HASH> 100644
--- a/src/org/joml/Quaterniond.java
+++ b/src/org/joml/Quaterniond.java
@@ -1166,10 +1166,14 @@ public class Quaterniond implements Externalizable {
double q1x = x, q1y = y, q1z = z, q1w = w;
double q2x = q.x, q2y = q.y, q2z = q.z, q2w = q.w;
double dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
+ double absDot = Math.abs(dot);
+ if (1.0 - 1E-6 < absDot) {
+ return dest.set(this);
+ }
double alphaN = alpha;
- while (Math.abs(dot) < dotThreshold) {
+ while (absDot < dotThreshold) {
double scale0 = 0.5;
- double scale1 = dot >= 0.0 ? alphaN : -alphaN;
+ double scale1 = dot >= 0.0 ? 0.5 : -0.5;
if (alphaN < 0.5) {
q2x = scale0 * q2x + scale1 * q1x;
q2y = scale0 * q2y + scale1 * q1y;
@@ -1194,6 +1198,7 @@ public class Quaterniond implements Externalizable {
alphaN = alphaN * 2.0 - 1.0;
}
dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
+ absDot = Math.abs(dot);
}
double scale0 = 1.0 - alphaN;
double scale1 = dot >= 0.0 ? alphaN : -alphaN;
diff --git a/src/org/joml/Quaternionf.java b/src/org/joml/Quaternionf.java
index <HASH>..<HASH> 100644
--- a/src/org/joml/Quaternionf.java
+++ b/src/org/joml/Quaternionf.java
@@ -1307,10 +1307,14 @@ public class Quaternionf implements Externalizable {
float q1x = x, q1y = y, q1z = z, q1w = w;
float q2x = q.x, q2y = q.y, q2z = q.z, q2w = q.w;
float dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
+ float absDot = Math.abs(dot);
+ if (1.0f - 1E-6f < absDot) {
+ return dest.set(this);
+ }
float alphaN = alpha;
- while (Math.abs(dot) < dotThreshold) {
+ while (absDot < dotThreshold) {
float scale0 = 0.5f;
- float scale1 = dot >= 0.0f ? alphaN : -alphaN;
+ float scale1 = dot >= 0.0f ? 0.5f : -0.5f;
if (alphaN < 0.5f) {
q2x = scale0 * q2x + scale1 * q1x;
q2y = scale0 * q2y + scale1 * q1y;
@@ -1335,6 +1339,7 @@ public class Quaternionf implements Externalizable {
alphaN = alphaN * 2.0f - 1.0f;
}
dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
+ absDot = Math.abs(dot);
}
float scale0 = 1.0f - alphaN;
float scale1 = dot >= 0.0f ? alphaN : -alphaN; | Fix Quaternion.nlerpIterative
Also:
- early-out if the two quaternions are almost identical | JOML-CI_JOML | train |
e6638f7409a3589d11a247a39b124a201a1ba2e3 | diff --git a/stan_test.go b/stan_test.go
index <HASH>..<HASH> 100644
--- a/stan_test.go
+++ b/stan_test.go
@@ -21,11 +21,22 @@ import (
"github.com/nats-io/go-nats"
"github.com/nats-io/go-nats-streaming/pb"
"github.com/nats-io/nats-streaming-server/server"
- "github.com/nats-io/nats-streaming-server/test"
)
func RunServer(ID string) *server.StanServer {
- return test.RunServer(ID)
+ s, err := server.RunServer(ID)
+ if err != nil {
+ panic(err)
+ }
+ return s
+}
+
+func runServerWithOpts(sOpts *server.Options) *server.StanServer {
+ s, err := server.RunServerWithOpts(sOpts, nil)
+ if err != nil {
+ panic(err)
+ }
+ return s
}
// Dumb wait program to sync on callbacks, etc... Will timeout
@@ -1354,7 +1365,7 @@ func TestMaxChannels(t *testing.T) {
opts.MaxChannels = 10
// Run a NATS Streaming server
- s := test.RunServerWithOpts(opts, nil)
+ s := runServerWithOpts(opts)
defer s.Shutdown()
sc := NewDefaultConnection(t)
@@ -1615,7 +1626,7 @@ func TestTimeoutOnRequests(t *testing.T) {
opts := server.GetDefaultOptions()
opts.ID = clusterName
opts.NATSServerURL = nats.DefaultURL
- s := test.RunServerWithOpts(opts, nil)
+ s := runServerWithOpts(opts)
defer s.Shutdown()
 	sc := NewDefaultConnection(t) | Fixed test references to RunServer and RunServerWithOpts | nats-io_go-nats-streaming | train |
2b38f82b25dfc93188c63dc1d0894c4bfe364536 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,9 @@ setuptools.setup(
long_description_content_type="text/markdown",
url="https://github.com/ladybug-tools/ladybug",
packages=setuptools.find_packages(),
+ install_requires=[
+ 'euclid3'
+ ],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6", | fix(pypi): add elucid3 as a requirement
Resolved #<I> | ladybug-tools_ladybug | train |
31ee9f3026981201736e9eefc2f4dd8950b0b344 | diff --git a/lib/common/error_codes.js b/lib/common/error_codes.js
index <HASH>..<HASH> 100644
--- a/lib/common/error_codes.js
+++ b/lib/common/error_codes.js
@@ -5,18 +5,19 @@ module.exports = {
nonUnicodeCharacterInInputStream: 'non-unicode-character-in-input-stream',
selfClosingNonVoidHtmlElement: 'self-closing-non-void-html-element',
endTagWithAttributes: 'end-tag-with-attributes',
- selfClosingStartTag: 'self-closing-start-tag',
selfClosingEndTag: 'self-closing-end-tag',
+ abruptionOfTagSelfClosure: 'abruption-of-tag-self-closure',
unexpectedNullCharacter: 'unexpected-null-character',
startOfProcessingInstructionOrXMLDeclaration: 'start-of-processing-instruction-or-xml-declaration',
invalidFirstCharacterOfTagName: 'invalid-first-character-of-tag-name',
unexpectedEqualsSignBeforeAttributeName: 'unexpected-equals-sign-before-attribute-name',
missingEndTagName: 'missing-end-tag-name',
unexpectedCharacterInAttributeName: 'unexpected-character-in-attribute-name',
- unexpectedCharacterInAttributeValue: 'unexpected-character-in-attribute-value',
eofBeforeTagName: 'eof-before-tag-name',
eofInTag: 'eof-in-tag',
- eofInAttributeValue: 'eof-in-attribute-value',
+ missingAttributeValue: 'missing-attribute-value',
+ missingWhitespaceAfterAttributeValue: 'missing-whitespace-after-attribute-value',
+ unexpectedCharacterInUnquotedAttributeValue: 'unexpected-character-in-unquoted-attribute-value',
cdataInHtmlContent: 'cdata-in-html-content',
malformedComment: 'malformed-comment',
eofInScriptHtmlComment: 'eof-in-script-html-comment',
diff --git a/lib/tokenizer/index.js b/lib/tokenizer/index.js
index <HASH>..<HASH> 100644
--- a/lib/tokenizer/index.js
+++ b/lib/tokenizer/index.js
@@ -1414,7 +1414,7 @@ _[BEFORE_ATTRIBUTE_VALUE_STATE] = function beforeAttributeValueState(cp) {
else if (cp === $.APOSTROPHE)
this.state = ATTRIBUTE_VALUE_SINGLE_QUOTED_STATE;
else if (cp === $.GREATER_THAN_SIGN) {
- this._err(ERR.unexpectedCharacterInAttributeValue);
+ this._err(ERR.missingAttributeValue);
this._reconsumeInState(ATTRIBUTE_VALUE_UNQUOTED_STATE);
}
else
@@ -1439,7 +1439,7 @@ _[ATTRIBUTE_VALUE_DOUBLE_QUOTED_STATE] = function attributeValueDoubleQuotedStat
}
else if (cp === $.EOF) {
- this._err(ERR.eofInAttributeValue);
+ this._err(ERR.eofInTag);
this._emitEOFToken();
}
@@ -1465,7 +1465,7 @@ _[ATTRIBUTE_VALUE_SINGLE_QUOTED_STATE] = function attributeValueSingleQuotedStat
}
else if (cp === $.EOF) {
- this._err(ERR.eofInAttributeValue);
+ this._err(ERR.eofInTag);
this._emitEOFToken();
}
@@ -1497,12 +1497,12 @@ _[ATTRIBUTE_VALUE_UNQUOTED_STATE] = function attributeValueUnquotedState(cp) {
else if (cp === $.QUOTATION_MARK || cp === $.APOSTROPHE || cp === $.LESS_THAN_SIGN ||
cp === $.EQUALS_SIGN || cp === $.GRAVE_ACCENT) {
- this._err(ERR.unexpectedCharacterInAttributeValue);
+ this._err(ERR.unexpectedCharacterInUnquotedAttributeValue);
this.currentAttr.value += toChar(cp);
}
else if (cp === $.EOF) {
- this._err(ERR.eofInAttributeValue);
+ this._err(ERR.eofInTag);
this._emitEOFToken();
}
@@ -1525,12 +1525,12 @@ _[AFTER_ATTRIBUTE_VALUE_QUOTED_STATE] = function afterAttributeValueQuotedState(
}
else if (cp === $.EOF) {
- this._err(ERR.eofInAttributeValue);
+ this._err(ERR.eofInTag);
this._emitEOFToken();
}
else {
- this._err(ERR.unexpectedCharacterInAttributeValue);
+ this._err(ERR.missingWhitespaceAfterAttributeValue);
this._reconsumeInState(BEFORE_ATTRIBUTE_NAME_STATE);
}
};
@@ -1551,7 +1551,7 @@ _[SELF_CLOSING_START_TAG_STATE] = function selfClosingStartTagState(cp) {
}
else {
- this._err(ERR.selfClosingStartTag);
+ this._err(ERR.abruptionOfTagSelfClosure);
this._reconsumeInState(BEFORE_ATTRIBUTE_NAME_STATE);
}
}; | Generalize eof-in-tag error, Rename attrValue errors (more accurate) | inikulin_parse5 | train |
6956f8458a8c798141b137d2db2cbd10fbc8a583 | diff --git a/tests/benchmark_accuracy_real_data.py b/tests/benchmark_accuracy_real_data.py
index <HASH>..<HASH> 100755
--- a/tests/benchmark_accuracy_real_data.py
+++ b/tests/benchmark_accuracy_real_data.py
@@ -212,12 +212,20 @@ def scrub_documents(documents: Dict[str, str], known_filth_items: List[KnownFilt
def load_complicated_detectors() -> Dict[str, bool]:
detector_available = {
'address': False,
+ 'address_sklearn': False,
'spacy': False,
'stanford': False,
'text_blob': False,
}
try:
+ import scrubadub.detectors.sklearn_address
+ detector_name = scrubadub.detectors.sklearn_address.SklearnAddressDetector.name
+ scrubadub.detectors.detector_configuration[detector_name]['autoload'] = True
+ detector_available['address_sklearn'] = True
+ except ImportError:
+ pass
+ try:
import scrubadub.detectors.stanford
detector_name = scrubadub.detectors.stanford.StanfordEntityDetector.name
scrubadub.detectors.detector_configuration[detector_name]['autoload'] = True | add in testing for the address_sklearn model | datascopeanalytics_scrubadub | train |
a43b713cbd852bac26f4774992929ad6d7033346 | diff --git a/girder/api/describe.py b/girder/api/describe.py
index <HASH>..<HASH> 100644
--- a/girder/api/describe.py
+++ b/girder/api/describe.py
@@ -336,7 +336,7 @@ class Describe(Resource):
'apiVersion': API_VERSION,
'swaggerVersion': SWAGGER_VERSION,
'basePath': getApiUrl(),
- 'models': docs.models,
+ 'models': docs.models[resource],
'apis': [{
'path': route,
'operations': sorted(
diff --git a/girder/api/docs.py b/girder/api/docs.py
index <HASH>..<HASH> 100644
--- a/girder/api/docs.py
+++ b/girder/api/docs.py
@@ -20,8 +20,8 @@
import collections
import functools
-models = {}
-# A dict of dicts of lists
+models = collections.defaultdict(dict)
+# routes is dict of dicts of lists
routes = collections.defaultdict(
functools.partial(collections.defaultdict, list))
@@ -112,13 +112,25 @@ def removeRouteDocs(resource, route, method, info, handler):
del routes[resource]
-def addModel(name, model):
+def addModel(resource, name, model):
"""
- This is called to add a model to the swagger documentation.
+ Add a model to the Swagger documentation.
+ :param resource: The type of existing resource to add the model to.
+ :param resource: str
:param name: The name of the model.
:type name: str
:param model: The model to add.
:type model: dict
+
+ .. warning:: This is a low-level API which does not validate the format of
+ ``model``. See the `Swagger Model documentation`_ for a complete
+ specification of the correct format for ``model``.
+
+ .. _Swagger Model documentation: https://github.com/swagger-api/
+ swagger-spec/blob/d79c205485d702302003d4de2f2c980d1caf10f9/
+ versions/1.2.md#527-model-object
"""
- models[name] = model
+ if resource not in routes:
+ raise Exception('"%s" is not a known resource type' % resource)
+ models[resource][name] = model | Make api.docs.addModel require a resource type
Per the Swagger specification, each model should be applied only to a
specific resource or set of resources. More rationale is provided in issue
#<I>
This commit changes the undocumented, but public, addModel function to
require each new model to specify a resource type which it will be
associated with. The same model may be added to multiple resources via
repeated calls to the function. This also updates the API resource
description endpoint to only return models which are associated with
each given resource type.
Closes #<I>. | girder_girder | train |
dadc71c4d523024d04c903fcf53a2b1a93c0e619 | diff --git a/test/doubleshot_test.js b/test/doubleshot_test.js
index <HASH>..<HASH> 100644
--- a/test/doubleshot_test.js
+++ b/test/doubleshot_test.js
@@ -18,20 +18,26 @@ var basicTests = {
}
};
+var intermediateTests = {
+ 'doubleshot': {
+ 'can be run against a folder not labelled `test` and options exclusively': true,
+ 'can be run against a .yaml file': true,
+ 'resets timer for long running tests': true,
+ 'can chain global hooks': true,
+ 'can chain local hooks': true
+ }
+};
+
// Kitchen sink
var kitchenSink = {
'doubleshot': {
'warns user when keys are unused': true,
'warns user when keys are not found': true,
'warns user when aliases are not found': true,
- 'can be run against a folder not labelled `test` and options exclusively': true,
- 'can be run against a .yaml file': true,
'runs batches in isolation': true,
'throws errors for invalid values': true,
'runs global and local `before`, `beforeEach`, `afterEach` and `after` hooks': true,
- 'resets timer for long running tests': true,
- 'can chain global hooks': true,
- 'can chain local hooks': true
+ 'throws errors for aliasing within objects to other objects': true
}
};
@@ -365,4 +371,20 @@ describe('doubleshot', function () {
}
], done);
});
+
+ it('throws errors for aliasing within objects to other objects', function (done) {
+ // Move to the current directory for execution
+ var cwd = process.cwd();
+ process.chdir(__dirname + '/test_files');
+
+ // Run doubleshot against unused keys files
+ var cmd = doubleshot + ' invalid_object_object_alias';
+ exec(cmd, function handleDblInvalidValues (err, stdout, stderr) {
+ // Assert stderr contains info about failing items
+ expect(stderr).to.contain('Aliasing within an object to another object is not allowed');
+
+ // Callback
+ done();
+ });
+ });
});
\ No newline at end of file | Added new test for aliasing within objects to other objects. It's failing =D | twolfson_doubleshot | train |
8ae1f15f1445e904dd94435e8efbec6a9b99c9eb | diff --git a/neon-server/src/main/javascript/eventing/eventBus.js b/neon-server/src/main/javascript/eventing/eventBus.js
index <HASH>..<HASH> 100644
--- a/neon-server/src/main/javascript/eventing/eventBus.js
+++ b/neon-server/src/main/javascript/eventing/eventBus.js
@@ -59,6 +59,8 @@ neon.eventing.EventBus.prototype.subscribe = function (channel, callback, messen
callback(data.payload);
}
});
+ // Save the messenger ID to allow unsubscribing by id.
+ subscription.messengerId = messengerId;
if (this.subscriptions_[channel]) {
this.subscriptions_[channel].push(subscription);
@@ -73,14 +75,16 @@ neon.eventing.EventBus.prototype.subscribe = function (channel, callback, messen
* @param {String|Object} subscription The channel or individual subscription to remove from the bus
* @method unsubscribe
*/
-neon.eventing.EventBus.prototype.unsubscribe = function(subscription) {
+neon.eventing.EventBus.prototype.unsubscribe = function(subscription, messengerId) {
// If given a channel name string, unsubscribe all subscriptions on that channel. Otherwise, unsubscribe just that subscription.
if (typeof(subscription) === "string" && this.subscriptions_[subscription]) {
var length = this.subscriptions_[subscription].length;
for (var i = 0; i < length; i++) {
- this.subscriptions_[subscription][i].unsubscribe();
+ if (this.subscriptions_[subscription][i].messengerId === messengerId) {
+ this.subscriptions_[subscription][i].unsubscribe();
+ }
}
- delete this.subscriptions_[subscription];
+ //delete this.subscriptions_[subscription];
} else if (typeof(subscription) === "object" && subscription !== null) {
subscription.unsubscribe();
}
diff --git a/neon-server/src/main/javascript/eventing/messenger.js b/neon-server/src/main/javascript/eventing/messenger.js
index <HASH>..<HASH> 100644
--- a/neon-server/src/main/javascript/eventing/messenger.js
+++ b/neon-server/src/main/javascript/eventing/messenger.js
@@ -81,7 +81,7 @@ neon.eventing.Messenger.prototype.subscribe = function (channel, callback) {
* @method unsubscribe
*/
neon.eventing.Messenger.prototype.unsubscribe = function (channel) {
- neon.eventing.eventBus_.unsubscribe(channel);
+ neon.eventing.eventBus_.unsubscribe(channel, this.id_);
};
/**
diff --git a/neon-server/src/test/javascript/spec/eventing/eventBus.spec.js b/neon-server/src/test/javascript/spec/eventing/eventBus.spec.js
index <HASH>..<HASH> 100644
--- a/neon-server/src/test/javascript/spec/eventing/eventBus.spec.js
+++ b/neon-server/src/test/javascript/spec/eventing/eventBus.spec.js
@@ -41,9 +41,9 @@ describe('messaging using the standard neon event bus', function () {
var subscriber2 = eventBus.subscribe(channel, callback2, 'messengerId1');
eventBus.publish(channel, 'aMessage', 'messengerId2');
- eventBus.unsubscribe(subscriber2);
+ eventBus.unsubscribe(subscriber2, 'messengerId1');
eventBus.publish(channel, 'bMessage', 'messengerId2');
- eventBus.unsubscribe(subscriber1);
+ eventBus.unsubscribe(subscriber1, 'messengerId1');
eventBus.publish(channel, 'cMessage', 'messengerId2');
expect(callback1.callCount).toEqual(2);
@@ -60,7 +60,7 @@ describe('messaging using the standard neon event bus', function () {
var subscriber2 = eventBus.subscribe(channel, callback2, 'messengerId1');
eventBus.publish(channel, 'aMessage', 'messengerId2');
- eventBus.unsubscribe('aChannel');
+ eventBus.unsubscribe('aChannel', 'messengerId1');
eventBus.publish(channel, 'bMessage', 'messengerId2');
eventBus.publish(channel, 'cMessage', 'messengerId2'); | Modifying the JS API methods that unsubscribe from channels to avoid one Messenger blowing away handlers that were registered through another messenger. | NextCenturyCorporation_neon | train |
a17bbec842f515842b2c01c2b2b02b12ec276058 | diff --git a/src/Composer/Console/Application.php b/src/Composer/Console/Application.php
index <HASH>..<HASH> 100644
--- a/src/Composer/Console/Application.php
+++ b/src/Composer/Console/Application.php
@@ -156,7 +156,7 @@ class Application extends BaseApplication
}
// prompt user for dir change if no composer.json is present in current dir
- if ($io->isInteractive() && !$newWorkDir && !in_array($commandName, array('', 'list', 'init', 'about', 'help', 'diagnose', 'self-update', 'global', 'create-project'), true) && !file_exists(Factory::getComposerFile())) {
+ if ($io->isInteractive() && !$newWorkDir && !in_array($commandName, array('', 'list', 'init', 'about', 'help', 'diagnose', 'self-update', 'global', 'create-project', 'outdated'), true) && !file_exists(Factory::getComposerFile())) {
$dir = dirname(getcwd());
$home = realpath(getenv('HOME') ?: getenv('USERPROFILE') ?: '/'); | Avoid double warnings about composer.json when running outdated, fixes #<I> | composer_composer | train |
55b77526e1ea61064001796d8c0996c50978958b | diff --git a/lib/client.js b/lib/client.js
index <HASH>..<HASH> 100644
--- a/lib/client.js
+++ b/lib/client.js
@@ -60,7 +60,8 @@ Client.prototype = {
},
setupAPI: function(type, callback) {
this.cachedAPI = this.cachedAPI || {};
- if (type in this.cachedAPI)
+ var name = this.domainName + '_' + type;
+ if (name in this.cachedAPI)
return this.cacnedAPI[type];
var self = this;
@@ -74,7 +75,7 @@ Client.prototype = {
port: parsedEndpoint[4] || 80,
klass: definition.klass
});
- this.cachedAPI[type] = api;
+ this.cachedAPI[name] = api;
return callback(api);
}
@@ -91,7 +92,7 @@ Client.prototype = {
port: endpoint[1] || 80,
klass: definition.klass
});
- self.cachedAPI[type] = api;
+ self.cachedAPI[name] = api;
callback(api);
} catch(error) {
self.raiseFatalError(error);
diff --git a/test/scenarios/search/_setup.json b/test/scenarios/search/_setup.json
index <HASH>..<HASH> 100644
--- a/test/scenarios/search/_setup.json
+++ b/test/scenarios/search/_setup.json
@@ -32,8 +32,9 @@
"IndexFieldName": "product",
"IndexFieldType": "literal",
"LiteralOptions.FacetEnabled": "true" } },
- { "name": "upload",
- "type": "doc",
- "body": "%ROOT%/test/fixture/companies/add.sdf.json" }
+ { "name": "upload",
+ "api": "doc",
+ "domain": "companies",
+ "body": "%ROOT%/test/fixture/companies/add.sdf.json" }
]
]
diff --git a/tools/scenario-runner.js b/tools/scenario-runner.js
index <HASH>..<HASH> 100644
--- a/tools/scenario-runner.js
+++ b/tools/scenario-runner.js
@@ -282,7 +282,7 @@ ScenarioRunner.prototype._process = function(scenario, callback) {
}
};
- switch (request.type) {
+ switch (request.api) {
case 'doc':
var batches = request.body;
if (typeof batches == 'string') {
@@ -290,11 +290,13 @@ ScenarioRunner.prototype._process = function(scenario, callback) {
path = path.replace(/%ROOT%/gim, __dirname + '/../');
batches = this.client.readSDFBatch(path);
}
+ this.client.domainName = request.domain;
return this.client.setupAPI('doc', function(documentsAPI) {
documentsAPI.DocumentsBatch({ Docs: batches }, requestCallback);
});
case 'search':
+ this.client.domainName = request.domain;
return this.client.setupAPI('search', function(searchAPI) {
searchAPI.Search(request.params, requestCallback);
         }); | Update client with domain name automatically | groonga_gcs | train |
bd1fdaa4ccf82fa3b1b2e12120a3cba9d1c394b3 | diff --git a/benchmark/benchmark.rb b/benchmark/benchmark.rb
index <HASH>..<HASH> 100644
--- a/benchmark/benchmark.rb
+++ b/benchmark/benchmark.rb
@@ -3,21 +3,63 @@ require 'saxerator'
require 'benchmark'
file = ARGV.shift
-if !File.exists?(file)
+unless File.exists?(file)
puts "Cannot find file #{file}"
exit 1
end
file = File.new(file)
-count = count2 = count3 = count4 = 0
-Benchmark.bm do |x|
- x.report('for_tag') { Saxerator.parser(file).for_tag(:artist).each { count = count + 1 } }
- x.report('at_depth') { Saxerator.parser(file).at_depth(2).each { count2 = count2 + 1 } }
- x.report('within') { Saxerator.parser(file).within(:artists).each { count3 = count3 + 1 } }
- x.report('composite') { Saxerator.parser(file).for_tag(:name).within(:artist).at_depth(3).each { count4 = count4 + 1} }
+ADAPTERS = [:nokogiri, :ox].freeze
+
+class SaxeratorBenchmark
+ def initialize(file)
+ @file = file
+ end
+
+ def with_adapter(adapter)
+ puts '@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@'
+ puts
+ puts "Benchmark with :#{adapter} parser"
+ puts
+ puts '@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@'
+ puts
+
+ count = count2 = count3 = count4 = 0
+
+ Benchmark.bm do |x|
+ x.report('for_tag') do
+ Saxerator.parser(@file) { |confing| confing.adapter = adapter }
+ .for_tag(:artist).each { count += 1 }
+ end
+
+ x.report('at_depth') do
+ Saxerator.parser(@file) { |confing| confing.adapter = adapter }
+ .at_depth(2).each { count2 += 1 }
+ end
+
+ x.report('within') do
+ Saxerator.parser(@file) { |confing| confing.adapter = adapter }
+ .within(:artists).each { count3 += 1 }
+ end
+
+ x.report('composite') do
+ Saxerator.parser(@file) { |confing| confing.adapter = adapter }
+ .for_tag(:name)
+ .within(:artist).at_depth(3).each { count4 += 1 }
+ end
+ end
+
+ puts
+ puts '##########################################################'
+ puts
+ puts "for_tag: #{count} artist elements parsed"
+ puts "at_depth: #{count2} elements parsed"
+ puts "within: #{count3} artists children parsed"
+ puts "composite: #{count4} names within artist nested 3 tags deep parsed"
+ puts
+ end
end
-puts "for_tag: #{count} artist elements parsed"
-puts "at_depth: #{count2} elements parsed"
-puts "within: #{count3} artists children parsed"
-puts "composite: #{count4} names within artist nested 3 tags deep parsed"
+saxerator_benchmark = SaxeratorBenchmark.new(file)
+
+ADAPTERS.each { |adapter| saxerator_benchmark.with_adapter(adapter) }
\ No newline at end of file
diff --git a/lib/saxerator/configuration.rb b/lib/saxerator/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/saxerator/configuration.rb
+++ b/lib/saxerator/configuration.rb
@@ -3,6 +3,11 @@ module Saxerator
attr_reader :output_type
attr_writer :hash_key_generator
+ ALLOWED_OUTPUT_TYPES = {
+ nokogiri: [:hash, :xml],
+ ox: [:hash]
+ }.freeze
+
def initialize
@adapter = :nokogiri
@output_type = :hash
@@ -21,6 +26,10 @@ module Saxerator
def output_type=(val)
raise ArgumentError.new("Unknown output_type '#{val.inspect}'") unless Builder.valid?(val)
+ unless ALLOWED_OUTPUT_TYPES[@adapter].include?(val)
+ raise ArgumentError.new("Adapter '#{adapter.inspect}' not allow to use output_type '#{val.inspect}'")
+ end
+
@output_type = val
raise_error_if_using_put_attributes_in_hash_with_xml
     end | Added output_type validation for different parsers | soulcutter_saxerator | train |
27ead088b49c363b9081b499ddbe3324cd23ec12 | diff --git a/examples/response_action_examples/server.js b/examples/response_action_examples/server.js
index <HASH>..<HASH> 100644
--- a/examples/response_action_examples/server.js
+++ b/examples/response_action_examples/server.js
@@ -38,6 +38,33 @@ function responseLiteralWithUTF16BodyResponse() {
);
}
+function jsonResponseWithUTF8Body(){
+ var mockServerClient = require('mockserver-client').mockServerClient;
+ mockServerClient("localhost", 1080).mockAnyResponse({
+ "httpRequest": {
+ "method": "GET",
+ "path": "/simple"
+ },
+ "httpResponse": {
+ "body": {
+ "type": "STRING",
+            "string": "سلام
",
+ "contentType": "text/plain; charset=utf-8"
+ }
+ },
+ "times": {
+ "unlimited": true
+ }
+ }).then(
+ function () {
+ console.log("expectation created");
+ },
+ function (error) {
+ console.log(error);
+ }
+ );
+}
+
function responseLiteralWithStatusCodeAndReasonPhrase() {
var mockServerClient = require('mockserver-client').mockServerClient;
mockServerClient("localhost", 1080).mockAnyResponse({ | #<I> added example showing json body with special character | jamesdbloom_mockserver-client-node | train |
40c89bd263867fe1da7de646a5a8672103af2d28 | diff --git a/core/src/main/java/hudson/FilePath.java b/core/src/main/java/hudson/FilePath.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/FilePath.java
+++ b/core/src/main/java/hudson/FilePath.java
@@ -1214,11 +1214,18 @@ public final class FilePath implements Serializable {
* that has this file.
* @since 1.89
*/
- public Launcher createLauncher(TaskListener listener) {
+ public Launcher createLauncher(TaskListener listener) throws IOException, InterruptedException {
if(channel==null)
return new LocalLauncher(listener);
else
- return new RemoteLauncher(listener,channel,isUnix());
+ return new RemoteLauncher(listener,channel,channel.call(new IsUnix()));
+ }
+
+ private static final class IsUnix implements Callable<Boolean,IOException> {
+ public Boolean call() throws IOException {
+ return File.pathSeparatorChar==':';
+ }
+ private static final long serialVersionUID = 1L;
}
/** | Correctly detect the platform of the target node.
This is suspected to cause <URL> | jenkinsci_jenkins | train |
8e459e14da5fd3c505427452798444929bb83ae7 | diff --git a/library/CM/Tools/Generator/FilesystemHelper.php b/library/CM/Tools/Generator/FilesystemHelper.php
index <HASH>..<HASH> 100644
--- a/library/CM/Tools/Generator/FilesystemHelper.php
+++ b/library/CM/Tools/Generator/FilesystemHelper.php
@@ -22,7 +22,6 @@ class CM_Tools_Generator_FilesystemHelper extends CM_Class_Abstract {
$gitKeepFile = $directory->joinPath('.gitkeep');
$this->notify('mkdir', $gitKeepFile->getParentDirectory()->getPath());
$gitKeepFile->ensureParentDirectory();
- $this->createFile($gitKeepFile);
}
} | Do not create .gitkeep file when creating directory | cargomedia_cm | train |
1b372c6e541ca907e4cd9f0b5ea062ab56262601 | diff --git a/allegedb/allegedb/cache.py b/allegedb/allegedb/cache.py
index <HASH>..<HASH> 100644
--- a/allegedb/allegedb/cache.py
+++ b/allegedb/allegedb/cache.py
@@ -7,9 +7,8 @@ methods. But if you need to store historical data some other way,
you might want to store it in a ``WindowDict``.
"""
-from copy import copy as copier
from operator import itemgetter
-from collections import defaultdict, deque, MutableMapping, KeysView, ItemsView, ValuesView
+from collections import defaultdict, deque, Mapping, MutableMapping, KeysView, ItemsView, ValuesView
class HistoryError(KeyError):
@@ -70,6 +69,61 @@ class WindowDictItemsView(ItemsView):
yield from self._mapping._future
+class WindowDictPastFutureKeysView(KeysView):
+ def __contains__(self, item):
+ for rev in map(itemgetter(0), self._mapping.deq):
+ if rev == item:
+ return True
+ return False
+
+
+class WindowDictPastKeysView(WindowDictPastFutureKeysView):
+ def __iter__(self):
+ yield from map(itemgetter(0), reversed(self._mapping.deq))
+
+
+class WindowDictFutureKeysView(WindowDictPastFutureKeysView):
+ def __iter__(self):
+ yield from map(itemgetter(0), self._mapping.deq)
+
+
+class WindowDictPastFutureItemsView(ItemsView):
+ def __contains__(self, item):
+ rev, v = item
+ for mrev, mv in self._mapping.deq:
+ if mrev == rev:
+ return mv == v
+ return False
+
+
+class WindowDictPastItemsView(WindowDictPastFutureItemsView):
+ def __iter__(self):
+ yield from reversed(self._mapping.deq)
+
+
+class WindowDictFutureItemsView(WindowDictPastFutureItemsView):
+ def __iter__(self):
+ yield from self._mapping.deq
+
+
+class WindowDictPastFutureValuesView(ValuesView):
+ def __contains__(self, item):
+ for v in map(itemgetter(1), self._mapping.deq):
+ if v == item:
+ return True
+ return False
+
+
+class WindowDictPastValuesView(WindowDictPastFutureValuesView):
+ def __iter__(self):
+ yield from map(itemgetter(1), reversed(self._mapping.deq))
+
+
+class WindowDictFutureValuesView(WindowDictPastFutureValuesView):
+ def __iter__(self):
+ yield from map(itemgetter(1), self._mapping.deq)
+
+
class WindowDictValuesView(ValuesView):
"""Look through all the values that a WindowDict contains."""
def __contains__(self, value):
@@ -88,6 +142,74 @@ class WindowDictValuesView(ValuesView):
yield v
+class WindowDictPastView(Mapping):
+ __slots__ = ['deq']
+
+ def __init__(self, past):
+ self.deq = past
+
+ def __iter__(self):
+ yield from map(itemgetter(0), reversed(self.deq))
+
+ def __len__(self):
+ return len(self.deq)
+
+ def __getitem__(self, key):
+ future = deque()
+ past = self.deq
+ while past:
+ rev, value = past.pop()
+ future.appendleft((rev, value))
+ if rev == key:
+ self.deq += future
+ return value
+ self.deq = future
+ raise KeyError
+
+ def keys(self):
+ return WindowDictPastKeysView(self)
+
+ def items(self):
+ return WindowDictPastItemsView(self)
+
+ def values(self):
+ return WindowDictPastValuesView(self)
+
+
+class WindowDictFutureView(Mapping):
+ __slots__ = ['deq']
+
+ def __init__(self, future):
+ self.deq = future
+
+ def __iter__(self):
+ yield from map(itemgetter(0), self.deq)
+
+ def __len__(self):
+ return len(self.deq)
+
+ def __getitem__(self, key):
+ future = self.deq
+ past = deque()
+ while future:
+ rev, value = future.popleft()
+ past.append((rev, value))
+ if rev == key:
+ self.deq = past + future
+ return value
+ self.deq = past
+ raise KeyError
+
+ def keys(self):
+ return WindowDictFutureKeysView(self)
+
+ def items(self):
+ return WindowDictFutureItemsView(self)
+
+ def values(self):
+ return WindowDictFutureValuesView(self)
+
+
class WindowDict(MutableMapping):
"""A dict that keeps every value that a variable has had over time.
@@ -108,6 +230,13 @@ class WindowDict(MutableMapping):
the slice.
"""
+ def future(self):
+ """Return a Mapping of future values."""
+ return WindowDictFutureView(self._future)
+
+ def past(self):
+ """Return a Mapping of past values."""
+ return WindowDictPastView(self._past)
def seek(self, rev):
"""Arrange the caches to help look up the given revision.""" | Add future() and past() views to WindowDict | LogicalDash_LiSE | train |
e36cebd6cf1cee7445079bb58d44af56c34327d2 | diff --git a/src/listeners.js b/src/listeners.js
index <HASH>..<HASH> 100644
--- a/src/listeners.js
+++ b/src/listeners.js
@@ -76,7 +76,7 @@ export default class ListenerGenerator {
if (isObject(rules)) {
Object.keys(rules).forEach(r => { // eslint-disable-line
if (/confirmed|after|before/.test(r)) {
- fieldName = rules[r];
+ fieldName = rules[r].split(',')[0];
return false;
} | #<I> - "after" validation rule new feature problem | baianat_vee-validate | train |
44c2aaf9affb79bf7f2ac7c85bae1b605e7f667d | diff --git a/bcbio/variation/varscan.py b/bcbio/variation/varscan.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/varscan.py
+++ b/bcbio/variation/varscan.py
@@ -49,6 +49,46 @@ def _get_varscan_opts(config, tmp_dir):
jvm_opts += broad.get_default_jvm_opts(tmp_dir)
return " ".join(jvm_opts)
+def _safe_to_float(x):
+ if x is None:
+ return None
+ else:
+ try:
+ return float(x)
+ except ValueError:
+ return None
+
+def spv_freq_filter(line, tumor_index):
+ """Filter VarScan calls based on the SPV value and frequency.
+
+ Removes calls with SPV < 0.05 and a tumor FREQ > 0.35.
+
+ False positives dominate these higher frequency, low SPV calls. They appear
+ to be primarily non-somatic/germline variants not removed by other filters.
+ """
+ if line.startswith("#CHROM"):
+ headers = [('##FILTER=<ID=SpvFreq,Description="High frequency (tumor FREQ > 0.35) '
+ 'and low p-value for somatic (SPV < 0.05)">')]
+ return "\n".join(headers) + "\n" + line
+ elif line.startswith("#"):
+ return line
+ else:
+ parts = line.split("\t")
+ sample_ft = {a: v for (a, v) in zip(parts[8].split(":"), parts[9 + tumor_index].split(":"))}
+ freq = _safe_to_float(sample_ft.get("FREQ"))
+ spvs = [x for x in parts[7].split(";") if x.startswith("SPV=")]
+ spv = _safe_to_float(spvs[0].split("=")[-1] if spvs else None)
+ fname = None
+ if spv is not None and freq is not None:
+ if spv < 0.05 and freq > 0.35:
+ fname = "SpvFreq"
+ if fname:
+ if parts[6] in set([".", "PASS"]):
+ parts[6] = fname
+ else:
+ parts[6] += ";%s" % fname
+ line = "\t".join(parts)
+ return line
def _varscan_paired(align_bams, ref_file, items, target_regions, out_file):
@@ -79,7 +119,7 @@ def _varscan_paired(align_bams, ref_file, items, target_regions, out_file):
with file_transaction(config, indel_file, snp_file) as (tx_indel, tx_snp):
with tx_tmpdir(items[0]) as tmp_dir:
jvm_opts = _get_varscan_opts(config, tmp_dir)
- remove_zerocoverage = "grep -v -P '\t0\t\t$'"
+ remove_zerocoverage = r"grep -v -P '\t0\t\t$'"
varscan_cmd = ("varscan {jvm_opts} somatic "
" <({normal_mpileup_cl} | {remove_zerocoverage}) "
"<({tumor_mpileup_cl} | {remove_zerocoverage}) "
@@ -108,7 +148,8 @@ def _varscan_paired(align_bams, ref_file, items, target_regions, out_file):
""" "{normal_name}", "{tumor_name}")' | """
"{fix_ambig_ref} | {fix_ambig_alt} | ifne vcfuniqalleles | "
"""bcftools filter -m + -s REJECT -e "SS != '.' && SS != '2'" 2> /dev/null | """
- " bgzip -c > {tx_fix_file}")
+ "{py_cl} -x 'bcbio.variation.varscan.spv_freq_filter(x, 1)' | "
+ "bgzip -c > {tx_fix_file}")
do.run(cmd.format(**locals()), "Varscan paired fix")
to_combine.append(fix_file) | Improve VarScan precision for somatic calling
Remove high frequency low probability (SPV) variants. False positives
dominate these results. Working on improving overall precision of VarScan. | bcbio_bcbio-nextgen | train |
5ff95df810862d9c44eeb2a23ed6b3b4ada0f79d | diff --git a/governator-providers/src/main/java/com/netflix/governator/providers/AdvisableAnnotatedMethodScanner.java b/governator-providers/src/main/java/com/netflix/governator/providers/AdvisableAnnotatedMethodScanner.java
index <HASH>..<HASH> 100644
--- a/governator-providers/src/main/java/com/netflix/governator/providers/AdvisableAnnotatedMethodScanner.java
+++ b/governator-providers/src/main/java/com/netflix/governator/providers/AdvisableAnnotatedMethodScanner.java
@@ -10,6 +10,7 @@ import com.google.inject.BindingAnnotation;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
+import com.google.inject.TypeLiteral;
import com.google.inject.spi.BindingTargetVisitor;
import com.google.inject.spi.Dependency;
import com.google.inject.spi.HasDependencies;
@@ -18,6 +19,7 @@ import com.google.inject.spi.ModuleAnnotatedMethodScanner;
import com.google.inject.spi.ProviderInstanceBinding;
import com.google.inject.spi.ProviderWithExtensionVisitor;
import com.google.inject.spi.Toolable;
+import com.google.inject.util.Types;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
@@ -86,7 +88,7 @@ public final class AdvisableAnnotatedMethodScanner extends ModuleAnnotatedMethod
if (annotation instanceof ProvidesWithAdvice) {
AdviceElement element = new AdviceElementImpl(elementName, AdviceElement.Type.SOURCE, annotation);
Key<T> uniqueKey = Key.get(key.getTypeLiteral(), element);
- binder.bind(key).toProvider(new AdvisedProvider<T>(element.name(), annotation, binder.getProvider(uniqueKey)));
+ binder.bind(key).toProvider(new AdvisedProvider<T>(key.getTypeLiteral(), element.name(), annotation, binder.getProvider(uniqueKey)));
return uniqueKey;
} else if (annotation instanceof Advises) {
Method method = (Method) injectionPoint.getMember();
@@ -101,10 +103,12 @@ public final class AdvisableAnnotatedMethodScanner extends ModuleAnnotatedMethod
private final String name;
private final Provider<T> delegate;
private final List<ProvisionAdviceHolder<UnaryOperator<T>>> adviceBindings = new ArrayList<>();
+ private final TypeLiteral<UnaryOperator<T>> advisesType;
- public AdvisedProvider(String name, Annotation annotation, Provider<T> delegate) {
+ public AdvisedProvider(TypeLiteral<T> typeLiteral, String name, Annotation annotation, Provider<T> delegate) {
this.name = name;
this.delegate = delegate;
+ this.advisesType = (TypeLiteral<UnaryOperator<T>>) TypeLiteral.get(Types.newParameterizedType(UnaryOperator.class, typeLiteral.getType()));
}
@Override
@@ -132,7 +136,7 @@ public final class AdvisableAnnotatedMethodScanner extends ModuleAnnotatedMethod
@Toolable
@javax.inject.Inject
protected void initialize(Injector injector) {
- for (Binding<?> binding : injector.getAllBindings().values()) {
+ for (Binding<?> binding : injector.findBindingsByType(advisesType)) {
Key<?> bindingKey = binding.getKey();
if (bindingKey.hasAttributes() && AdviceElement.class.isAssignableFrom(bindingKey.getAnnotationType())) {
AdviceElementImpl adviceElement = (AdviceElementImpl) bindingKey.getAnnotation();
diff --git a/governator-providers/src/test/java/com/netflix/governator/providers/AdvisedProviderTest.java b/governator-providers/src/test/java/com/netflix/governator/providers/AdvisedProviderTest.java
index <HASH>..<HASH> 100644
--- a/governator-providers/src/test/java/com/netflix/governator/providers/AdvisedProviderTest.java
+++ b/governator-providers/src/test/java/com/netflix/governator/providers/AdvisedProviderTest.java
@@ -48,6 +48,12 @@ public class AdvisedProviderTest {
@ProvidesWithAdvice
@Singleton
+ String getStringBuilder() {
+ return "0";
+ }
+
+ @ProvidesWithAdvice
+ @Singleton
@Named("foo")
List<String> getListWithStringQualifier() {
return new ArrayList<>(Arrays.asList("foo"));
@@ -77,6 +83,16 @@ public class AdvisedProviderTest {
}
@Advises(order=1)
+ UnaryOperator<String> adviseNoQualifierStringBuilder1() {
+ return str -> str + "1";
+ }
+
+ @Advises(order=2)
+ UnaryOperator<String> adviseNoQualifierStringBuilder2() {
+ return str -> str + "2";
+ }
+
+ @Advises(order=1)
@Named("foo")
UnaryOperator<List<String>> adviseNamedQualifier1() {
return list -> {
@@ -113,10 +129,12 @@ public class AdvisedProviderTest {
}
});
+ String noQualifierString = injector.getInstance(String.class);
List<String> noQualifier = injector.getInstance(Key.get(LIST_TYPE_LITERAL));
List<String> nameQualifier = injector.getInstance(Key.get(LIST_TYPE_LITERAL, Names.named("foo")));
Bar bar = injector.getInstance(Bar.class);
+ Assert.assertEquals("012", noQualifierString);
Assert.assertEquals(Arrays.asList("", "1", "2"), noQualifier);
Assert.assertEquals(Arrays.asList("foo", "foo1", "foo2"), nameQualifier);
Assert.assertEquals(Arrays.asList("bar", "bar1", "bar2"), bar.list); | Fix @ProvidesWithAdvice to properly isolate @Advises for different types when no qualifier is specified | Netflix_governator | train |
b802d6efc99a26d7db62baa5ff7a444b76f2717b | diff --git a/src/com/opencms/workplace/CmsAdminPicGalleries.java b/src/com/opencms/workplace/CmsAdminPicGalleries.java
index <HASH>..<HASH> 100644
--- a/src/com/opencms/workplace/CmsAdminPicGalleries.java
+++ b/src/com/opencms/workplace/CmsAdminPicGalleries.java
@@ -2,7 +2,7 @@ package com.opencms.workplace;
/*
* File : $Source: /alkacon/cvs/opencms/src/com/opencms/workplace/Attic/CmsAdminPicGalleries.java,v $
- * Date : $Date: 2000/08/08 14:08:30 $
+ * Date : $Date: 2001/01/08 10:57:56 $
* Version: $ $
*
* Copyright (C) 2000 The OpenCms Group
@@ -41,7 +41,7 @@ import javax.servlet.http.*;
* <p>
*
* @author Mario Stanke
- * @version $Revision: 1.9 $ $Date: 2000/08/08 14:08:30 $
+ * @version $Revision: 1.10 $ $Date: 2001/01/08 10:57:56 $
* @see com.opencms.workplace.CmsXmlWpTemplateFile
*/
public class CmsAdminPicGalleries extends CmsWorkplaceDefault implements I_CmsConstants, I_CmsFileListUsers {
@@ -164,14 +164,22 @@ public class CmsAdminPicGalleries extends CmsWorkplaceDefault implements I_CmsCo
// check if a new filename is given
if (newname != null) {
filename=newname;
- }
- CmsFile file=cms.createFile(foldername, filename, filecontent, C_TYPE_IMAGE_NAME);
+ }
+ try{
+ CmsFile file=cms.createFile(foldername, filename, filecontent, C_TYPE_IMAGE_NAME);
- if (title!= null) {
- String filepath= file.getAbsolutePath();
- cms.lockResource(filepath);
- cms.writeProperty(filepath, C_PROPERTY_TITLE,title);
- }
+ if (title!= null) {
+ String filepath= file.getAbsolutePath();
+ cms.lockResource(filepath);
+ cms.writeProperty(filepath, C_PROPERTY_TITLE,title);
+ }
+ }catch (CmsException ex){
+ xmlTemplateDocument.setData("details", Utils.getStackTrace(ex));
+ templateSelector="error";
+ xmlTemplateDocument.setData("link_value", foldername);
+ xmlTemplateDocument.setData("lasturl", lasturl);
+ return startProcessing(cms, xmlTemplateDocument, elementName, parameters, templateSelector);
+ }
try {
 cms.getRequestContext().getResponse().sendCmsRedirect( getConfigFile(cms).getWorkplaceActionPath()+lasturl); | bugfix: picgallery didn't work with new explorer | alkacon_opencms-core | train
bf51e2ff980584603853cc1e7d47b8012316618f | diff --git a/tests/basics/bytearray_slice_assign.py b/tests/basics/bytearray_slice_assign.py
index <HASH>..<HASH> 100644
--- a/tests/basics/bytearray_slice_assign.py
+++ b/tests/basics/bytearray_slice_assign.py
@@ -51,6 +51,11 @@ b = bytearray(10)
b[:-1] = bytearray(500)
print(len(b), b[0], b[-1])
+# extension with self on RHS
+b = bytearray(x)
+b[4:] = b
+print(b)
+
# Assignment of bytes to array slice
b = bytearray(2)
b[1:1] = b"12345"
diff --git a/tests/basics/list_slice_assign_grow.py b/tests/basics/list_slice_assign_grow.py
index <HASH>..<HASH> 100644
--- a/tests/basics/list_slice_assign_grow.py
+++ b/tests/basics/list_slice_assign_grow.py
@@ -26,3 +26,8 @@ print(l)
l = list(x)
l[100:100] = [10, 20, 30, 40]
print(l)
+
+# growing by using itself on RHS
+l = list(range(10))
+l[4:] = l
+print(l) | tests/basics: Add tests for list and bytearray growing using themselves. | micropython_micropython | train |
aa1d9c0f6c950eaa2796c08f8d27d5caef9c20de | diff --git a/packages/NodeTypeResolver/tests/AbstractNodeTypeResolverTest.php b/packages/NodeTypeResolver/tests/AbstractNodeTypeResolverTest.php
index <HASH>..<HASH> 100644
--- a/packages/NodeTypeResolver/tests/AbstractNodeTypeResolverTest.php
+++ b/packages/NodeTypeResolver/tests/AbstractNodeTypeResolverTest.php
@@ -7,6 +7,7 @@ use Rector\NodeTraverserQueue\BetterNodeFinder;
use Rector\NodeTraverserQueue\NodeTraverserQueue;
use Rector\Tests\AbstractContainerAwareTestCase;
use SplFileInfo;
+use Symplify\PackageBuilder\Parameter\ParameterProvider;
abstract class AbstractNodeTypeResolverTest extends AbstractContainerAwareTestCase
{
@@ -20,10 +21,16 @@ abstract class AbstractNodeTypeResolverTest extends AbstractContainerAwareTestCa
*/
private $nodeTraverserQueue;
+ /**
+ * @var ParameterProvider
+ */
+ private $parameterProvider;
+
protected function setUp(): void
{
$this->betterNodeFinder = $this->container->get(BetterNodeFinder::class);
$this->nodeTraverserQueue = $this->container->get(NodeTraverserQueue::class);
+ $this->parameterProvider = $this->container->get(ParameterProvider::class);
}
/**
@@ -43,6 +50,8 @@ abstract class AbstractNodeTypeResolverTest extends AbstractContainerAwareTestCa
{
$fileInfo = new SplFileInfo($file);
+ $this->parameterProvider->changeParameter('source', [$file]);
+
[$newStmts,] = $this->nodeTraverserQueue->processFileInfo($fileInfo);
 return $newStmts; | fix tests by making parameter provider aware of source | rectorphp_rector | train
ce83904cdbb26b1ffc5fe80c5c835abb5edf1ff0 | diff --git a/openaps/vendors/medtronic.py b/openaps/vendors/medtronic.py
index <HASH>..<HASH> 100644
--- a/openaps/vendors/medtronic.py
+++ b/openaps/vendors/medtronic.py
@@ -80,9 +80,9 @@ class MedtronicTask (scan):
return out
def create_session (self):
minutes = int(self.device.fields.get('minutes', 10))
+ now = datetime.now( )
self.pump.power_control(minutes=minutes)
model = self.get_model( )
- now = datetime.now( )
offset = relativedelta.relativedelta(minutes=minutes)
out = dict(device=self.device.name
, model=model | account for delay during power's effect time | openaps_openaps | train |
bb49cbfffd8d2c21681d72612033f66f86ee40b0 | diff --git a/lib/attribute_normalizer/model_inclusions.rb b/lib/attribute_normalizer/model_inclusions.rb
index <HASH>..<HASH> 100644
--- a/lib/attribute_normalizer/model_inclusions.rb
+++ b/lib/attribute_normalizer/model_inclusions.rb
@@ -62,7 +62,9 @@ module AttributeNormalizer
def inherited(subclass)
super
- subclass.normalize_default_attributes if subclass.table_exists?
+ if subclass.respond_to?(:table_exists?) && subclass.table_exists?
+ subclass.normalize_default_attributes
+ end
end
end
end | Allow use of non-ORM subclasses | mdeering_attribute_normalizer | train |
f05bd2d6d311b10c956a9d1541e68673f3deca4f | diff --git a/packages/ember-runtime/lib/computed/reduce_computed_macros.js b/packages/ember-runtime/lib/computed/reduce_computed_macros.js
index <HASH>..<HASH> 100644
--- a/packages/ember-runtime/lib/computed/reduce_computed_macros.js
+++ b/packages/ember-runtime/lib/computed/reduce_computed_macros.js
@@ -13,6 +13,7 @@ import { isArray } from 'ember-runtime/utils';
import { A as emberA } from 'ember-runtime/system/native_array';
import isNone from 'ember-metal/is_none';
import getProperties from 'ember-metal/get_properties';
+import WeakMap from 'ember-metal/weak_map';
function reduceMacro(dependentKey, callback, initialValue) {
return computed(`${dependentKey}.[]`, function() {
@@ -608,53 +609,74 @@ function customSort(itemsKey, comparator) {
// This one needs to dynamically set up and tear down observers on the itemsKey
// depending on the sortProperties
function propertySort(itemsKey, sortPropertiesKey) {
- var cp = new ComputedProperty(function(key) {
- function didChange() {
- this.notifyPropertyChange(key);
- }
+ let cp = new ComputedProperty(function(key) {
+ let itemsKeyIsAtThis = (itemsKey === '@this');
+ let sortProperties = get(this, sortPropertiesKey);
+
+ assert(
+ `The sort definition for '${key}' on ${this} must be a function or an array of strings`,
+ isArray(sortProperties) && sortProperties.every(s => typeof s === 'string')
+ );
- var items = itemsKey === '@this' ? this : get(this, itemsKey);
- var sortProperties = get(this, sortPropertiesKey);
+ let normalizedSortProperties = normalizeSortProperties(sortProperties);
- if (items === null || typeof items !== 'object') { return emberA(); }
+ // Add/remove property observers as required.
+ let activeObserversMap = cp._activeObserverMap || (cp._activeObserverMap = new WeakMap());
+ let activeObservers = activeObserversMap.get(this);
- // TODO: Ideally we'd only do this if things have changed
- if (cp._sortPropObservers) {
- cp._sortPropObservers.forEach(args => removeObserver.apply(null, args));
+ if (activeObservers) {
+ activeObservers.forEach(args => {
+ removeObserver.apply(null, args);
+ });
}
- cp._sortPropObservers = [];
+ function sortPropertyDidChange() {
+ this.notifyPropertyChange(key);
+ }
- if (!isArray(sortProperties)) { return items; }
+ activeObservers = normalizedSortProperties.map(([prop]) => {
+ let path = itemsKeyIsAtThis ? `@each.${prop}` : `${itemsKey}.@each.${prop}`;
+ let args = [this, path, sortPropertyDidChange];
+ addObserver.apply(null, args);
+ return args;
+ });
- // Normalize properties
- var normalizedSort = sortProperties.map(p => {
- let [prop, direction] = p.split(':');
- direction = direction || 'asc';
+ activeObserversMap.set(this, activeObservers);
- return [prop, direction];
- });
+ // Sort and return the array.
+ let items = itemsKeyIsAtThis ? this : get(this, itemsKey);
- // TODO: Ideally we'd only do this if things have changed
- // Add observers
- normalizedSort.forEach(prop => {
- var args = [this, `${itemsKey}.@each.${prop[0]}`, didChange];
- cp._sortPropObservers.push(args);
- addObserver.apply(null, args);
- });
+ if (isArray(items)) {
+ return sortByNormalizedSortProperties(items, normalizedSortProperties);
+ } else {
+ return emberA();
+ }
+ });
- return emberA(items.slice().sort((itemA, itemB) => {
- for (var i = 0; i < normalizedSort.length; ++i) {
- var [prop, direction] = normalizedSort[i];
- var result = compare(get(itemA, prop), get(itemB, prop));
- if (result !== 0) {
- return (direction === 'desc') ? (-1 * result) : result;
- }
- }
+ cp._activeObserverMap = undefined;
- return 0;
- }));
+ return cp.property(`${sortPropertiesKey}.[]`).readOnly();
+}
+
+function normalizeSortProperties(sortProperties) {
+ return sortProperties.map(p => {
+ let [prop, direction] = p.split(':');
+ direction = direction || 'asc';
+
+ return [prop, direction];
});
+}
+
+function sortByNormalizedSortProperties(items, normalizedSortProperties) {
+ return emberA(items.slice().sort((itemA, itemB) => {
+ for (let i = 0; i < normalizedSortProperties.length; i++) {
+ let [prop, direction] = normalizedSortProperties[i];
+ let result = compare(get(itemA, prop), get(itemB, prop));
+ if (result !== 0) {
+ return (direction === 'desc') ? (-1 * result) : result;
+ }
+ }
- return cp.property(`${itemsKey}.[]`, `${sortPropertiesKey}.[]`).readOnly();
+ return 0;
+ }));
} | [BUGFIX beta] Fix a regression in Ember.computed.sort
Resolves a regression in `Ember.computed.sort` that has existed since
<I>. The regression occurred when there were multiple instances of a
class using `Ember.computed.sort` and caused the sorted value to stop
updating.
Closes #<I>.
Closes #<I>.
Closes #<I>.
Closes #<I>. | emberjs_ember.js | train |
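The failure mode described here — per-instance bookkeeping kept on a shared object, so a second instance clobbers the first — is easy to sketch outside of Ember. A minimal Python illustration (conceptual only, not Ember code; the class and attribute names are made up), mirroring how the fix keys observer state by instance with a WeakMap:

import weakref

class SortedList:
    _shared_observers = None                      # shared across instances (the buggy pattern)
    _per_instance = weakref.WeakKeyDictionary()   # keyed by instance (the fixed pattern)

    def observe(self, names):
        SortedList._shared_observers = names      # a second instance overwrites the first
        SortedList._per_instance[self] = names    # each instance keeps its own entry

a, b = SortedList(), SortedList()
a.observe(['name'])
b.observe(['age'])
print(SortedList._shared_observers)   # ['age']  -- a's observers were lost
print(SortedList._per_instance[a])    # ['name'] -- still intact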
536b9c89c1ea4235690bd485c8f58c9a5b202f5f | diff --git a/cluster/container.go b/cluster/container.go
index <HASH>..<HASH> 100644
--- a/cluster/container.go
+++ b/cluster/container.go
@@ -70,6 +70,14 @@ func (c *Cluster) ListContainers(opts *dcli.ListContainersOptions) ([]docker.API
}
}
+// RemoveContainer removes a container from the cluster.
+func (c *Cluster) RemoveContainer(id string) error {
+ _, err := c.runOnNodes(func(n node) (*docker.Container, error) {
+ return nil, n.RemoveContainer(id)
+ })
+ return err
+}
+
type containerFunc func(node) (*docker.Container, error)
func (c *Cluster) runOnNodes(fn containerFunc) (*docker.Container, error) {
diff --git a/cluster/container_test.go b/cluster/container_test.go
index <HASH>..<HASH> 100644
--- a/cluster/container_test.go
+++ b/cluster/container_test.go
@@ -262,3 +262,31 @@ func TestListContainersFailure(t *testing.T) {
t.Errorf("ListContainers: Want %#v. Got %#v.", expected, containers)
}
}
+
+func TestRemoveContainer(t *testing.T) {
+ var called bool
+ server1 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ http.Error(w, "No such container", http.StatusNotFound)
+ }))
+ defer server1.Close()
+ server2 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ called = true
+ w.Write([]byte("ok"))
+ }))
+ defer server2.Close()
+ cluster, err := New(
+ Node{ID: "handler0", Address: server1.URL},
+ Node{ID: "handler1", Address: server2.URL},
+ )
+ if err != nil {
+ t.Fatal(err)
+ }
+ id := "abc123"
+ err = cluster.RemoveContainer(id)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !called {
+ t.Errorf("RemoveContainer(%q): Did not call node HTTP server", id)
+ }
+} | cluster: add RemoveContainer method
This method will remove containers from the cluster. | tsuru_docker-cluster | train |
3c4cbb15598c0afabe769ef8e9b4e081e4227bc6 | diff --git a/source/rafcon/mvc/models/state_machine.py b/source/rafcon/mvc/models/state_machine.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/models/state_machine.py
+++ b/source/rafcon/mvc/models/state_machine.py
@@ -6,6 +6,7 @@ from rafcon.mvc.selection import Selection
from rafcon.statemachine.state_machine import StateMachine
from rafcon.statemachine.states.container_state import ContainerState
+from rafcon.statemachine.states.library_state import LibraryState
from rafcon.mvc.config import global_gui_config
from rafcon.utils.vividict import Vividict
@@ -64,6 +65,7 @@ class StateMachineModel(ModelMT):
self.meta = Vividict()
self.meta_signal = Signal()
self.state_meta_signal = Signal()
+ self.suppress_new_root_state_model_one_time = False
self.temp = Vividict()
@@ -157,6 +159,22 @@ class StateMachineModel(ModelMT):
assert False
return current_state_model
+ @ModelMT.observe("state_machine", after=True)
+ def root_state_assign(self, model, prop_name, info):
+ if info.method_name != 'root_state':
+ return
+ if self.suppress_new_root_state_model_one_time:
+ self.suppress_new_root_state_model_one_time = False
+ return
+ # print "ASSIGN ROOT_STATE", model, prop_name, info
+ self.root_state.unregister_observer(self)
+ if isinstance(self.state_machine.root_state, ContainerState): # could not be a LibraryState
+ self.root_state = ContainerStateModel(self.state_machine.root_state)
+ else:
+ assert not isinstance(self.state_machine.root_state, LibraryState)
+ self.root_state = StateModel(self.state_machine.root_state)
+ self.root_state.register_observer(self)
+
@ModelMT.observe("state_machine", after=True, before=True)
def change_root_state_type(self, model, prop_name, info):
if info.method_name != 'change_root_state_type':
@@ -176,6 +194,7 @@ class StateMachineModel(ModelMT):
# Extract child models of state, as they have to be applied to the new state model
child_models = statemachine_helper.extract_child_models_of_of_state(state_m, new_state_class)
self.change_root_state_type.__func__.child_models = child_models # static variable of class method
+ self.suppress_new_root_state_model_one_time = True
# After the state has been changed in the core, we create a new model for it with all information extracted
 # from the old state model | make the state-machine model create a new root state model by default when a new root_state is assigned
- the state-type-change functions use a flag to suppress the new model for one assignment | DLR-RM_RAFCON | train
b898d195be7a1bd27aca0fb02f610c15e67049d6 | diff --git a/lib/events-view.js b/lib/events-view.js
index <HASH>..<HASH> 100644
--- a/lib/events-view.js
+++ b/lib/events-view.js
@@ -18,9 +18,16 @@ class EventsView extends View {
filterEvents () {
const searchText = this.searchBox.value.toLowerCase()
- this.children.forEach((child) => {
- child.filter(searchText)
- })
+ if (searchText) {
+ this.children.forEach((child) => {
+ child.filter(searchText)
+ })
+ } else {
+ this.children.forEach((child) => {
+ child.show()
+ child.collapse()
+ })
+ }
}
loadEvents () { | Show/collapse children when no search text is present | electron_devtron | train |
e93ec078dc466b577c463e96c5d637335a344314 | diff --git a/cmd/ilm/options.go b/cmd/ilm/options.go
index <HASH>..<HASH> 100644
--- a/cmd/ilm/options.go
+++ b/cmd/ilm/options.go
@@ -120,7 +120,7 @@ type LifecycleOptions struct {
// ToConfig create lifecycle.Configuration based on LifecycleOptions
func (opts LifecycleOptions) ToConfig(config *lifecycle.Configuration) (*lifecycle.Configuration, *probe.Error) {
- expiry, err := parseExpiry(opts.ExpiryDate, opts.ExpiryDays)
+ expiry, err := parseExpiry(opts.ExpiryDate, opts.ExpiryDays, opts.ExpiredObjectDeleteMarker)
if err != nil {
return nil, err.Trace(opts.ExpiryDate, opts.ExpiryDays)
}
diff --git a/cmd/ilm/parse.go b/cmd/ilm/parse.go
index <HASH>..<HASH> 100644
--- a/cmd/ilm/parse.go
+++ b/cmd/ilm/parse.go
@@ -160,7 +160,7 @@ func parseTransition(storageClass, transitionDateStr, transitionDayStr string) (
}
// Returns lifecycleExpiration to be included in lifecycleRule
-func parseExpiry(expiryDateStr, expiryDayStr string) (lfcExp lifecycle.Expiration, err *probe.Error) {
+func parseExpiry(expiryDateStr, expiryDayStr string, expiredDeleteMarker bool) (lfcExp lifecycle.Expiration, err *probe.Error) {
if expiryDateStr != "" {
date, e := time.Parse(defaultILMDateFormat, expiryDateStr)
if e != nil {
@@ -183,5 +183,9 @@ func parseExpiry(expiryDateStr, expiryDayStr string) (lfcExp lifecycle.Expiratio
lfcExp.Days = lifecycle.ExpirationDays(days)
}
+ if expiredDeleteMarker {
+ lfcExp.DeleteMarker = true
+ }
+
return lfcExp, nil
} | ilm: Fix adding ExpiredObjectDeleteMarker alone (#<I>)
It is not possible to add a rule with only ExpiredObjectDeleteMarker
activated. This commit fixes the issue. | minio_mc | train |
70ecf87d905f008642bf03a3f8e9c3c21278d245 | diff --git a/main/server/index.js b/main/server/index.js
index <HASH>..<HASH> 100644
--- a/main/server/index.js
+++ b/main/server/index.js
@@ -83,8 +83,7 @@ module.exports = Promise.resolve()
process.env.ENDPOINT = process.env.ENDPOINT ||
url.format({
protocol: protocol,
- hostname: process.env.HOST_NAME,
- port: process.env.PORT
+ hostname: process.env.HOST_NAME
});
process.env.NODE_ENV = process.env.NODE_ENV || 'development';
diff --git a/services/boot/google.js b/services/boot/google.js
index <HASH>..<HASH> 100644
--- a/services/boot/google.js
+++ b/services/boot/google.js
@@ -28,11 +28,11 @@ module.exports = function(app) {
if (!secretConfig) {
secretConfig = app.secrets.get('google');
}
-
+
//---------------------------------------------------
// Recaptcha
- if (config.recaptcha) {
+ if (!config.recaptcha) {
console.warn('Could not find Recaptcha configuration');
return;
}
diff --git a/services/models/process.json b/services/models/process.json
index <HASH>..<HASH> 100644
--- a/services/models/process.json
+++ b/services/models/process.json
@@ -37,7 +37,6 @@
"keys": {
"host": 1,
"ip": 1,
- "email": 1,
"email": 1
},
"options": { | make adjustments to use correct endpoint and other configs | agneta_platform | train |
c25be740615dfa1eefb106f8234b8f499bffa8ad | diff --git a/command/agent/config.go b/command/agent/config.go
index <HASH>..<HASH> 100644
--- a/command/agent/config.go
+++ b/command/agent/config.go
@@ -259,6 +259,8 @@ type Addresses struct {
Serf string `mapstructure:"serf"`
}
+// Interfaces provides an alternative to the Addresses configuration. We pick an
+// ip configured on the devide specified and use that to bind.
type Interfaces struct {
HTTP string `mapstructure:"http"`
RPC string `mapstructure:"rpc"`
diff --git a/command/agent/util.go b/command/agent/util.go
index <HASH>..<HASH> 100644
--- a/command/agent/util.go
+++ b/command/agent/util.go
@@ -25,7 +25,10 @@ func ipOfDevice(name string) (net.IP, error) {
if len(addrs) == 0 {
return nil, fmt.Errorf("no ips were detected on the interface: %v", name)
}
- var ipv4Addrs []net.IP
+
+ // Iterating through the IPs configured for that device and returning the
+ // the first ipv4 address configured. If no ipv4 addresses are configured,
+ // we return the first ipv6 addr if any ipv6 addr is configured.
var ipv6Addrs []net.IP
for _, addr := range addrs {
var ip net.IP
@@ -33,8 +36,7 @@ func ipOfDevice(name string) (net.IP, error) {
case *net.IPNet:
ip = v.IP
if ip.To4() != nil {
- ipv4Addrs = append(ipv4Addrs, ip)
- continue
+ return ip, nil
}
if ip.To16() != nil {
ipv6Addrs = append(ipv6Addrs, ip)
@@ -44,9 +46,6 @@ func ipOfDevice(name string) (net.IP, error) {
continue
}
}
- if len(ipv4Addrs) > 0 {
- return ipv4Addrs[0], nil
- }
if len(ipv6Addrs) > 0 {
return ipv6Addrs[0], nil
} | Added a comment about iteration of ips in a network device | hashicorp_nomad | train |
62f2540511082e7ef4f16b25add4a4415e80c437 | diff --git a/db/migrate/20101217113425_translate_page_plugin.rb b/db/migrate/20101217113425_translate_page_plugin.rb
index <HASH>..<HASH> 100644
--- a/db/migrate/20101217113425_translate_page_plugin.rb
+++ b/db/migrate/20101217113425_translate_page_plugin.rb
@@ -14,6 +14,10 @@ class TranslatePagePlugin < ActiveRecord::Migration
}, {
:migrate_data => true
})
+
+ if (seed_file = Rails.root.join('db', 'seeds', 'pages.rb')).file?
+ load seed_file.to_s unless Page.where(:link_url => '/').any?
+ end
end
def self.down
diff --git a/pages/db/migrate/20101214040815_translate_page_plugin.rb b/pages/db/migrate/20101214040815_translate_page_plugin.rb
index <HASH>..<HASH> 100644
--- a/pages/db/migrate/20101214040815_translate_page_plugin.rb
+++ b/pages/db/migrate/20101214040815_translate_page_plugin.rb
@@ -1,15 +1,27 @@
class TranslatePagePlugin < ActiveRecord::Migration
def self.up
- Page.create_translation_table! :title => :string,
- :meta_keywords => :string,
- :meta_description => :text,
- :browser_title => :string
+ PagePart.create_translation_table!({
+ :body => :text
+ }, {
+ :migrate_data => true
+ })
- PagePart.create_translation_table! :body => :text
+ Page.create_translation_table!({
+ :title => :string,
+ :meta_keywords => :string,
+ :meta_description => :text,
+ :browser_title => :string
+ }, {
+ :migrate_data => true
+ })
+
+ if (seed_file = Rails.root.join('db', 'seeds', 'pages.rb')).file?
+ load seed_file.to_s unless Page.where(:link_url => '/').any?
+ end
end
def self.down
- Page.drop_translation_table!
- PagePart.drop_translation_table!
+ Page.drop_translation_table! :migrate_data => true
+ PagePart.drop_translation_table! :migrate_data => true
end
end | Ensure migrations load seeds where there have been no seeds before (aka no home page). | refinery_refinerycms | train |
5f6e55da54dd22008641868d9521bc53ce072dc7 | diff --git a/runtests.py b/runtests.py
index <HASH>..<HASH> 100644
--- a/runtests.py
+++ b/runtests.py
@@ -11,6 +11,7 @@ DEFAULT_SETTINGS = dict(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
+ "django.contrib.sessions",
"django.contrib.sites",
"pinax.waitinglist",
"pinax.waitinglist.tests" | added django.contrib.sessions to runtests.py INSTALLED_APPS | pinax_pinax-waitinglist | train |
472b388df84eb278ee4df3e6c34371430fe9a8b4 | diff --git a/axlsx.gemspec b/axlsx.gemspec
index <HASH>..<HASH> 100644
--- a/axlsx.gemspec
+++ b/axlsx.gemspec
@@ -19,8 +19,6 @@ Gem::Specification.new do |s|
s.test_files = Dir.glob("{test/**/*}")
s.add_runtime_dependency 'nokogiri', '>= 1.4.1'
- s.add_runtime_dependency 'activesupport', '>= 2.3.9'
- s.add_runtime_dependency 'i18n', '>= 0.4.1'
s.add_runtime_dependency 'rmagick', '>= 2.12.2'
s.add_runtime_dependency 'rubyzip', '~> 0.9'
diff --git a/lib/axlsx.rb b/lib/axlsx.rb
index <HASH>..<HASH> 100644
--- a/lib/axlsx.rb
+++ b/lib/axlsx.rb
@@ -1,7 +1,4 @@
# encoding: UTF-8
-Encoding::default_internal = 'UTF-8' unless RUBY_VERSION < '1.9'
-Encoding::default_external = 'UTF-8' unless RUBY_VERSION < '1.9'
-
require 'axlsx/version.rb'
require 'axlsx/util/simple_typed_list.rb'
@@ -28,8 +25,6 @@ require 'axlsx/package.rb'
#required gems
require 'nokogiri'
-require 'active_support/core_ext/object/instance_variables'
-require 'active_support/inflector'
require 'RMagick'
require 'zip/zip'
@@ -37,6 +32,12 @@ require 'zip/zip'
require 'bigdecimal'
require 'time'
+#if object does not have this already, I am borrowing it from active_support.
+
+ Object.send :define_method, :instance_values do
+ Hash[instance_variables.map { |name| [name.to_s[1..-1], instance_variable_get(name)] }]
+ end unless Object.respond_to?(:instance_values)
+
# xlsx generation with charts, images, automated column width, customizable styles and full schema validation. Axlsx excels at helping you generate beautiful Office Open XML Spreadsheet documents without having to understand the entire ECMA specification. Check out the README for some examples of how easy it is. Best of all, you can validate your xlsx file before serialization so you know for sure that anything generated is going to load on your client's machine.
module Axlsx
diff --git a/lib/axlsx/stylesheet/styles.rb b/lib/axlsx/stylesheet/styles.rb
index <HASH>..<HASH> 100644
--- a/lib/axlsx/stylesheet/styles.rb
+++ b/lib/axlsx/stylesheet/styles.rb
@@ -253,22 +253,22 @@ module Axlsx
# Creates the default set of styles the exel requires to be valid as well as setting up the
# Axlsx::STYLE_THIN_BORDER
def load_default_styles
- @numFmts = SimpleTypedList.new NumFmt
+ @numFmts = SimpleTypedList.new NumFmt, 'numFmts'
@numFmts << NumFmt.new(:numFmtId => NUM_FMT_YYYYMMDD, :formatCode=> "yyyy/mm/dd")
@numFmts << NumFmt.new(:numFmtId => NUM_FMT_YYYYMMDDHHMMSS, :formatCode=> "yyyy/mm/dd hh:mm:ss")
@numFmts.lock
- @fonts = SimpleTypedList.new Font
+ @fonts = SimpleTypedList.new Font, 'fonts'
@fonts << Font.new(:name => "Arial", :sz => 11, :family=>1)
@fonts.lock
- @fills = SimpleTypedList.new Fill
+ @fills = SimpleTypedList.new Fill, 'fills'
@fills << Fill.new(Axlsx::PatternFill.new(:patternType=>:none))
@fills << Fill.new(Axlsx::PatternFill.new(:patternType=>:gray125))
@fills.lock
- @borders = SimpleTypedList.new Border
+ @borders = SimpleTypedList.new Border, 'borders'
@borders << Border.new
black_border = Border.new
[:left, :right, :top, :bottom].each do |item|
@@ -281,7 +281,7 @@ module Axlsx
@cellStyleXfs << Xf.new(:borderId=>0, :numFmtId=>0, :fontId=>0, :fillId=>0)
@cellStyleXfs.lock
- @cellStyles = SimpleTypedList.new CellStyle
+ @cellStyles = SimpleTypedList.new CellStyle, 'cellStyles'
@cellStyles << CellStyle.new(:name =>"Normal", :builtinId =>0, :xfId=>0)
@cellStyles.lock
diff --git a/lib/axlsx/util/simple_typed_list.rb b/lib/axlsx/util/simple_typed_list.rb
index <HASH>..<HASH> 100644
--- a/lib/axlsx/util/simple_typed_list.rb
+++ b/lib/axlsx/util/simple_typed_list.rb
@@ -154,7 +154,7 @@ module Axlsx
# @return [String]
def to_xml(xml)
classname = @allowed_types[0].name.split('::').last
- el_name = serialize_as || (classname[0,1].downcase + classname[1..-1]).pluralize
+ el_name = serialize_as || (classname[0,1].downcase + classname[1..-1])
xml.send(el_name, :count=>@list.size) {
@list.each { |item| item.to_xml(xml) }
 } | Remove entirely the dependencies on i<I>n and active support. | randym_axlsx | train
67672c531ac06f2d9bfae14080443a84e77604ac | diff --git a/src/QueryController.php b/src/QueryController.php
index <HASH>..<HASH> 100644
--- a/src/QueryController.php
+++ b/src/QueryController.php
@@ -4,6 +4,7 @@ namespace Simples\Controller;
use Simples\Http\Controller;
use Simples\Http\Response;
+use Simples\Persistence\QueryBuilder;
/**
* Class QueryController
@@ -12,6 +13,53 @@ use Simples\Http\Response;
abstract class QueryController extends Controller
{
/**
+ * @var QueryBuilder
+ */
+ protected $queryBuilder;
+
+ /**
+ * @inheritDoc
+ */
+ public function __construct(QueryBuilder $queryBuilder)
+ {
+ $this->queryBuilder = $queryBuilder;
+ }
+
+ /**
+ * @param string $source
+ * @param string $key
+ * @return string
+ */
+ protected function notDestroyed(string $source, string $key = '_destroyed_at')
+ {
+ return "({$this->queryBuilder->getDriver()::blank("`{$source}`.`{$key}`")})";
+ }
+
+ /**
+ * @param string $alias
+ * @param string $source
+ * @param string $field
+ * @param string $where
+ * @param string $group
+ * @return string
+ */
+ protected function subQuery($alias, $source, $field, $where = null, $group = null)
+ {
+ $pieces = [];
+ $pieces[] = "(";
+ $pieces[] = "SELECT {$field}";
+ $pieces[] = "FROM `{$source}` AS `{$alias}`";
+ if ($where) {
+ $pieces[] = "WHERE {$where}";
+ }
+ if ($group) {
+ $pieces[] = "GROUP BY {$group}";
+ }
+ $pieces[] = ")";
+ return implode(' ', $pieces);
+ }
+
+ /**
* @param mixed $content (null)
* @param array $meta
* @param int $code
@@ -23,4 +71,4 @@ abstract class QueryController extends Controller
->response()
->api($content, $code, $meta);
}
-}
\ No newline at end of file
+} | QueryBuilder on QueryController | phpzm_controller | train |
7ef62287d91833c4decb58cd7ed4a15b12dcbf7f | diff --git a/validator/sawtooth_validator/consensus/handlers.py b/validator/sawtooth_validator/consensus/handlers.py
index <HASH>..<HASH> 100644
--- a/validator/sawtooth_validator/consensus/handlers.py
+++ b/validator/sawtooth_validator/consensus/handlers.py
@@ -53,11 +53,13 @@ class ConsensusServiceHandler(Handler):
request_type,
response_class,
response_type,
+ handler_status=HandlerStatus.RETURN
):
self._request_class = request_class
self._request_type = request_type
self._response_class = response_class
self._response_type = response_type
+ self._handler_status = handler_status
def handle_request(self, request, response):
raise NotImplementedError()
@@ -91,7 +93,7 @@ class ConsensusServiceHandler(Handler):
self.handle_request(request, response)
return HandlerResult(
- status=HandlerStatus.RETURN,
+ status=self._handler_status,
message_out=response,
 message_type=self._response_type) | Allow consensus handlers to specify result type
Allow the consensus handlers to specify the return type, so that they can be part of
a handler chain. | hyperledger_sawtooth-core | train
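The point of letting each handler carry its own status is that a dispatcher can run several handlers over the same message and stop at the first one that says RETURN. A minimal sketch of that chain (illustrative only — the class and status names here are simplified stand-ins, not the actual sawtooth API):

from enum import Enum

class HandlerStatus(Enum):
    RETURN = 1   # stop here and send the response
    PASS = 2     # let the next handler in the chain see the message too

class LoggingHandler:
    status = HandlerStatus.PASS
    def handle(self, message):
        print('saw:', message)

class ConsensusHandler:
    status = HandlerStatus.RETURN
    def handle(self, message):
        print('handled:', message)

def dispatch(message, handlers):
    for handler in handlers:
        handler.handle(message)
        if handler.status is HandlerStatus.RETURN:
            break

dispatch('block_new', [LoggingHandler(), ConsensusHandler()])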
185b88f9c1fed1f6171dbf2968fe9c6e24027fad | diff --git a/lib/the86-client/conversation.rb b/lib/the86-client/conversation.rb
index <HASH>..<HASH> 100644
--- a/lib/the86-client/conversation.rb
+++ b/lib/the86-client/conversation.rb
@@ -5,6 +5,7 @@ module The86::Client
attribute :content, String # For creating new Conversation.
attribute :bumped_at, DateTime
attribute :created_at, DateTime
+ attribute :original_created_at, DateTime
attribute :updated_at, DateTime
path "conversations"
diff --git a/lib/the86-client/post.rb b/lib/the86-client/post.rb
index <HASH>..<HASH> 100644
--- a/lib/the86-client/post.rb
+++ b/lib/the86-client/post.rb
@@ -6,6 +6,7 @@ module The86::Client
attribute :content_html, String
attribute :in_reply_to_id, Integer
attribute :created_at, DateTime
+ attribute :original_created_at, DateTime
attribute :updated_at, DateTime
path "posts" | Add original_created_at for the OSQA migration | sitepoint_the86-client | train |
6f5cfff6dc1e1520918c8a5006afdce00b13c454 | diff --git a/lib/review/book.rb b/lib/review/book.rb
index <HASH>..<HASH> 100644
--- a/lib/review/book.rb
+++ b/lib/review/book.rb
@@ -16,7 +16,6 @@ require 'forwardable'
require 'nkf'
require 'review/book/base'
-require 'review/book/chapter_set'
require 'review/book/chapter'
require 'review/book/part'
require 'review/book/parameters' | Remove unwanted ChapterSet class require. Ref #<I> | kmuto_review | train
69a3fa07f290a6d26a7f1ad1bc9090d4047bc99d | diff --git a/packages/cli/src/commands/run.js b/packages/cli/src/commands/run.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/commands/run.js
+++ b/packages/cli/src/commands/run.js
@@ -58,6 +58,8 @@ const run = async (command, config, declaration) => {
spinner.start();
});
runner.on(WorkerRunnerEvents.STARTED, () => {
+ // Purge process output stream
+ console.log(String());
spinner.stop();
info('Worker is up!');
});
@@ -66,6 +68,8 @@ const run = async (command, config, declaration) => {
spinner.start();
});
runner.on(WorkerRunnerEvents.RELOADED, () => {
+ // Purge process output stream
+ console.log(String());
spinner.stop();
info('Worker is up!');
}); | fix(log): purge process output stream before spinner stop
affects: @zetapush/cli
ISSUES CLOSED: #<I> | zetapush_zetapush | train |
2e3d325f71285f9812a1ddcffae21b28b648a8a5 | diff --git a/octodns/provider/yaml.py b/octodns/provider/yaml.py
index <HASH>..<HASH> 100644
--- a/octodns/provider/yaml.py
+++ b/octodns/provider/yaml.py
@@ -111,26 +111,6 @@ class YamlProvider(BaseProvider):
SUPPORTS_DYNAMIC = True
SUPPORTS_POOL_VALUE_STATUS = True
SUPPORTS_MULTIVALUE_PTR = True
- SUPPORTS = set(
- (
- 'A',
- 'AAAA',
- 'ALIAS',
- 'CAA',
- 'CNAME',
- 'DNAME',
- 'LOC',
- 'MX',
- 'NAPTR',
- 'NS',
- 'PTR',
- 'SSHFP',
- 'SPF',
- 'SRV',
- 'TXT',
- 'URLFWD',
- )
- )
def __init__(
self,
@@ -167,6 +147,14 @@ class YamlProvider(BaseProvider):
del args['log']
return self.__class__(**args)
+ @property
+ def SUPPORTS(self):
+ # The yaml provider supports all record types even those defined by 3rd
+ # party modules that we know nothing about, thus we dynamically return
+ # the types list that is registered in Record, everything that's know as
+ # of the point in time we're asked
+ return set(Record.registered_types().keys())
+
def supports(self, record):
# We're overriding this as a performance tweak, namely to avoid calling
# the implementation of the SUPPORTS property to create a set from a
diff --git a/octodns/record/__init__.py b/octodns/record/__init__.py
index <HASH>..<HASH> 100644
--- a/octodns/record/__init__.py
+++ b/octodns/record/__init__.py
@@ -103,6 +103,10 @@ class Record(EqualityTupleMixin):
cls._CLASSES[_type] = _class
@classmethod
+ def registered_types(cls):
+ return cls._CLASSES
+
+ @classmethod
def new(cls, zone, name, data, source=None, lenient=False):
name = str(name).lower()
fqdn = f'{name}.{zone.name}' if name else zone.name
diff --git a/tests/test_octodns_provider_yaml.py b/tests/test_octodns_provider_yaml.py
index <HASH>..<HASH> 100644
--- a/tests/test_octodns_provider_yaml.py
+++ b/tests/test_octodns_provider_yaml.py
@@ -15,7 +15,7 @@ from unittest import TestCase
from yaml import safe_load
from yaml.constructor import ConstructorError
-from octodns.record import Create
+from octodns.record import _NsValue, Create, Record, ValuesMixin
from octodns.provider.base import Plan
from octodns.provider.yaml import (
_list_all_yaml_files,
@@ -217,7 +217,23 @@ class TestYamlProvider(TestCase):
str(ctx.exception),
)
- def test_supports_everything(self):
+ def test_SUPPORTS(self):
+ source = YamlProvider('test', join(dirname(__file__), 'config'))
+ # make sure the provider supports all the registered types
+ self.assertEqual(Record.registered_types().keys(), source.SUPPORTS)
+
+ class YamlRecord(ValuesMixin, Record):
+ _type = 'YAML'
+ _value_type = _NsValue
+
+ # don't know anything about a yaml type
+ self.assertTrue('YAML' not in source.SUPPORTS)
+ # register it
+ Record.register_type(YamlRecord)
+ # when asked again we'll now include it in our list of supports
+ self.assertTrue('YAML' in source.SUPPORTS)
+
+ def test_supports(self):
source = YamlProvider('test', join(dirname(__file__), 'config'))
class DummyType(object):
diff --git a/tests/test_octodns_record.py b/tests/test_octodns_record.py
index <HASH>..<HASH> 100644
--- a/tests/test_octodns_record.py
+++ b/tests/test_octodns_record.py
@@ -69,6 +69,8 @@ class TestRecord(TestCase):
_type = 'AA'
_value_type = _NsValue
+ self.assertTrue('AA' not in Record.registered_types())
+
Record.register_type(AaRecord)
aa = Record.new(
self.zone,
@@ -77,6 +79,8 @@ class TestRecord(TestCase):
)
self.assertEqual(AaRecord, aa.__class__)
+ self.assertTrue('AA' in Record.registered_types())
+
def test_lowering(self):
record = ARecord(
self.zone, 'MiXeDcAsE', {'ttl': 30, 'type': 'A', 'value': '1.2.3.4'} | YamlProvider.SUPPORTS dynamically returns the list of registered types | github_octodns | train |
b504387f4cb5c3f93ee47e33d3fb5ab96236d48d | diff --git a/globus_cli/commands/login.py b/globus_cli/commands/login.py
index <HASH>..<HASH> 100644
--- a/globus_cli/commands/login.py
+++ b/globus_cli/commands/login.py
@@ -121,6 +121,12 @@ def do_local_server_login_flow():
Starts a local http server, opens a browser to have the user login,
and gets the code redirected to the server (no copy and pasting required)
"""
+ safeprint(
+ "You are running 'globus login', which should automatically open "
+ "a browser window for you to login.\n"
+ "If this fails or you experience difficulty, try "
+ "'globus login --no-local-server'"
+ "\n---")
# start local server and create matching redirect_uri
with start_local_server(listen=('127.0.0.1', 0)) as server:
_, port = server.socket.getsockname() | Update login command to print more help
If the user is doing local login, we might have issues with opening the
magic browser. Print a message about running with `--no-local-server` | globus_globus-cli | train |
bd6f9b0e6243a3846e52f2d3cd511be0d784d579 | diff --git a/server/src/com/thoughtworks/go/server/cache/GoCache.java b/server/src/com/thoughtworks/go/server/cache/GoCache.java
index <HASH>..<HASH> 100644
--- a/server/src/com/thoughtworks/go/server/cache/GoCache.java
+++ b/server/src/com/thoughtworks/go/server/cache/GoCache.java
@@ -201,9 +201,9 @@ public class GoCache {
}
public void removeAssociations(String key, Element element) {
- if (element.getValue() instanceof KeyList) {
+ if (element.getObjectValue() instanceof KeyList) {
synchronized (key.intern()) {
- for (String subkey : (KeyList) element.getValue()) {
+ for (String subkey : (KeyList) element.getObjectValue()) {
remove(compositeKey(key, subkey));
}
}
@@ -216,7 +216,7 @@ public class GoCache {
if (parent == null) {
return;
}
- GoCache.KeyList subKeys = (GoCache.KeyList) parent.getValue();
+ GoCache.KeyList subKeys = (GoCache.KeyList) parent.getObjectValue();
subKeys.remove(childKey);
}
}
diff --git a/server/test/integration/com/thoughtworks/go/server/cache/GoCacheTest.java b/server/test/integration/com/thoughtworks/go/server/cache/GoCacheTest.java
index <HASH>..<HASH> 100644
--- a/server/test/integration/com/thoughtworks/go/server/cache/GoCacheTest.java
+++ b/server/test/integration/com/thoughtworks/go/server/cache/GoCacheTest.java
@@ -87,7 +87,7 @@ public class GoCacheTest {
@Before
public void setUp() throws Exception {
- if(originalMaxElementsInMemory == 0 ){
+ if (originalMaxElementsInMemory == 0) {
originalMaxElementsInMemory = goCache.configuration().getMaxElementsInMemory();
originalTimeToLiveSeconds = goCache.configuration().getTimeToLiveSeconds();
}
@@ -343,7 +343,7 @@ public class GoCacheTest {
}
@Test
- public void shouldEvictAllSubkeyCacheEntriesCacheWhenTheParentEntryGetsEvicted() throws InterruptedException {
+ public void shouldEvictAllSubkeyCacheEntriesWhenTheParentEntryGetsEvicted() throws InterruptedException {
goCache.configuration().setMaxElementsInMemory(3);
String parentKey = "parent";
goCache.put(parentKey, new GoCache.KeyList());
@@ -356,4 +356,17 @@ public class GoCacheTest {
assertThat(((String) goCache.get("unrelatedkey")), is("value"));
}
+ @Test
+ public void shouldHandleNonSerializableValuesDuringEviction() throws InterruptedException {
+ goCache.configuration().setMaxElementsInMemory(1);
+ NonSerializableClass value = new NonSerializableClass();
+ String key = "key";
+ goCache.put(key, value);
+ Thread.sleep(1);//so that the timestamps on the cache entries are different
+ goCache.put("another_entry", "value");
+ assertThat(goCache.get(key), is(nullValue()));
+ }
+
+ private class NonSerializableClass {
+ }
} | fixing serilization exception that is thrown during cache eviction | gocd_gocd | train |
7a7dbd77d143cedd84c5e60f271f26b5e62fface | diff --git a/lib/completionlib.php b/lib/completionlib.php
index <HASH>..<HASH> 100644
--- a/lib/completionlib.php
+++ b/lib/completionlib.php
@@ -213,7 +213,7 @@ class completion_info {
* @return unknown
*/
function internal_get_state($cm, $userid, $current) {
- global $USER, $DB;
+ global $USER, $DB, $CFG;
// Get user ID
if (!$userid) {
@@ -236,6 +236,7 @@ class completion_info {
// Check grade
if (!is_null($cm->completiongradeitemnumber)) {
+ require_once($CFG->libdir.'/gradelib.php');
$item = grade_item::fetch(array('courseid'=>$cm->course, 'itemtype'=>'mod',
'itemmodule'=>$cm->modname, 'iteminstance'=>$cm->instance,
'itemnumber'=>$cm->completiongradeitemnumber)); | MDL-<I>: Completion system does not require_once gradelib when used | moodle_moodle | train |
ff648b6d84de332ed3bb3d8a37ed03d043f274ed | diff --git a/includes/general.php b/includes/general.php
index <HASH>..<HASH> 100644
--- a/includes/general.php
+++ b/includes/general.php
@@ -1058,7 +1058,9 @@ function pods_shortcode_run( $tags, $content = null ) {
$related = $pod->field( $tags['field'], array( 'output' => 'pods' ) );
/** @var Pods $rel_pod */
foreach ( $related as $rel_pod ) {
- $return .= $rel_pod->template( $tags['template'], $content );
+ if ( $rel_pod instanceof Pods ) {
+ $return .= $rel_pod->template( $tags['template'], $content );
+ }
}
} elseif ( empty( $tags['helper'] ) ) {
$return = $pod->display( $tags['field'] ); | Rel should be a Pods object | pods-framework_pods | train |
954f01c81b530b82577ac5074e88e45eff3de5d1 | diff --git a/scripts/snapshotResolver.js b/scripts/snapshotResolver.js
index <HASH>..<HASH> 100644
--- a/scripts/snapshotResolver.js
+++ b/scripts/snapshotResolver.js
@@ -5,8 +5,6 @@ const webpack = require('webpack');
const [webpackVersion] = webpack.version;
const snapshotExtension = `.snap.webpack${webpackVersion}`;
-console.log('Current webpack version:', webpackVersion);
-
module.exports = {
resolveSnapshotPath: (testPath) => path.join(path.dirname(testPath), '__snapshots__', `${path.basename(testPath)}${snapshotExtension}`),
resolveTestPath: (snapshotPath) => snapshotPath.replace(`${path.sep}__snapshots__`, '').slice(0, -snapshotExtension.length), | chore: remove version logging in each test (#<I>) | webpack_webpack-cli | train |
eb2ede5d801fb94fe1d6290fb08850582f379168 | diff --git a/hystrix-core/src/main/java/com/netflix/hystrix/metric/CumulativeThreadPoolEventCounterStream.java b/hystrix-core/src/main/java/com/netflix/hystrix/metric/CumulativeThreadPoolEventCounterStream.java
index <HASH>..<HASH> 100644
--- a/hystrix-core/src/main/java/com/netflix/hystrix/metric/CumulativeThreadPoolEventCounterStream.java
+++ b/hystrix-core/src/main/java/com/netflix/hystrix/metric/CumulativeThreadPoolEventCounterStream.java
@@ -63,10 +63,8 @@ public class CumulativeThreadPoolEventCounterStream extends BucketedCumulativeCo
new CumulativeThreadPoolEventCounterStream(threadPoolKey, numBuckets, bucketSizeInMs, reduceCommandCompletion, reduceBucket);
newStream.start();
streams.putIfAbsent(threadPoolKey.name(), newStream);
- System.out.println("Returning new stream : " + newStream);
return newStream;
} else {
- System.out.println("Returning existing stream : " + existingStream);
return existingStream;
}
} | Removing stray println | Netflix_Hystrix | train |
b7eda794159e4023bbceb1e07938660fa0afebe8 | diff --git a/src/bucket/media.js b/src/bucket/media.js
index <HASH>..<HASH> 100644
--- a/src/bucket/media.js
+++ b/src/bucket/media.js
@@ -21,6 +21,9 @@ const mediaMethods = (bucket_config) => ({
if (params.metadata) {
data.append('metadata', JSON.stringify(params.metadata))
}
+ if (params.trigger_webhook) {
+ data.append('trigger_webhook', params.trigger_webhook)
+ }
const getHeaders = ((form) => new Promise((resolve, reject) => {
if (params.media.buffer) {
form.getLength((err, length) => {
@@ -35,10 +38,10 @@ const mediaMethods = (bucket_config) => ({
)
return getHeaders(data)
.then((headers) => {
- headers["Authorization"] = `Bearer ${bucket_config.write_key}`;
+ headers.Authorization = `Bearer ${bucket_config.write_key}`
return requestHandler(HTTP_METHODS.POST, endpoint, data, headers)
}).catch((error) => {
- throw error.response.data
+ throw error.response.data
})
},
getMedia: (params) => {
@@ -66,13 +69,17 @@ const mediaMethods = (bucket_config) => ({
},
deleteMedia: (params) => {
const endpoint = `${URI}/buckets/${bucket_config.slug}/media/${params.id}`
- let headers;
+ let headers
if (bucket_config.write_key) {
headers = {
- "Authorization": `Bearer ${bucket_config.write_key}`
+ Authorization: `Bearer ${bucket_config.write_key}`
}
}
- return requestHandler(HTTP_METHODS.DELETE, endpoint, null, headers)
+ let data
+ if (params.trigger_webhook) {
+ data.trigger_webhook = params.trigger_webhook
+ }
+ return requestHandler(HTTP_METHODS.DELETE, endpoint, data, headers)
}
}) | fix: add / delete media trigger_webhook | cosmicjs_cosmicjs-node | train |
8a5588c51f2ccf62a47c7261419538d8b2f0a448 | diff --git a/spectra.js b/spectra.js
index <HASH>..<HASH> 100644
--- a/spectra.js
+++ b/spectra.js
@@ -196,13 +196,15 @@
var xyz, tc;
for (tc in rgb) {
- if (rgb[tc] > 0.04045) {
- rgb[tc] = Math.pow(((rgb[tc] + 0.055) / 1.055), 2.4);
- } else {
- rgb[tc] /= 12.92;
- }
+ if (rgb.hasOwnProperty(tc)) {
+ if (rgb[tc] > 0.04045) {
+ rgb[tc] = Math.pow(((rgb[tc] + 0.055) / 1.055), 2.4);
+ } else {
+ rgb[tc] /= 12.92;
+ }
- rgb[tc] = rgb[tc] * 100;
+ rgb[tc] = rgb[tc] * 100;
+ }
}
xyz = {
@@ -221,12 +223,14 @@
};
for (tc in xyz) {
- xyz2[tc] = xyz[tc] / white[tc];
+ if (xyz.hasOwnProperty(tc)) {
+ xyz2[tc] = xyz[tc] / white[tc];
- if (xyz2[tc] > 0.008856) {
- xyz2[tc] = Math.pow(xyz2[tc], (1 / 3));
- } else {
- xyz2[tc] = (7.787 * xyz2[tc]) + (16 / 116);
+ if (xyz2[tc] > 0.008856) {
+ xyz2[tc] = Math.pow(xyz2[tc], (1 / 3));
+ } else {
+ xyz2[tc] = (7.787 * xyz2[tc]) + (16 / 116);
+ }
}
} | Fix unfiltered for in loops. | avp_spectra | train |
d721b5739b4265f5afefd82db1d57258f1e42c56 | diff --git a/alot/commands/thread.py b/alot/commands/thread.py
index <HASH>..<HASH> 100644
--- a/alot/commands/thread.py
+++ b/alot/commands/thread.py
@@ -69,7 +69,7 @@ def determine_sender(mail, action='reply'):
for alias in acc_addresses:
if realname is not None:
break
- regex = re.compile(alias)
+ regex = re.compile(re.escape(alias))
for seen_name, seen_address in candidate_addresses:
if regex.match(seen_address):
logging.debug("match!: '%s' '%s'" % (seen_address, alias)) | Fix regexp treatment of alias addresses
Currently a regexp is created for every alias in the configuration file. However,
special characters are not escaped, which causes a mismatch. Mail aliases are
string literals that may contain lots of special regexp characters, and those
should be escaped before compiling a regexp.
Changed the 'determine_sender' function to escape aliases before compiling
the regexp. | pazz_alot | train
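The mismatch is easy to reproduce in a plain re session; a minimal sketch (hypothetical alias value, not code from alot) showing why an unescaped alias fails to match its own literal address:

import re

alias = 'bob+lists@example.com'        # '+' and '.' are regexp metacharacters
seen_address = 'bob+lists@example.com'

# Unescaped: 'b+' means one or more 'b' and '.' matches any character,
# so the pattern no longer describes the literal address and the match fails.
print(bool(re.compile(alias).match(seen_address)))             # False

# Escaped: every metacharacter is matched literally.
print(bool(re.compile(re.escape(alias)).match(seen_address)))  # True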
1183037d56dc52075d6f061fb0a5579825f9d359 | diff --git a/api/nodes.go b/api/nodes.go
index <HASH>..<HASH> 100644
--- a/api/nodes.go
+++ b/api/nodes.go
@@ -131,6 +131,7 @@ type Node struct {
type HostStats struct {
Memory *HostMemoryStats
CPU []*HostCPUStats
+ Uptime uint64
}
type HostMemoryStats struct {
diff --git a/client/stats/host.go b/client/stats/host.go
index <HASH>..<HASH> 100644
--- a/client/stats/host.go
+++ b/client/stats/host.go
@@ -2,6 +2,7 @@ package stats
import (
"github.com/shirou/gopsutil/cpu"
+ "github.com/shirou/gopsutil/host"
"github.com/shirou/gopsutil/mem"
)
@@ -9,6 +10,7 @@ import (
type HostStats struct {
Memory *MemoryStats
CPU []*CPUStats
+ Uptime uint64
}
// MemoryStats represnts stats related to virtual memory usage
@@ -83,6 +85,9 @@ func (h *HostStatsCollector) Collect() (*HostStats, error) {
Memory: ms,
CPU: cs,
}
+ if uptime, err := host.Uptime(); err == nil {
+ hs.Uptime = uptime
+ }
return hs, nil
}
diff --git a/command/node_status.go b/command/node_status.go
index <HASH>..<HASH> 100644
--- a/command/node_status.go
+++ b/command/node_status.go
@@ -5,6 +5,7 @@ import (
"sort"
"strconv"
"strings"
+ "time"
"github.com/dustin/go-humanize"
@@ -59,6 +60,7 @@ func (c *NodeStatusCommand) Synopsis() string {
func (c *NodeStatusCommand) Run(args []string) int {
var short, verbose, list_allocs, self, stats bool
+ var hostStats *api.HostStats
flags := c.Meta.FlagSet("node-status", FlagSetClient)
flags.Usage = func() { c.Ui.Output(c.Help()) }
@@ -200,6 +202,12 @@ func (c *NodeStatusCommand) Run(args []string) int {
return 1
}
+ if stats {
+ if hostStats, err = client.Nodes().Stats(node.ID, nil); err != nil {
+ c.Ui.Error(fmt.Sprintf("error fetching node resource utilization stats: %v", err))
+ }
+ }
+
// Format the output
basic := []string{
fmt.Sprintf("ID|%s", limit(node.ID, length)),
@@ -209,6 +217,10 @@ func (c *NodeStatusCommand) Run(args []string) int {
fmt.Sprintf("Drain|%v", node.Drain),
fmt.Sprintf("Status|%s", node.Status),
}
+ if stats && hostStats != nil {
+ uptime := time.Duration(hostStats.Uptime * uint64(time.Second))
+ basic = append(basic, fmt.Sprintf("Uptime|%s", uptime.String()))
+ }
c.Ui.Output(formatKV(basic))
if !short {
@@ -219,15 +231,11 @@ func (c *NodeStatusCommand) Run(args []string) int {
}
c.Ui.Output("\n==> Resource Utilization")
c.Ui.Output(formatList(resources))
- if stats {
- if hostStats, err := client.Nodes().Stats(node.ID, nil); err == nil {
- c.Ui.Output("\n===> Node CPU Stats")
- c.printCpuStats(hostStats)
- c.Ui.Output("\n===> Node Memory Stats")
- c.printMemoryStats(hostStats)
- } else {
- c.Ui.Output(fmt.Sprintf("error getting node stats", err))
- }
+ if stats && hostStats != nil {
+ c.Ui.Output("\n===> Node CPU Stats")
+ c.printCpuStats(hostStats)
+ c.Ui.Output("\n===> Node Memory Stats")
+ c.printMemoryStats(hostStats)
}
allocs, err := getAllocs(client, node, length) | Added uptime to node stats | hashicorp_nomad | train |
c71cc8d895eac238390e38f48b087f7ba003febb | diff --git a/gopdf.go b/gopdf.go
index <HASH>..<HASH> 100644
--- a/gopdf.go
+++ b/gopdf.go
@@ -444,6 +444,8 @@ func (gp *GoPdf) MeasureTextWidth(text string) (float64, error) {
fontSize := gp.Curr.Font_Size
sfont := gp.Curr.Font_ISubset
+ sfont.AddChars(text) //AddChars for create CharacterToGlyphIndex
+
sumWidth := uint64(0)
//sum width of each rune.
 for _, r := range text { | fixed: calling MeasureTextWidth before pdf.Cell caused a 'char not found' error | signintech_gopdf | train
2f32c4521383c9c2116377930855a0419cc4d7ee | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -34,6 +34,15 @@ import os
import shutil
from distutils.errors import DistutilsSetupError
+# Workaround for Python 2.6 issue https://bugs.python.org/issue15881
+# This causes this module to be referenced and prevents the premature
+# unloading and subsequent garbage collection causing the error.
+if sys.version_info[0:2] == (2, 6):
+ try:
+ import multiprocessing
+ except ImportError:
+ pass
+
import os_setup
from os_setup import shell, shell_check, import_setuptools | Added workaround for Python <I> for Python issue <I>. | pywbem_pywbem | train |
0942556248ad867bd71130c0b29142e7a5ba38b9 | diff --git a/lib/draper/version.rb b/lib/draper/version.rb
index <HASH>..<HASH> 100644
--- a/lib/draper/version.rb
+++ b/lib/draper/version.rb
@@ -1,3 +1,3 @@
module Draper
- VERSION = "0.5.0"
+ VERSION = "0.6.0"
end | Bump minor version for new ApplicationDecorator structure | drapergem_draper | train |
ee4985c574cbec7f33d9496ee7a771b695b026fd | diff --git a/lib/waterline/utils/query/forge-stage-two-query.js b/lib/waterline/utils/query/forge-stage-two-query.js
index <HASH>..<HASH> 100644
--- a/lib/waterline/utils/query/forge-stage-two-query.js
+++ b/lib/waterline/utils/query/forge-stage-two-query.js
@@ -340,6 +340,16 @@ module.exports = function forgeStageTwoQuery(query, orm) {
return;
}//-โข
+ // If trying to populate an association that is ALSO being omitted,
+ // then we say this is highly irregular.
+ if (_.contains(query.criteria.omit, populateAttrName)) {
+ throw flaverr('E_INVALID_POPULATES', new Error(
+ 'Could not populate `'+populateAttrName+'`. '+
+ 'This query also indicates that this attribute should be omitted. '+
+ 'Cannot populate AND omit an association at the same time!'
+ ));
+ }//-โข
+
// โฌ โโโโโโโฌโโ โฌ โฌโโโ โโโโโฆโโโฆโโฆโโ โโฆโโโโโโโ โโโโโโโฌโโ โโโโโโโโโโโโโโโโฌโโโโโฌโโฌโโโโโโ
// โ โ โโ โโโดโ โ โโโโ โ โโฃ โ โ โ โฆโ โโโโฃ โ โฃ โโค โ โโโฌโ โโโคโโโโโโโ โโ โโโโค โ โโ โโโโ | Don't allow populating AND omitting an attribute at the same time. | balderdashy_waterline | train |
865abf1ba43715af94bf29b8648d18519838ae69 | diff --git a/js/bootstrap-select.js b/js/bootstrap-select.js
index <HASH>..<HASH> 100644
--- a/js/bootstrap-select.js
+++ b/js/bootstrap-select.js
@@ -1567,13 +1567,13 @@
}
}
- for (var len = selectOptions.length; startIndex < len; startIndex++) {
- var item = selectOptions[startIndex];
+ for (var len = selectOptions.length, i = startIndex; i < len; i++) {
+ var item = selectOptions[i];
if (item.tagName !== 'OPTGROUP') {
addOption(item, {});
} else {
- addOptgroup(startIndex, selectOptions);
+ addOptgroup(i, selectOptions);
}
} | don't increase startIndex (to prevent issues with calculating "previous" in addOptgroup) | snapappointments_bootstrap-select | train |
f94c93e95c2e3806ef35843d8a267fea51841f7f | diff --git a/ber.go b/ber.go
index <HASH>..<HASH> 100644
--- a/ber.go
+++ b/ber.go
@@ -133,6 +133,7 @@ func readObject(ber []byte, offset int) (asn1Object, int, error) {
fmt.Printf("--> length bytes: %x\n", ber[offset:offset+numberOfBytes])
for i := 0; i < numberOfBytes; i++ {
length = length*0xFF + (int)(ber[offset])
+ length = length*256 + (int)(ber[offset])
offset++
}
} else if l == 0x80 {
@@ -141,7 +142,6 @@ func readObject(ber []byte, offset int) (asn1Object, int, error) {
if markerIndex == -1 {
return nil, 0, errors.New("ber2der: Invalid BER format")
}
- length = markerIndex - offset
fmt.Printf("--> Undefined Length Found: %d\n", length)
} else {
length = (int)(l)
@@ -163,7 +163,7 @@ func readObject(ber []byte, offset int) (asn1Object, int, error) {
for offset < contentEnd {
var subObj asn1Object
var err error
- subObj, offset, err = readObject(ber, offset)
+ subObj, offset, err = readObject(ber[:contentEnd], offset)
if err != nil {
return nil, 0, err
} | Fixes to BER decoding
- 0xFF != <I>
- markerIndex _is_ the length, since the search slice starts at offset
- use a sub slice when reading sub objects | mastahyeti_cms | train |
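The first bullet is the core arithmetic fix: long-form BER lengths are built one octet at a time in base 256, so the accumulator must be scaled by 256 (a left shift by 8), not by 0xFF, which is 255. A small worked sketch (plain Python, not code from this package):

def decode_length(octets):
    length = 0
    for b in octets:
        length = length * 256 + b     # equivalently: (length << 8) | b
    return length

print(decode_length([0x01, 0x90]))    # 1*256 + 144 = 400
# Accumulating with 0xFF instead gives 1*255 + 144 = 399 — wrong by one
# for every high-order byte, which is the bug the first bullet points at.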
c703a80b1c8f06e9b46df6fc6805d97b4c90e34b | diff --git a/bw-util-deployment/src/main/java/org/bedework/util/deployment/Process.java b/bw-util-deployment/src/main/java/org/bedework/util/deployment/Process.java
index <HASH>..<HASH> 100644
--- a/bw-util-deployment/src/main/java/org/bedework/util/deployment/Process.java
+++ b/bw-util-deployment/src/main/java/org/bedework/util/deployment/Process.java
@@ -138,6 +138,10 @@ public class Process extends AbstractMojo {
outDirPath = val;
}
+ public void setDeployDirPath(final String val) {
+ deployDirPath = val;
+ }
+
public void setWarsOnly(final boolean val) {
warsonly = val;
props.setProperty(propWarsOnly, String.valueOf(val)); | Reinstate method needed for maven plugin | Bedework_bw-util | train |
bd8cc1952d017decfd58413208378d459e4eba89 | diff --git a/src/Block/Service/AbstractCategoriesBlockService.php b/src/Block/Service/AbstractCategoriesBlockService.php
index <HASH>..<HASH> 100755
--- a/src/Block/Service/AbstractCategoriesBlockService.php
+++ b/src/Block/Service/AbstractCategoriesBlockService.php
@@ -27,6 +27,7 @@ use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\OptionsResolver\OptionsResolver;
+use Twig\Environment;
/**
* @author Christian Gripp <[email protected]>
diff --git a/src/Block/Service/AbstractCollectionsBlockService.php b/src/Block/Service/AbstractCollectionsBlockService.php
index <HASH>..<HASH> 100755
--- a/src/Block/Service/AbstractCollectionsBlockService.php
+++ b/src/Block/Service/AbstractCollectionsBlockService.php
@@ -27,6 +27,7 @@ use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\OptionsResolver\OptionsResolver;
+use Twig\Environment;
/**
* @author Christian Gripp <[email protected]>
diff --git a/src/Block/Service/AbstractTagsBlockService.php b/src/Block/Service/AbstractTagsBlockService.php
index <HASH>..<HASH> 100755
--- a/src/Block/Service/AbstractTagsBlockService.php
+++ b/src/Block/Service/AbstractTagsBlockService.php
@@ -27,6 +27,7 @@ use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\OptionsResolver\OptionsResolver;
+use Twig\Environment;
/**
* @author Christian Gripp <[email protected]> | Add missing import for PHPDoc in block services | sonata-project_SonataClassificationBundle | train |
fd5af2dfb8729407a33fb6aa0a4d9c6e35f70b21 | diff --git a/cli/src/main/java/org/jboss/as/cli/impl/CLIModelControllerClient.java b/cli/src/main/java/org/jboss/as/cli/impl/CLIModelControllerClient.java
index <HASH>..<HASH> 100644
--- a/cli/src/main/java/org/jboss/as/cli/impl/CLIModelControllerClient.java
+++ b/cli/src/main/java/org/jboss/as/cli/impl/CLIModelControllerClient.java
@@ -80,6 +80,7 @@ public class CLIModelControllerClient extends AbstractModelControllerClient {
try {
endpoint = Remoting.createEndpoint("cli-client", OptionMap.create(Options.THREAD_DAEMON, true));
endpoint.addConnectionProvider("remote", new RemoteConnectionProviderFactory(), OptionMap.EMPTY);
+ endpoint.addConnectionProvider("remoting", new RemoteConnectionProviderFactory(), OptionMap.EMPTY);
endpoint.addConnectionProvider("http-remoting", new HttpUpgradeConnectionProviderFactory(), OptionMap.create(Options.SSL_ENABLED, Boolean.FALSE));
endpoint.addConnectionProvider("https-remoting", new HttpUpgradeConnectionProviderFactory(), OptionMap.create(Options.SSL_ENABLED, Boolean.TRUE));
} catch (IOException e) { | [WFLY-<I>] Add support for remoting:// URIs
was: 5ffbd5a2eb5e0d0dc<I>bc1c<I>e<I> | wildfly_wildfly-core | train |
fb8c823243b6ab98a55c45af10eeac117cb7d50d | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -21,11 +21,10 @@ exports.levels = require('./levels');
* Exposes a sub-format on the main format object
* as a lazy-loaded getter.
*/
-function exposeFormat(name, path) {
- path = path || name;
+function exposeFormat(name, requireFormat) {
Object.defineProperty(format, name, {
get() {
- return require(`./${path}.js`);
+ return requireFormat();
},
configurable: true
});
@@ -34,20 +33,20 @@ function exposeFormat(name, path) {
//
// Setup all transports as lazy-loaded getters.
//
-exposeFormat('align');
-exposeFormat('errors');
-exposeFormat('cli');
-exposeFormat('combine');
-exposeFormat('colorize');
-exposeFormat('json');
-exposeFormat('label');
-exposeFormat('logstash');
-exposeFormat('metadata');
-exposeFormat('ms');
-exposeFormat('padLevels', 'pad-levels');
-exposeFormat('prettyPrint', 'pretty-print');
-exposeFormat('printf');
-exposeFormat('simple');
-exposeFormat('splat');
-exposeFormat('timestamp');
-exposeFormat('uncolorize');
+exposeFormat('align', function () { return require('./align') });
+exposeFormat('errors', function () { return require('./errors') });
+exposeFormat('cli', function () { return require('./cli') });
+exposeFormat('combine', function () { return require('./combine') });
+exposeFormat('colorize', function () { return require('./colorize') });
+exposeFormat('json', function () { return require('./json') });
+exposeFormat('label', function () { return require('./label') });
+exposeFormat('logstash', function () { return require('./logstash') });
+exposeFormat('metadata', function () { return require('./metadata') });
+exposeFormat('ms', function () { return require('./ms') });
+exposeFormat('padLevels', function () { return require('./pad-levels') });
+exposeFormat('prettyPrint', function () { return require('./pretty-print') });
+exposeFormat('printf', function () { return require('./printf') });
+exposeFormat('simple', function () { return require('./simple') });
+exposeFormat('splat', function () { return require('./splat') });
+exposeFormat('timestamp', function () { return require('./timestamp') });
+exposeFormat('uncolorize', function () { return require('./uncolorize') }); | fix: avoid dynamic requires (#<I>) | winstonjs_logform | train |
903390e86ba97ce63a4133defba1258417e8506c | diff --git a/lib/expressView.js b/lib/expressView.js
index <HASH>..<HASH> 100644
--- a/lib/expressView.js
+++ b/lib/expressView.js
@@ -32,7 +32,7 @@ function ReactEngineView(name, options) {
this.useRouter = (name[0] === '/');
if (this.useRouter) {
- name = url.parse(name).pathname;
+ name = url.parse(name).path;
}
View.call(this, name, options); | Use path instead of pathname to ensure querystring is not stripped | paypal_react-engine | train |
34b07ea9e2f202a36b51c4544b46f9a70fa4ba8d | diff --git a/lxd/storage/drivers/driver_cephfs_volumes.go b/lxd/storage/drivers/driver_cephfs_volumes.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/drivers/driver_cephfs_volumes.go
+++ b/lxd/storage/drivers/driver_cephfs_volumes.go
@@ -12,6 +12,7 @@ import (
"github.com/lxc/lxd/lxd/rsync"
"github.com/lxc/lxd/shared"
"github.com/lxc/lxd/shared/ioprogress"
+ log "github.com/lxc/lxd/shared/log15"
"github.com/lxc/lxd/shared/units"
)
@@ -42,7 +43,7 @@ func (d *cephfs) CreateVolume(vol Volume, filler *VolumeFiller, op *operations.O
// Fill the volume.
if filler != nil && filler.Fill != nil {
- d.logger.Debug("Running filler function")
+ d.logger.Debug("Running filler function", log.Ctx{"path": volPath})
err = filler.Fill(volPath, "")
if err != nil {
return err
diff --git a/lxd/storage/drivers/driver_dir_volumes.go b/lxd/storage/drivers/driver_dir_volumes.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/drivers/driver_dir_volumes.go
+++ b/lxd/storage/drivers/driver_dir_volumes.go
@@ -11,6 +11,7 @@ import (
"github.com/lxc/lxd/lxd/rsync"
"github.com/lxc/lxd/lxd/storage/quota"
"github.com/lxc/lxd/shared"
+ log "github.com/lxc/lxd/shared/log15"
)
// CreateVolume creates an empty volume and can optionally fill it by executing the supplied
@@ -49,7 +50,7 @@ func (d *dir) CreateVolume(vol Volume, filler *VolumeFiller, op *operations.Oper
// Run the volume filler function if supplied.
if filler != nil && filler.Fill != nil {
- d.logger.Debug("Running filler function")
+ d.logger.Debug("Running filler function", log.Ctx{"path": volPath})
err = filler.Fill(volPath, rootBlockPath)
if err != nil {
return err | lxd/storage/drivers: Filler logging | lxc_lxd | train |
55cffdb7325977fd69e4155f3c82de7d3bc16182 | diff --git a/polls/views.py b/polls/views.py
index <HASH>..<HASH> 100644
--- a/polls/views.py
+++ b/polls/views.py
@@ -9,3 +9,8 @@ class PollListView(ListView):
class PollDetailView(DetailView):
model = Poll
+
+ def get_context_data(self, **kwargs):
+ context = super(PollDetailView, self).get_context_data(**kwargs)
+ context['poll'].votable = self.object.can_vote(self.request.user)
+ return context | override get_context_data in poll detail view to set votable flag | byteweaver_django-polls | train |
7a635956632f474f51c9660eda00a1aac6e8a8a9 | diff --git a/src/Workflow/Activity/CallTask.php b/src/Workflow/Activity/CallTask.php
index <HASH>..<HASH> 100644
--- a/src/Workflow/Activity/CallTask.php
+++ b/src/Workflow/Activity/CallTask.php
@@ -63,10 +63,19 @@ class CallTask extends ProcessTask
return $this->calledElement;
}
- public function setWorkflow(Workflow $workflow): void
+ /**
+ * {@inheritdoc}
+ */
+ public function getProcessDefinition()
{
- parent::setWorkflow($workflow);
- $this->processDefinition = $workflow->getProcessDefinition()->getProcessDefinitions()->getLatestById($this->calledElement);
+ if ($this->processDefinition === null) {
+ // by the time this is called we assume that our process definition
+ // is already in our repository. Maybe we should throw an error
+ // if we don't find it there
+ $this->processDefinition = $this->getWorkflow()->getProcessDefinition()->getProcessDefinitions()->getLatestById($this->calledElement);
+ }
+
+ return $this->processDefinition;
}
}
\ No newline at end of file
diff --git a/src/Workflow/Activity/ProcessTask.php b/src/Workflow/Activity/ProcessTask.php
index <HASH>..<HASH> 100644
--- a/src/Workflow/Activity/ProcessTask.php
+++ b/src/Workflow/Activity/ProcessTask.php
@@ -25,13 +25,21 @@ class ProcessTask extends Task
}
}
+ /**
+ * @return ProcessDefinitionInterface
+ */
+ public function getProcessDefinition()
+ {
+ return $this->processDefinition;
+ }
+
protected function createWorkItem($data)
{
if ($this->isClosed()) {
throw new UnexpectedActivityStateException(sprintf('The activity "%s" is closed.', $this->getId()));
}
- $instance = $this->processDefinition->createProcessInstance();
+ $instance = $this->getProcessDefinition()->createProcessInstance();
$instance->setParentProcessInstance($this->getWorkflow());
$instance->setParentActivity($this); | CallTask - fetch process definition at runtime | phpmentors-jp_workflower | train |
f65e03adec8b7cde46f79c012b8d435b81113fae | diff --git a/openquake/risk/job/aggregate_loss_curve.py b/openquake/risk/job/aggregate_loss_curve.py
index <HASH>..<HASH> 100644
--- a/openquake/risk/job/aggregate_loss_curve.py
+++ b/openquake/risk/job/aggregate_loss_curve.py
@@ -22,7 +22,6 @@ Module to compute and plot an aggregate loss curve.
import os
-from openquake.logs import LOG
from openquake.output import curve
@@ -62,8 +61,8 @@ def plot_aggregate_curve(job, aggregate_curve):
"""
if not job.has("AGGREGATE_LOSS_CURVE"):
- LOG.debug("AGGREGATE_LOSS_CURVE parameter not specified, " \
- "skipping aggregate loss curve computation...")
+ job.logger.debug("AGGREGATE_LOSS_CURVE parameter not specified, " \
+ "skipping aggregate loss curve computation...")
return
@@ -75,4 +74,4 @@ def plot_aggregate_curve(job, aggregate_curve):
job.params["INVESTIGATION_TIME"]), autoscale_y=False)
plotter.close()
- LOG.debug("Aggregate loss curve stored at %s" % path)
+ job.logger.debug("Aggregate loss curve stored at %s", path)
diff --git a/openquake/risk/job/general.py b/openquake/risk/job/general.py
index <HASH>..<HASH> 100644
--- a/openquake/risk/job/general.py
+++ b/openquake/risk/job/general.py
@@ -38,7 +38,7 @@ from openquake.utils.tasks import check_job_status
from celery.decorators import task
-LOG = logs.LOG
+
BLOCK_SIZE = 100 | openquake/risk/job/* use specific job logger | gem_oq-engine | train |
71c024e545aa355cc3a90ecd6428fc9779fa5aeb | diff --git a/spec/unit/resource_controller_spec.rb b/spec/unit/resource_controller_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/resource_controller_spec.rb
+++ b/spec/unit/resource_controller_spec.rb
@@ -254,6 +254,19 @@ RSpec.describe "A specific resource controller", type: :controller do
end
end
+ describe "when params batch_action matches existing BatchAction and form inputs defined" do
+ let(:batch_action) { ActiveAdmin::BatchAction.new :flag, "Flag", form: { type: ["a", "b"] }, &batch_action_block }
+
+ let(:http_params) do
+ { batch_action: "flag", collection_selection: ["1"], batch_action_inputs: '{ "type": "a", "bogus": "param" }' }
+ end
+
+ it "should filter permitted params" do
+ expect(controller).to receive(:instance_exec).with(["1"], { "type" => "a" })
+ controller.batch_action
+ end
+ end
+
describe "when params batch_action doesn't match a BatchAction" do
let(:http_params) do
{ batch_action: "derp", collection_selection: ["1"] } | Add a spec that makes the alternative fix fail | activeadmin_activeadmin | train |
17268ed67aed0640b568e7b914bfe39f88c73720 | diff --git a/lib/rufus/tokyo/cabinet/abstract.rb b/lib/rufus/tokyo/cabinet/abstract.rb
index <HASH>..<HASH> 100644
--- a/lib/rufus/tokyo/cabinet/abstract.rb
+++ b/lib/rufus/tokyo/cabinet/abstract.rb
@@ -252,10 +252,13 @@ module Rufus::Tokyo
lib.abs_put(@db, k, Rufus::Tokyo.blen(k), v, Rufus::Tokyo.blen(v))
end
- # No comment
+ # Like #put but doesn't overwrite the value if already set. Returns true
+ # only if there no previous entry for k.
#
def putkeep (k, v)
- lib.abs_putkeep2(@db, k, v) == 1
+
+ (lib.abs_putkeep(
+ @db, k, Rufus::Tokyo.blen(k), v, Rufus::Tokyo.blen(v)) == 1)
end
# (The actual #[] method is provided by HashMethods
diff --git a/lib/rufus/tokyo/cabinet/lib.rb b/lib/rufus/tokyo/cabinet/lib.rb
index <HASH>..<HASH> 100644
--- a/lib/rufus/tokyo/cabinet/lib.rb
+++ b/lib/rufus/tokyo/cabinet/lib.rb
@@ -90,7 +90,7 @@ module Rufus::Tokyo
attfunc :abs_out, :tcadbout, [ :pointer, :pointer, :int ], :int
- attfunc :abs_putkeep2, :tcadbputkeep2, [ :pointer, :string, :string ], :int
+ attfunc :abs_putkeep, :tcadbputkeep, [ :pointer, :pointer, :int, :pointer, :int ], :int
attfunc :abs_iterinit, :tcadbiterinit, [ :pointer ], :int
attfunc :abs_iternext, :tcadbiternext, [ :pointer, :pointer ], :pointer
diff --git a/lib/rufus/tokyo/tyrant/lib.rb b/lib/rufus/tokyo/tyrant/lib.rb
index <HASH>..<HASH> 100644
--- a/lib/rufus/tokyo/tyrant/lib.rb
+++ b/lib/rufus/tokyo/tyrant/lib.rb
@@ -80,7 +80,7 @@ module Rufus::Tokyo
attfunc :abs_out, :tcrdbout, [ :pointer, :pointer, :int ], :int
- attfunc :abs_putkeep2, :tcrdbputkeep2, [ :pointer, :string, :string ], :int
+ attfunc :abs_putkeep, :tcrdbputkeep, [ :pointer, :pointer, :int, :pointer, :int ], :int
attfunc :abs_iterinit, :tcrdbiterinit, [ :pointer ], :int
attfunc :abs_iternext, :tcrdbiternext, [ :pointer, :pointer ], :pointer
diff --git a/spec/cabinet_spec.rb b/spec/cabinet_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cabinet_spec.rb
+++ b/spec/cabinet_spec.rb
@@ -458,5 +458,13 @@ describe 'Rufus::Tokyo::Cabinet#putkeep' do
@db['pillow'] = 'Shonagon'
@db.putkeep('pillow', 'Ruby').should.equal(false)
end
+
+ it 'should accept binary data \0' do
+
+ s = "Sei#{0.chr}Shonagon"
+
+ @db.putkeep(s, s).should.be.true
+ @db[s].should.equal(s)
+ end
end
diff --git a/spec/edo_cabinet_spec.rb b/spec/edo_cabinet_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/edo_cabinet_spec.rb
+++ b/spec/edo_cabinet_spec.rb
@@ -431,7 +431,14 @@ if defined?(TokyoCabinet)
@db['pillow'] = 'Shonagon'
@db.putkeep('pillow', 'Ruby').should.equal(false)
end
- end
+ it 'should accept binary data \0' do
+
+ s = "Sei#{0.chr}Shonagon"
+
+ @db.putkeep(s, s).should.be.true
+ @db[s].should.equal(s)
+ end
+ end
end
diff --git a/spec/edo_ntyrant_spec.rb b/spec/edo_ntyrant_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/edo_ntyrant_spec.rb
+++ b/spec/edo_ntyrant_spec.rb
@@ -286,6 +286,14 @@ if defined?(Rufus::Edo)
@db['pillow'] = 'Shonagon'
@db.putkeep('pillow', 'Ruby').should.equal(false)
end
+
+ it 'should accept binary data \0' do
+
+ s = "Sei#{0.chr}Shonagon"
+
+ @db.putkeep(s, s).should.be.true
+ @db[s].should.equal(s)
+ end
end
end
diff --git a/spec/tyrant_spec.rb b/spec/tyrant_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/tyrant_spec.rb
+++ b/spec/tyrant_spec.rb
@@ -297,4 +297,13 @@ describe 'Rufus::Tokyo::Tyrant#putkeep' do
@db['pillow'] = 'Shonagon'
@db.putkeep('pillow', 'Ruby').should.equal(false)
end
+
+ it 'should accept binary data \0' do
+
+ s = "Sei#{0.chr}Shonagon"
+
+ @db.putkeep(s, s).should.be.true
+ @db[s].should.equal(s)
+ end
end
+ | making sure putkeep accepts binary data | jmettraux_rufus-tokyo | train |
9eccd82044e91bd99f5e28e5e166bee2a7451355 | diff --git a/src/main/java/org/asteriskjava/live/AsteriskServer.java b/src/main/java/org/asteriskjava/live/AsteriskServer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asteriskjava/live/AsteriskServer.java
+++ b/src/main/java/org/asteriskjava/live/AsteriskServer.java
@@ -208,8 +208,10 @@ public interface AsteriskServer
* Returns the active channels of the Asterisk server.
*
* @return a Collection of active channels.
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- Collection<AsteriskChannel> getChannels();
+ Collection<AsteriskChannel> getChannels() throws ManagerCommunicationException;
/**
* Returns a channel by its name.
@@ -217,8 +219,10 @@ public interface AsteriskServer
* @param name name of the channel to return
* @return the channel with the given name or <code>null</code> if there
* is no such channel.
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- AsteriskChannel getChannelByName(String name);
+ AsteriskChannel getChannelByName(String name) throws ManagerCommunicationException;
/**
* Returns a channel by its unique id.
@@ -226,15 +230,19 @@ public interface AsteriskServer
* @param id the unique id of the channel to return
* @return the channel with the given unique id or <code>null</code> if
* there is no such channel.
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- AsteriskChannel getChannelById(String id);
+ AsteriskChannel getChannelById(String id) throws ManagerCommunicationException;
/**
* Returns the acitve MeetMe rooms on the Asterisk server.
*
* @return a Collection of MeetMeRooms
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- Collection<MeetMeRoom> getMeetMeRooms();
+ Collection<MeetMeRoom> getMeetMeRooms() throws ManagerCommunicationException;
/**
* Returns the MeetMe room with the given number, if the room does not yet
@@ -242,15 +250,19 @@ public interface AsteriskServer
*
* @param roomNumber the number of the room to return
* @return the MeetMe room with the given number.
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- MeetMeRoom getMeetMeRoom(String roomNumber);
+ MeetMeRoom getMeetMeRoom(String roomNumber) throws ManagerCommunicationException;
/**
* Returns the queues served by the Asterisk server.
*
* @return a Collection of queues.
+ * @throws ManagerCommunicationException if there is a problem communication
+ * with Asterisk
*/
- Collection<AsteriskQueue> getQueues();
+ Collection<AsteriskQueue> getQueues() throws ManagerCommunicationException;
/**
* Returns the exact version string of this Asterisk server.
diff --git a/src/main/java/org/asteriskjava/live/DefaultAsteriskServer.java b/src/main/java/org/asteriskjava/live/DefaultAsteriskServer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asteriskjava/live/DefaultAsteriskServer.java
+++ b/src/main/java/org/asteriskjava/live/DefaultAsteriskServer.java
@@ -20,7 +20,6 @@ import java.util.Collection;
import java.util.Map;
import org.asteriskjava.live.internal.AsteriskServerImpl;
-import org.asteriskjava.manager.AuthenticationFailedException;
import org.asteriskjava.manager.DefaultManagerConnection;
import org.asteriskjava.manager.ManagerConnection;
@@ -110,7 +109,7 @@ public class DefaultAsteriskServer implements AsteriskServer
impl.setManagerConnection(eventConnection);
}
- public void initialize() throws AuthenticationFailedException, ManagerCommunicationException
+ public void initialize() throws ManagerCommunicationException
{
impl.initialize();
}
@@ -165,32 +164,32 @@ public class DefaultAsteriskServer implements AsteriskServer
impl.originateToExtensionAsync(channel, context, exten, priority, timeout, cb);
}
- public Collection<AsteriskChannel> getChannels()
+ public Collection<AsteriskChannel> getChannels() throws ManagerCommunicationException
{
return impl.getChannels();
}
- public AsteriskChannel getChannelByName(String name)
+ public AsteriskChannel getChannelByName(String name) throws ManagerCommunicationException
{
return impl.getChannelByName(name);
}
- public AsteriskChannel getChannelById(String id)
+ public AsteriskChannel getChannelById(String id) throws ManagerCommunicationException
{
return impl.getChannelById(id);
}
- public Collection<MeetMeRoom> getMeetMeRooms()
+ public Collection<MeetMeRoom> getMeetMeRooms() throws ManagerCommunicationException
{
return impl.getMeetMeRooms();
}
- public MeetMeRoom getMeetMeRoom(String name)
+ public MeetMeRoom getMeetMeRoom(String name) throws ManagerCommunicationException
{
return impl.getMeetMeRoom(name);
}
- public Collection<AsteriskQueue> getQueues()
+ public Collection<AsteriskQueue> getQueues() throws ManagerCommunicationException
{
return impl.getQueues();
}
@@ -200,7 +199,7 @@ public class DefaultAsteriskServer implements AsteriskServer
return impl.getVersion();
}
- public int[] getVersion(String file)
+ public int[] getVersion(String file) throws ManagerCommunicationException
{
return impl.getVersion(file);
} | ManagerConnection is now implicitly initialized if needed | asterisk-java_asterisk-java | train |
67ea2e9305cd6db0b8a481d5d6b7a9ff903f918f | diff --git a/drivers/docker/coordinator.go b/drivers/docker/coordinator.go
index <HASH>..<HASH> 100644
--- a/drivers/docker/coordinator.go
+++ b/drivers/docker/coordinator.go
@@ -65,6 +65,7 @@ type DockerImageClient interface {
// LogEventFn is a callback which allows Drivers to emit task events.
type LogEventFn func(message string, annotations map[string]string)
+// noopLogEventFn satisfies the LogEventFn type but noops when called
func noopLogEventFn(string, map[string]string) {}
// dockerCoordinatorConfig is used to configure the Docker coordinator.
diff --git a/drivers/docker/driver.go b/drivers/docker/driver.go
index <HASH>..<HASH> 100644
--- a/drivers/docker/driver.go
+++ b/drivers/docker/driver.go
@@ -869,9 +869,14 @@ func (d *Driver) createContainerConfig(task *drivers.TaskConfig, driverConfig *T
hostConfig.ReadonlyRootfs = driverConfig.ReadonlyRootfs
+ // set the docker network mode
hostConfig.NetworkMode = driverConfig.NetworkMode
+
+ // if the driver config does not specify a network mode then try to use the
+ // shared alloc network
if hostConfig.NetworkMode == "" {
if task.NetworkIsolation.Path != "" {
+ // find the previously created parent container to join networks with
netMode := fmt.Sprintf("container:%s", task.NetworkIsolation.Labels[dockerNetSpecLabelKey])
logger.Debug("configuring network mode for task group", "network_mode", netMode)
hostConfig.NetworkMode = netMode
diff --git a/drivers/docker/network.go b/drivers/docker/network.go
index <HASH>..<HASH> 100644
--- a/drivers/docker/network.go
+++ b/drivers/docker/network.go
@@ -7,7 +7,12 @@ import (
"github.com/hashicorp/nomad/plugins/drivers"
)
+// infraContainerImage is the image used for the parent namespace container
const infraContainerImage = "gcr.io/google_containers/pause-amd64:3.0"
+
+// dockerNetSpecLabelKey is used when creating a parent container for
+// shared networking. It is a label whos value identifies the container ID of
+// the parent container so tasks can configure their network mode accordingly
const dockerNetSpecLabelKey = "docker_sandbox_container_id"
func (d *Driver) CreateNetwork(allocID string) (*drivers.NetworkIsolationSpec, error) {
@@ -73,11 +78,13 @@ func (d *Driver) DestroyNetwork(allocID string, spec *drivers.NetworkIsolationSp
func (d *Driver) createSandboxContainerConfig(allocID string) (*docker.CreateContainerOptions, error) {
return &docker.CreateContainerOptions{
- Name: fmt.Sprintf("nomad_%s", allocID),
+ Name: fmt.Sprintf("nomad_init_%s", allocID),
Config: &docker.Config{
Image: infraContainerImage,
},
HostConfig: &docker.HostConfig{
+ // set the network mode to none which creates a network namespace with
+ // only a loopback interface
NetworkMode: "none",
},
}, nil | docker: add additional commens | hashicorp_nomad | train |
1d95d5bb29bf5d5319820c41308d732bba81ae20 | diff --git a/src/libtcod.py b/src/libtcod.py
index <HASH>..<HASH> 100644
--- a/src/libtcod.py
+++ b/src/libtcod.py
@@ -44,11 +44,9 @@ _lib_ctypes = _ctypes.CDLL(
# add Mac dylib's to DYLD_LIBRARY_PATH
if 'darwin' in _sys.platform:
if 'DYLD_LIBRARY_PATH' in _os.environ:
- _os.environ['DYLD_LIBRARY_PATH'] += ':' + _os.path.join(__path__[0],
- _get_lib_path_crossplatform())
+ _os.environ['DYLD_LIBRARY_PATH'] += ':' + _os.path.realpath(_os.path.join(__path__[0], _get_lib_path_crossplatform())
else:
- _os.environ['DYLD_LIBRARY_PATH'] = _os.path.join(__path__[0],
- _get_lib_path_crossplatform())
+ _os.environ['DYLD_LIBRARY_PATH'] = _os.path.realpath(_os.path.join(__path__[0], _get_lib_path_crossplatform())
from . import _libtcod | try using realpath for dyld path | libtcod_python-tcod | train |
c3538219c1f50bb2dbaab5a467e288d22c12a76c | diff --git a/aiohttp/multidict.py b/aiohttp/multidict.py
index <HASH>..<HASH> 100644
--- a/aiohttp/multidict.py
+++ b/aiohttp/multidict.py
@@ -154,7 +154,7 @@ class _CIMultiDictProxy(_CIBase, _MultiDictProxy):
return _CIMultiDict(self.items())
-class MultiDictMixin:
+class _MultiDict(_Base, abc.MutableMapping):
def __init__(self, *args, **kwargs):
if len(args) > 1:
@@ -253,12 +253,7 @@ class MultiDictMixin:
raise NotImplementedError("Use extend method instead")
-class _MultiDict(_Base, MultiDictMixin, abc.MutableMapping):
- """An ordered dictionary that can have multiple values for each key."""
-
-
class _CIMultiDict(_CIBase, _MultiDict):
- """An ordered dictionary that can have multiple values for each key."""
def _fill(self, ipairs):
for key, value in ipairs: | Get rid of MultiDictMixin | aio-libs_aiohttp | train |
ea0f0b509dcb886b3cd88dc885077f196584c590 | diff --git a/src/animation.js b/src/animation.js
index <HASH>..<HASH> 100644
--- a/src/animation.js
+++ b/src/animation.js
@@ -167,7 +167,7 @@ Crafty.c("SpriteAnimation", {
}
- if (data.currentSlideNumber === data.currentReel.length && this._frame.frameNumberBetweenSlides === data.numberOfFramesBetweenSlides) {
+ if (data.currentSlideNumber === data.currentReel.length) {
data.currentSlideNumber = 0;
if (this._frame.repeatInfinitly === true || this._frame.repeat > 0) {
if (this._frame.repeat) this._frame.repeat--; | Fix spriteanimation so that the final frame doesn't display twice | craftyjs_Crafty | train |
b2e453786d4c6a9e2689a1afbd2cb3a2e75c64c0 | diff --git a/src/org/joml/Matrix4x3d.java b/src/org/joml/Matrix4x3d.java
index <HASH>..<HASH> 100644
--- a/src/org/joml/Matrix4x3d.java
+++ b/src/org/joml/Matrix4x3d.java
@@ -1603,7 +1603,7 @@ public class Matrix4x3d implements Externalizable, Matrix4x3dc {
if (Options.NO_UNSAFE)
throw new UnsupportedOperationException("Not supported when using joml.nounsafe");
MemUtil.MemUtilUnsafe.get(this, address);
- return this;
+ return determineProperties();
}
//#endif | Determine matrix properties in Matrix4x3d.setFromAddress() | JOML-CI_JOML | train |
e066771207297be12998868874f110c6a4178d99 | diff --git a/core/router/api_builder.go b/core/router/api_builder.go
index <HASH>..<HASH> 100644
--- a/core/router/api_builder.go
+++ b/core/router/api_builder.go
@@ -188,20 +188,28 @@ func (api *APIBuilder) Handle(method string, relativePath string, handlers ...co
//
// This method is used behind the scenes at the `Controller` function
// in order to handle more than one paths for the same controller instance.
-func (api *APIBuilder) HandleMany(method string, relativePath string, handlers ...context.Handler) (routes []*Route) {
- trimmedPath := strings.Trim(relativePath, " ")
+func (api *APIBuilder) HandleMany(methodOrMulti string, relativePathorMulti string, handlers ...context.Handler) (routes []*Route) {
+ trimmedPath := strings.Trim(relativePathorMulti, " ")
+ trimmedMethod := strings.Trim(methodOrMulti, " ")
// at least slash
// a space
// at least one other slash for the next path
// app.Controller("/user /user{id}", new(UserController))
paths := strings.Split(trimmedPath, " ")
+ methods := strings.Split(trimmedMethod, " ")
for _, p := range paths {
if p != "" {
- if method == "ANY" || method == "ALL" {
- routes = append(routes, api.Any(p, handlers...)...)
- continue
+ for _, method := range methods {
+ if method == "" {
+ method = "ANY"
+ }
+ if method == "ANY" || method == "ALL" {
+ routes = append(routes, api.Any(p, handlers...)...)
+ continue
+ }
+ routes = append(routes, api.Handle(method, p, handlers...))
}
- routes = append(routes, api.Handle(method, p, handlers...))
+
}
}
return
@@ -699,18 +707,10 @@ func (api *APIBuilder) Favicon(favPath string, requestPath ...string) *Route {
return nil
}
- // ignore error f.Close()
defer f.Close()
fi, _ := f.Stat()
if fi.IsDir() { // if it's dir the try to get the favicon.ico
- fav := path.Join(favPath, "favicon.ico")
- f, err = os.Open(fav)
- if err != nil {
- //we try again with .png
- return api.Favicon(path.Join(favPath, "favicon.png"))
- }
- favPath = fav
- fi, _ = f.Stat()
+ return api.Favicon(path.Join(favPath, "favicon.ico"))
}
cType := TypeByFilename(favPath)
@@ -747,7 +747,7 @@ func (api *APIBuilder) Favicon(favPath string, requestPath ...string) *Route {
}
}
- reqPath := "/favicon" + path.Ext(fi.Name()) //we could use the filename, but because standards is /favicon.ico/.png.
+ reqPath := "/favicon" + path.Ext(fi.Name()) // we could use the filename, but because standards is /favicon.ico
if len(requestPath) > 0 && requestPath[0] != "" {
reqPath = requestPath[0]
} | Add support for multi http methods route registration at the `.HandleMany`
Former-commit-id: 6aa2c<I>d<I>dac<I>a1cf<I>d<I>a3c<I>ad7b2fb0 | kataras_iris | train |
dc0f8d266beb21b0b63bfc78b5130eeef8e4e2b6 | diff --git a/python_modules/dagster/dagster/_core/definitions/materialize.py b/python_modules/dagster/dagster/_core/definitions/materialize.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/_core/definitions/materialize.py
+++ b/python_modules/dagster/dagster/_core/definitions/materialize.py
@@ -1,7 +1,9 @@
+import warnings
from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, Set, Union
import dagster._check as check
from dagster._utils import merge_dicts
+from dagster._utils.backcompat import ExperimentalWarning
from ..errors import DagsterInvariantViolationError
from ..instance import DagsterInstance
@@ -55,12 +57,17 @@ def materialize(
resource_defs = wrap_resources_for_execution(resources)
resource_defs = merge_dicts({DEFAULT_IO_MANAGER_KEY: fs_io_manager}, resource_defs)
- return build_assets_job(
- "in_process_materialization_job",
- assets=assets_defs,
- source_assets=source_assets,
- resource_defs=resource_defs,
- ).execute_in_process(run_config=run_config, instance=instance, partition_key=partition_key)
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore", category=ExperimentalWarning, message=".*build_assets_job.*"
+ )
+
+ return build_assets_job(
+ "in_process_materialization_job",
+ assets=assets_defs,
+ source_assets=source_assets,
+ resource_defs=resource_defs,
+ ).execute_in_process(run_config=run_config, instance=instance, partition_key=partition_key)
def materialize_to_memory(
@@ -117,12 +124,17 @@ def materialize_to_memory(
instance = check.opt_inst_param(instance, "instance", DagsterInstance)
partition_key = check.opt_str_param(partition_key, "partition_key")
- return build_assets_job(
- "in_process_materialization_job",
- assets=assets_defs,
- source_assets=source_assets,
- resource_defs=resource_defs,
- ).execute_in_process(run_config=run_config, instance=instance, partition_key=partition_key)
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore", category=ExperimentalWarning, message=".*build_assets_job.*"
+ )
+
+ return build_assets_job(
+ "in_process_materialization_job",
+ assets=assets_defs,
+ source_assets=source_assets,
+ resource_defs=resource_defs,
+ ).execute_in_process(run_config=run_config, instance=instance, partition_key=partition_key)
def _get_required_io_manager_keys(
diff --git a/python_modules/dagster/dagster_tests/core_tests/asset_defs_tests/test_materialize.py b/python_modules/dagster/dagster_tests/core_tests/asset_defs_tests/test_materialize.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster_tests/core_tests/asset_defs_tests/test_materialize.py
+++ b/python_modules/dagster/dagster_tests/core_tests/asset_defs_tests/test_materialize.py
@@ -1,10 +1,12 @@
import os
+import warnings
from tempfile import TemporaryDirectory
import pytest
from dagster import (
AssetKey,
+ AssetOut,
AssetsDefinition,
DagsterInvalidConfigError,
DagsterInvalidDefinitionError,
@@ -29,6 +31,15 @@ from dagster import (
from dagster._core.test_utils import instance_for_test
[email protected](autouse=True)
+def check_experimental_warnings():
+ with warnings.catch_warnings(record=True) as record:
+ yield
+
+ for w in record:
+ assert False, f"Unexpected warning: {w.message.args[0]}"
+
+
def test_basic_materialize():
@asset
def the_asset():
@@ -227,8 +238,8 @@ def test_materialize_multi_asset():
@multi_asset(
outs={
- "my_out_name": Out(metadata={"foo": "bar"}),
- "my_other_out_name": Out(metadata={"bar": "foo"}),
+ "my_out_name": AssetOut(metadata={"foo": "bar"}),
+ "my_other_out_name": AssetOut(metadata={"bar": "foo"}),
},
internal_asset_deps={
"my_out_name": {AssetKey("my_other_out_name")}, | materialize should not throw warnings for build_assets_job (#<I>) | dagster-io_dagster | train |
03727188310e7fa9b5fc0b3e04e8e418d2acac6a | diff --git a/.testing.pylintrc b/.testing.pylintrc
index <HASH>..<HASH> 100644
--- a/.testing.pylintrc
+++ b/.testing.pylintrc
@@ -154,7 +154,8 @@ disable=R,
E8266,
E8402,
E8731,
- 3rd-party-local-module-not-gated
+ 3rd-party-local-module-not-gated,
+ pep8-reserved-keywords
# Disabled:
# R* [refactoring suggestions & reports]
diff --git a/salt/cloud/clouds/vmware.py b/salt/cloud/clouds/vmware.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/vmware.py
+++ b/salt/cloud/clouds/vmware.py
@@ -139,7 +139,7 @@ import salt.config as config
from salt.ext import six
try:
# Attempt to import pyVmomi libs
- from pyVmomi import vim
+ from pyVmomi import vim # pylint: disable=no-name-in-module
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
diff --git a/salt/modules/gpg.py b/salt/modules/gpg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/gpg.py
+++ b/salt/modules/gpg.py
@@ -181,7 +181,7 @@ def _create_gpg(user=None, gnupghome=None):
gnupghome = _get_user_gnupghome(user)
if GPG_1_3_1:
- gpg = gnupg.GPG(homedir=gnupghome)
+ gpg = gnupg.GPG(homedir=gnupghome) # pylint: disable=unexpected-keyword-arg
else:
gpg = gnupg.GPG(gnupghome=gnupghome)
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index <HASH>..<HASH> 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -1082,7 +1082,7 @@ def lowdata_fmt():
# headers for form encoded data (including charset or something similar)
if data and isinstance(data, collections.Mapping):
# Make the 'arg' param a list if not already
- if 'arg' in data and not isinstance(data['arg'], list):
+ if 'arg' in data and not isinstance(data['arg'], list): # pylint: disable=unsupported-membership-test
data['arg'] = [data['arg']]
# Finally, make a Low State and put it in request | Fix lint issues on salt | saltstack_salt | train |
d67f1b7718bb57c0137685eb609502869929b704 | diff --git a/www/docusaurus.config.js b/www/docusaurus.config.js
index <HASH>..<HASH> 100644
--- a/www/docusaurus.config.js
+++ b/www/docusaurus.config.js
@@ -48,12 +48,12 @@ module.exports = {
}
]
},
- announcementBar: {
- id: 'release-candiate-announcement',
- content: 'NextAuth.js now has automatic ๐ค releases ๐! Check out the <a href="https://next-auth-git-canary.nextauthjs.vercel.app">Canary documentation ๐</a>',
- backgroundColor: '#2DB2F9',
- textColor: '#fff'
- },
+ // announcementBar: {
+ // id: 'release-candiate-announcement',
+ // content: 'NextAuth.js now has automatic ๐ค releases ๐! Check out the <a href="https://next-auth-git-canary.nextauthjs.vercel.app">Canary documentation ๐</a>',
+ // backgroundColor: '#2DB2F9',
+ // textColor: '#fff'
+ // },
footer: {
links: [
{ | docs: remove announcement bar [skip release] | iaincollins_next-auth | train |
2d6c502ee3f41895881270dfbce4a4db8ab45336 | diff --git a/bigchaindb/commands/bigchain.py b/bigchaindb/commands/bigchain.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/commands/bigchain.py
+++ b/bigchaindb/commands/bigchain.py
@@ -14,7 +14,7 @@ import bigchaindb
import bigchaindb.config_utils
from bigchaindb import db
from bigchaindb.exceptions import DatabaseAlreadyExists, KeypairNotFoundException
-from bigchaindb.commands.utils import base_parser, start
+from bigchaindb.commands import utils
from bigchaindb.processes import Processes
from bigchaindb import crypto
@@ -122,8 +122,12 @@ def run_drop(args):
def run_start(args):
"""Start the processes to run the node"""
- # run_configure(args, skip_if_exists=True)
bigchaindb.config_utils.autoconfigure(filename=args.config, force=True)
+
+ if args.start_rethinkdb:
+ proc = utils.start_rethinkdb()
+ logger.info('RethinkDB started with PID %s' % proc.pid)
+
try:
db.init()
except DatabaseAlreadyExists:
@@ -140,7 +144,12 @@ def run_start(args):
def main():
parser = argparse.ArgumentParser(
description='Control your BigchainDB node.',
- parents=[base_parser])
+ parents=[utils.base_parser])
+
+ parser.add_argument('--experimental-start-rethinkdb',
+ dest='start_rethinkdb',
+ action='store_true',
+ help='Run RethinkDB on start')
# all the commands are contained in the subparsers object,
# the command selected by the user will be stored in `args.command`
@@ -172,7 +181,7 @@ def main():
subparsers.add_parser('start',
help='Start BigchainDB')
- start(parser, globals())
+ utils.start(parser, globals())
if __name__ == '__main__':
diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/commands/utils.py
+++ b/bigchaindb/commands/utils.py
@@ -2,12 +2,30 @@
for ``argparse.ArgumentParser``.
"""
+import os
import argparse
import multiprocessing as mp
+import subprocess
from bigchaindb.version import __version__
+def start_rethinkdb():
+ proc = subprocess.Popen(['rethinkdb', '--bind', 'all'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True)
+
+ line = ''
+
+ for line in proc.stdout:
+ if line.startswith('Server ready'):
+ return proc
+
+ exit('Error starting RethinkDB, reason is: {}'.format(line))
+ proc.kill()
+
+
def start(parser, scope):
"""Utility function to execute a subcommand.
diff --git a/tests/test_commands.py b/tests/test_commands.py
index <HASH>..<HASH> 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -1,4 +1,5 @@
import json
+from unittest.mock import Mock, patch
from argparse import Namespace
from pprint import pprint
import copy
@@ -62,9 +63,21 @@ def mock_bigchaindb_backup_config(monkeypatch):
def test_bigchain_run_start(mock_run_configure, mock_processes_start, mock_db_init_with_existing_db):
from bigchaindb.commands.bigchain import run_start
- args = Namespace(config=None, yes=True)
+ args = Namespace(start_rethinkdb=False, config=None, yes=True)
+ run_start(args)
+
+
+@patch('bigchaindb.commands.utils.start_rethinkdb')
+def test_bigchain_run_start_with_rethinkdb(mock_start_rethinkdb,
+ mock_run_configure,
+ mock_processes_start,
+ mock_db_init_with_existing_db):
+ from bigchaindb.commands.bigchain import run_start
+ args = Namespace(start_rethinkdb=True, config=None, yes=True)
run_start(args)
+ mock_start_rethinkdb.assert_called_with()
+
@pytest.mark.skipif(reason="BigchainDB doesn't support the automatic creation of a config file anymore")
def test_bigchain_run_start_assume_yes_create_default_config(monkeypatch, mock_processes_start,
@@ -195,3 +208,19 @@ def test_run_configure_when_config_does_exist(monkeypatch,
args = Namespace(config='foo', yes=None)
run_configure(args)
assert value == {}
+
+
+@patch('subprocess.Popen')
+def test_start_rethinkdb_returns_a_process_when_successful(mock_popen):
+ from bigchaindb.commands import utils
+ mock_popen.return_value = Mock(stdout=['Server ready'])
+ assert utils.start_rethinkdb() is mock_popen.return_value
+
+
+@patch('subprocess.Popen')
+def test_start_rethinkdb_exists_when_cannot_start(mock_popen):
+ from bigchaindb.commands import utils
+ mock_popen.return_value = Mock(stdout=['Nopety nope'])
+ with pytest.raises(SystemExit):
+ utils.start_rethinkdb()
+ | Add exp command to start RethinkDB | bigchaindb_bigchaindb | train |
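The new start_rethinkdb helper launches the server and blocks until a "Server ready" line shows up on stdout, bailing out otherwise. A minimal sketch of that wait-for-ready pattern; the echo command and the marker string are placeholders so the example runs anywhere, not the real RethinkDB invocation:

import subprocess
import sys

def start_and_wait(cmd, ready_marker):
    """Start a child process and return it once it prints the ready marker."""
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    for line in proc.stdout:
        if line.startswith(ready_marker):
            return proc
    # The process ended without signalling readiness.
    proc.kill()
    sys.exit("child exited before becoming ready")

server = start_and_wait(["echo", "Server ready"], "Server ready")
print(server.pid)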
12d4a9739e3fde5441dc4f7f05f2eba603505e28 | diff --git a/lib/bookingsync/engine/helpers.rb b/lib/bookingsync/engine/helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/bookingsync/engine/helpers.rb
+++ b/lib/bookingsync/engine/helpers.rb
@@ -31,6 +31,10 @@ module BookingSync::Engine::Helpers
end
end
+ def application_token
+ @application_token ||= oauth_client.client_credentials.get_token
+ end
+
def oauth_client
client_options = {
site: ENV['BOOKINGSYNC_URL'] || 'https://www.bookingsync.com', | Expose application token in controller helper
This token is obtained using the client credentials flow | BookingSync_bookingsync-engine | train |
4ec5582511c89064d3b9bf5b1ca0a47451243f53 | diff --git a/mythril/analysis/modules/integer.py b/mythril/analysis/modules/integer.py
index <HASH>..<HASH> 100644
--- a/mythril/analysis/modules/integer.py
+++ b/mythril/analysis/modules/integer.py
@@ -2,13 +2,12 @@
underflows."""
from math import log2, ceil
-from typing import cast, List, Dict, Set
+from typing import cast, List, Set
from mythril.analysis import solver
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import INTEGER_OVERFLOW_AND_UNDERFLOW
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.state.global_state import GlobalState
-from mythril.laser.ethereum.util import get_concrete_int
from mythril.laser.ethereum.state.annotation import StateAnnotation
from mythril.analysis.modules.base import DetectionModule
from copy import copy
@@ -18,6 +17,7 @@ from mythril.laser.smt import (
BVSubNoUnderflow,
BVMulNoOverflow,
BitVec,
+ If,
symbol_factory,
Not,
Expression,
@@ -191,6 +191,8 @@ class IntegerOverflowUnderflowModule(DetectionModule):
value = stack[index]
if isinstance(value, BitVec):
return value
+ if isinstance(value, Bool):
+ return If(value, 1, 0)
stack[index] = symbol_factory.BitVecVal(value, 256)
return stack[index] | Handle the case for bool (#<I>) | ConsenSys_mythril-classic | train |
682d63c52b356e2b7343b9c1b4ba6215054aaff9 | diff --git a/aospy/calc.py b/aospy/calc.py
index <HASH>..<HASH> 100644
--- a/aospy/calc.py
+++ b/aospy/calc.py
@@ -542,24 +542,19 @@ class Calc(object):
raise ValueError("Specified time-reduction method '{}' is not "
"supported".format(reduction))
- def region_calcs(self, loc_ts, n=0):
- """Region-averaged computations. Execute and save to external file."""
- calcs_reg = ('ts', 'av', 'std')
- # Perform each calculation for each region.
- for calc in calcs_reg:
- calc_name = ('reg.' + calc)
- if calc_name in self.dtype_out_time:
- reg_dat = {}
- for reg in self.region.values():
- # Just pass along the data if averaged already.
- if 'av' in self.dtype_in_time:
- data_out = reg.ts(loc_ts, self.model[n])
- # Otherwise perform the calculation.
- else:
- method = getattr(reg, calc)
- data_out = method(loc_ts, self.model[n])
- reg_dat.update({reg.name: data_out})
- self.save(reg_dat, calc_name)
+ def region_calcs(self, arr, func, n=0):
+ """Perform a calculation for all regions."""
+ reg_dat = {}
+ for reg in self.region.values():
+ # Just pass along the data if averaged already.
+ if 'av' in self.dtype_in_time:
+ data_out = reg.ts(arr, self.model[n])
+ # Otherwise perform the calculation.
+ else:
+ method = getattr(reg, func)
+ data_out = method(arr, self.model[n])
+ reg_dat.update({reg.name: data_out})
+ return reg_dat
def compute(self):
"""Perform all desired calculations on the data and save externally."""
@@ -598,20 +593,17 @@ class Calc(object):
for reduc, specs in zip(self.dtype_out_time, reduc_specs):
func = specs[-1]
data = eddy_ts if 'eddy' in specs else full_ts
- if 'reg' not in specs:
+ if 'reg' in specs:
+ reduced.update({reduc: self.region_calcs(data, func)})
+ else:
reduced.update({reduc: self._time_reduce(data, func)})
else:
reduced = {'': full_ts}
# Save to disk.
self._print_verbose("Writing desired gridded outputs to disk.")
- for dtype_out_time, data in reduced.items():
- self.save(data, dtype_out_time, dtype_out_vert=self.dtype_out_vert)
-
- # Regional methods: perform and save.
- if any(['reg' in dot for dot in self.dtype_out_time]) and self.region:
- self._print_verbose("Computing and saving regional outputs.")
- self.region_calcs(full_ts)
+ for dtype_time, data in reduced.items():
+ self.save(data, dtype_time, dtype_out_vert=self.dtype_out_vert)
def _save_to_scratch(self, data, dtype_out_time):
"""Save the data to the scratch filesystem.""" | Add support for eddy computations for regionally-averaged computations | spencerahill_aospy | train |
7fc6de08264a19bbdf488e8cdaf6e276345a3c6b | diff --git a/lib/core/webdriver/chrome.js b/lib/core/webdriver/chrome.js
index <HASH>..<HASH> 100644
--- a/lib/core/webdriver/chrome.js
+++ b/lib/core/webdriver/chrome.js
@@ -25,7 +25,6 @@ const defaultChromeOptions = [
'--no-first-run',
'--new-window',
'--disable-translate',
- '--disable-infobars',
'--disable-desktop-notifications',
'--allow-running-insecure-content',
'--disable-save-password-bubble',
@@ -108,6 +107,7 @@ module.exports.configureBuilder = function(builder, options) {
chromeOptions.addArguments('--no-sandbox');
}
+ chromeOptions.excludeSwitches('enable-automation');
const perfLogConf = { enableNetwork: true, enablePage: true };
if (chromeConfig.collectTracingEvents) { | try out disabling enable-automation #<I> (#<I>) | sitespeedio_browsertime | train |
9f05851f097be044094c8799532453a0171a5d26 | diff --git a/src/Http/Request.php b/src/Http/Request.php
index <HASH>..<HASH> 100644
--- a/src/Http/Request.php
+++ b/src/Http/Request.php
@@ -286,6 +286,30 @@ class Request
}
/**
+ * Builds a query string, including the question mark.
+ *
+ * @param array $data
+ *
+ * @return string
+ */
+ public static function buildQueryString(array $data = null)
+ {
+ if ($data === null) {
+ $data = static::$get;
+ }
+
+ $query = [];
+
+ foreach ($data as $name => $value) {
+ $query[] = "{$name}=" . urlencode($value);
+ }
+
+ if (count($query)) {
+ return '?' . implode('&', $query);
+ }
+ }
+
+ /**
* @return string
*/
protected static function prepareBaseUrl() | Add method to build a query string. | nirix_radium | train |
175d380189a302cef69169a3d611fe63aea3b63e | diff --git a/molgenis-data-mapper/src/main/resources/js/attribute-mapping.js b/molgenis-data-mapper/src/main/resources/js/attribute-mapping.js
index <HASH>..<HASH> 100644
--- a/molgenis-data-mapper/src/main/resources/js/attribute-mapping.js
+++ b/molgenis-data-mapper/src/main/resources/js/attribute-mapping.js
@@ -67,6 +67,8 @@
inArray = $.inArray(name, sourceAttrs);
if(inArray >= 0){
$(this).css('background-color', '#CCFFCC');
+ } else {
+ $(this).css('background-color', '');
}
});
}; | Fix changing background when attribute is removed from mapping service editor | molgenis_molgenis | train |
8ec1a101d597e4a5f31be338205d5589b8e12ecd | diff --git a/engines/bastion_katello/app/assets/javascripts/bastion_katello/activation-keys/details/activation-key-associations.controller.js b/engines/bastion_katello/app/assets/javascripts/bastion_katello/activation-keys/details/activation-key-associations.controller.js
index <HASH>..<HASH> 100644
--- a/engines/bastion_katello/app/assets/javascripts/bastion_katello/activation-keys/details/activation-key-associations.controller.js
+++ b/engines/bastion_katello/app/assets/javascripts/bastion_katello/activation-keys/details/activation-key-associations.controller.js
@@ -25,12 +25,6 @@ angular.module('Bastion.activation-keys').controller('ActivationKeyAssociationsC
'paged': true
};
- $scope.table.working = true;
-
- if ($scope.contentHosts) {
- $scope.table.working = false;
- }
-
contentHostsNutupane = new Nutupane(Host, params);
contentHostsNutupane.searchTransform = function (term) {
var searchQuery, addition = "activation_key_id=" + $scope.$stateParams.activationKeyId;
@@ -43,7 +37,14 @@ angular.module('Bastion.activation-keys').controller('ActivationKeyAssociationsC
};
contentHostsNutupane.masterOnly = true;
+ contentHostsNutupane.setSearchKey('contentHostSearch');
+
$scope.detailsTable = contentHostsNutupane.table;
+ $scope.detailsTable.working = true;
+
+ if ($scope.contentHosts) {
+ $scope.detailsTable.working = false;
+ }
$scope.activationKey.$promise.then(function () {
contentHostsNutupane.setParams(params); | Fixes #<I>: set search key for AK CH table.
We need to set the search key for the activation key content host
table in order for the search to work properly in the parent table.
Also fixed a couple of references to the parent table that should have
been references to the details table.
<URL> | Katello_katello | train |
9b9ae174da282c1e7ea41af240acc73abc5abc3e | diff --git a/backtrader/strategy.py b/backtrader/strategy.py
index <HASH>..<HASH> 100644
--- a/backtrader/strategy.py
+++ b/backtrader/strategy.py
@@ -661,7 +661,7 @@ class Strategy(with_metaclass(MetaStrategy, StrategyBase)):
data = self.getdatabyname(data)
data = data or self.datas[0]
- size = size or self.getsizing(data, isbuy=True)
+ size = size if size is not None else self.getsizing(data, isbuy=True)
return self.broker.buy(
self, data,
@@ -682,7 +682,7 @@ class Strategy(with_metaclass(MetaStrategy, StrategyBase)):
data = self.getdatabyname(data)
data = data or self.datas[0]
- size = size or self.getsizing(data, isbuy=False)
+ size = size if size is not None else self.getsizing(data, isbuy=True)
return self.broker.sell(
self, data,
@@ -710,7 +710,7 @@ class Strategy(with_metaclass(MetaStrategy, StrategyBase)):
data = self.data
possize = self.getposition(data, self.broker).size
- size = abs(size or possize)
+ size = abs(size if size is not None else possize)
if possize > 0:
return self.sell(data=data, size=size, price=price, plimit=plimit, | Make sure sizer is only used if size is not None (default) | backtrader_backtrader | train |
fd40d0c8b670530867efe5194b29bdfaafde4c53 | diff --git a/src/Illuminate/Mail/Markdown.php b/src/Illuminate/Mail/Markdown.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Mail/Markdown.php
+++ b/src/Illuminate/Mail/Markdown.php
@@ -77,9 +77,13 @@ class Markdown
{
$this->view->flushFinderCache();
- return new HtmlString(preg_replace("/[\r\n]{2,}/", "\n\n", $this->view->replaceNamespace(
+ $contents = $this->view->replaceNamespace(
'mail', $this->markdownComponentPaths()
- )->make($view, $data)->render()));
+ )->make($view, $data)->render();
+
+ return new HtmlString(
+ html_entity_decode(preg_replace("/[\r\n]{2,}/", "\n\n", $contents), ENT_QUOTES, 'UTF-8')
+ );
}
/** | decode html entities in plain text email (#<I>) | laravel_framework | train |
82efd457e3fe0a352ef6487a4fdd3cdaae799a9c | diff --git a/publish/publish.go b/publish/publish.go
index <HASH>..<HASH> 100644
--- a/publish/publish.go
+++ b/publish/publish.go
@@ -45,7 +45,7 @@ func RegisterL10nForPublish(Publish *publish.Publish, Admin *admin.Admin) {
publishableLocales := getPublishableLocales(context.Request, context.CurrentUser)
return searchHandler(db, context).Set("l10n:mode", "unscoped").Where("language_code IN (?)", publishableLocales)
}
- return searchHandler(db, context)
+ return searchHandler(db, context).Set("l10n:mode", "unscoped")
}
Admin.RegisterViewPath("github.com/qor/l10n/publish/views") | Fix publish diff action for l<I>n | qor_l10n | train |
830a6325c82fcb8f4c55c81ea7234a7f23365c5a | diff --git a/phpstan.neon b/phpstan.neon
index <HASH>..<HASH> 100644
--- a/phpstan.neon
+++ b/phpstan.neon
@@ -2,6 +2,7 @@ includes:
- src/extension.neon
parameters:
+ inferPrivatePropertyTypeFromConstructor: true
level: max
paths:
- src
diff --git a/src/Extension/ProphetProphesizeDynamicReturnTypeExtension.php b/src/Extension/ProphetProphesizeDynamicReturnTypeExtension.php
index <HASH>..<HASH> 100644
--- a/src/Extension/ProphetProphesizeDynamicReturnTypeExtension.php
+++ b/src/Extension/ProphetProphesizeDynamicReturnTypeExtension.php
@@ -15,9 +15,6 @@ use PHPStan\Type\TypeWithClassName;
class ProphetProphesizeDynamicReturnTypeExtension implements DynamicMethodReturnTypeExtension
{
- /**
- * @var string
- */
private $className;
public function __construct(string $className)
diff --git a/src/Reflection/ObjectProphecyMethodReflection.php b/src/Reflection/ObjectProphecyMethodReflection.php
index <HASH>..<HASH> 100644
--- a/src/Reflection/ObjectProphecyMethodReflection.php
+++ b/src/Reflection/ObjectProphecyMethodReflection.php
@@ -14,14 +14,8 @@ use Prophecy\Prophecy\MethodProphecy;
class ObjectProphecyMethodReflection implements MethodReflection
{
- /**
- * @var ClassReflection
- */
private $declaringClass;
- /**
- * @var string
- */
private $name;
public function __construct(ClassReflection $declaringClass, string $name) | Enhancement: Infer private property type from constructor | Jan0707_phpstan-prophecy | train |
2676c0f24f97dcd323c28cf64d8f2a19b4920402 | diff --git a/src/colab_gitlab/signals.py b/src/colab_gitlab/signals.py
index <HASH>..<HASH> 100644
--- a/src/colab_gitlab/signals.py
+++ b/src/colab_gitlab/signals.py
@@ -184,7 +184,7 @@ def update_basic_info_gitlab_user(sender, **kwargs):
LOGGER.error(error_msg)
return
- if response.status_code != 201:
+ if response.status_code != 200:
reason = 'Unknown.'
try:
diff --git a/tests/test_signals.py b/tests/test_signals.py
index <HASH>..<HASH> 100644
--- a/tests/test_signals.py
+++ b/tests/test_signals.py
@@ -156,7 +156,7 @@ class GitlabTest(TestCase):
COLAB_APPS_mock):
resquests_post_mock.return_value = Mock(
- status_code=201,
+ status_code=200,
json=lambda: {'message': 'Unauthorized'}
) | Fixed status_code when updated_basic_info | colab_colab-gitlab-plugin | train |