hash (string, 40 chars) | diff (string, 131-114k chars) | message (string, 7-980 chars) | project (string, 5-67 chars) | split (1 class) |
---|---|---|---|---|
c65f7a1d937a6caf768122761c28b994855b975b | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -52,7 +52,7 @@ print('Packages being installed: {}'.format(packages))
install_requires = [
'wsgiref==0.1.2',
'six==1.9.0',
- 'setuptools==14.3.1',
+ 'setuptools>=14.3',
'pyzmq==14.5.0',
'Unidecode==0.4.16',
'cffi==0.8.6', | setuptools==<I> installed by conda and can't be uninstalled (nonexistent file) | hobson_pug-invest | train |
248daf21a40e1eb6cceeea640542f5e0b822f76e | diff --git a/public/summernote/summernote.js b/public/summernote/summernote.js
index <HASH>..<HASH> 100644
--- a/public/summernote/summernote.js
+++ b/public/summernote/summernote.js
@@ -1447,7 +1447,8 @@
$image.css({
display: '',
//width: Math.min($editable.width(), $image.width())
- width: '100%'
+ width: 'auto',
+ maxWidth: '100%'
});
for (var attr_key in img_attributes)
@@ -1712,11 +1713,24 @@
this.resize = function ($editable, sValue, $target) {
recordUndo($editable);
- $target.css({
- // width: $editable.width() * sValue + 'px',
- width: (sValue * 100) + '%',
- height: ''
- });
+ if (sValue == 'auto')
+ {
+ $target.css({
+ // width: $editable.width() * sValue + 'px',
+ width: 'auto',
+ maxWidth: '100%',
+ height: ''
+ });
+ }
+ else
+ {
+ $target.css({
+ // width: $editable.width() * sValue + 'px',
+ width: (sValue * 100) + '%',
+ height: ''
+ });
+ }
+
};
/**
@@ -3061,6 +3075,7 @@
'<div class="arrow"></div>' +
'<div class="popover-content note-image-content">' +
'<div class="btn-group">' +
+ '<button type="button" class="btn btn-default btn-sm btn-small" title="Auto" data-event="resize" data-value="auto" tabindex="-1"><span class="note-fontsize-10">Auto</span> </button>' +
'<button type="button" class="btn btn-default btn-sm btn-small" title="' + lang.image.resizeFull + '" data-event="resize" data-value="1" tabindex="-1"><span class="note-fontsize-10">100%</span> </button>' +
'<button type="button" class="btn btn-default btn-sm btn-small" title="' + lang.image.resizeHalf + '" data-event="resize" data-value="0.5" tabindex="-1"><span class="note-fontsize-10">50%</span> </button>' +
'<button type="button" class="btn btn-default btn-sm btn-small" title="' + lang.image.resizeQuarter + '" data-event="resize" data-value="0.25" tabindex="-1"><span class="note-fontsize-10">25%</span> </button>' + | Few updates to the image resizing code. | CoandaCMS_coanda-core | train |
d5b8cfff59ece4e639b72e0ad04d7cc5fedc1e62 | diff --git a/ui/mirage/config.js b/ui/mirage/config.js
index <HASH>..<HASH> 100644
--- a/ui/mirage/config.js
+++ b/ui/mirage/config.js
@@ -16,12 +16,29 @@ export default function() {
this.get('/jobs', function({ jobs }, { queryParams }) {
const json = this.serialize(jobs.all());
+ const namespace = queryParams.namespace || 'default';
return json
- .filter(job => (queryParams.namespace ? job.NamespaceID === queryParams.namespace : true))
+ .filter(
+ job =>
+ namespace === 'default'
+ ? !job.NamespaceID || job.NamespaceID === namespace
+ : job.NamespaceID === namespace
+ )
.map(job => filterKeys(job, 'TaskGroups', 'NamespaceID'));
});
- this.get('/job/:id');
+ this.get('/job/:id', function({ jobs }, { params, queryParams }) {
+ const job = jobs.all().models.find(job => {
+ const jobIsDefault = !job.namespaceId || job.namespaceId === 'default';
+ const qpIsDefault = !queryParams.namespace || queryParams.namespace === 'default';
+ return (
+ job.id === params.id &&
+ (job.namespaceId === queryParams.namespace || (jobIsDefault && qpIsDefault))
+ );
+ });
+
+ return job ? this.serialize(job) : new Response(404, {}, null);
+ });
this.get('/job/:id/summary', function({ jobSummaries }, { params }) {
return this.serialize(jobSummaries.findBy({ jobId: params.id }));
diff --git a/ui/mirage/factories/job.js b/ui/mirage/factories/job.js
index <HASH>..<HASH> 100644
--- a/ui/mirage/factories/job.js
+++ b/ui/mirage/factories/job.js
@@ -53,8 +53,10 @@ export default Factory.extend({
});
if (!job.namespaceId) {
+ const namespace = server.db.namespaces.length ? pickOne(server.db.namespaces).id : null;
job.update({
- namespaceId: server.db.namespaces.length ? pickOne(server.db.namespaces).id : null,
+ namespace,
+ namespaceId: namespace,
});
} | Update job factories and mock endpoints to handle namespaces correctly | hashicorp_nomad | train |
48553f969a39f75a4ae0617febb9e1bbb9503a84 | diff --git a/mutant/forms.py b/mutant/forms.py
index <HASH>..<HASH> 100644
--- a/mutant/forms.py
+++ b/mutant/forms.py
@@ -2,7 +2,7 @@ from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.models import ContentType
-from django.utils.encoding import force_text
+from django.utils.encoding import smart_text
from .utils import choices_from_dict, group_item_getter
@@ -47,9 +47,9 @@ class FieldDefinitionTypeField(forms.ModelChoiceField):
definition = content_type.model_class()
category = definition.get_field_category()
definition_choices.append({
- 'group': force_text(category) if category else None,
+ 'group': smart_text(category) if category else None,
'value': content_type.pk,
- 'label': force_text(definition.get_field_description()),
+ 'label': self.label_from_instance(content_type),
})
choices = list(
choices_from_dict(
@@ -62,3 +62,6 @@ class FieldDefinitionTypeField(forms.ModelChoiceField):
return super(FieldDefinitionTypeField, self)._get_choices()
choices = property(_get_choices, forms.ModelChoiceField._set_queryset)
+
+ def label_from_instance(self, obj):
+ return smart_text(obj.model_class().get_field_description())
diff --git a/mutant/tests/test_forms.py b/mutant/tests/test_forms.py
index <HASH>..<HASH> 100644
--- a/mutant/tests/test_forms.py
+++ b/mutant/tests/test_forms.py
@@ -79,7 +79,21 @@ class FieldDefinitionTypeFieldTest(TestCase):
form = form_cls({'field_type': self.content_type_ct.pk})
self.assertFalse(form.is_valid())
- def test_group_by_category(self):
+ def test_choices(self):
+ field = FieldDefinitionTypeField(
+ ContentType.objects.filter(pk__in=[
+ self.field_definition_ct.pk, self.custom_field_ct.pk
+ ]).order_by('pk'), group_by_category=False, empty_label='Empty'
+ )
+ self.assertEqual(
+ list(field.choices), [
+ ('', 'Empty'),
+ (self.field_definition_ct.pk, 'None'),
+ (self.custom_field_ct.pk, ugettext('Custom description'))
+ ]
+ )
+
+ def test_group_by_category_choices(self):
field = FieldDefinitionTypeField(
ContentType.objects.filter(pk__in=[
self.field_definition_ct.pk, self.custom_field_ct.pk | Make sure `FieldDefinitionTypeField` always uses description as choice label. | charettes_django-mutant | train |
eb5907078a989caeb4a42c605e2b8703735496ac | diff --git a/chalkboard/chalkboard.js b/chalkboard/chalkboard.js
index <HASH>..<HASH> 100644
--- a/chalkboard/chalkboard.js
+++ b/chalkboard/chalkboard.js
@@ -105,7 +105,6 @@ var RevealChalkboard = window.RevealChalkboard || (function(){
penCursors[0] = config.pen[0];
boardCursors[0] = config.pen[1];
}
- if ( config.draw ) draw = config.draw;
if ( config.color ) {
color = config.color;
penColors[0] = config.color[0]; | Remove reference to config.draw. | rajgoel_reveal.js-plugins | train |
695e1763003a7d2a6e027e8818d292a4c23faa9c | diff --git a/test/jumpstart/test_base.rb b/test/jumpstart/test_base.rb
index <HASH>..<HASH> 100755
--- a/test/jumpstart/test_base.rb
+++ b/test/jumpstart/test_base.rb
@@ -110,6 +110,14 @@ class TestJumpstartBase < Test::Unit::TestCase
end
end
+
+ context "Tests for the JumpStart::Base#create_template instance method. \n" do
+
+ should "run create_template method" do
+
+ end
+
+ end
context "Tests for the JumpStart::Base#jumpstart_options instance method. \n" do
@@ -184,7 +192,6 @@ class TestJumpstartBase < Test::Unit::TestCase
end
- # TODO Add tests that involve extended FileUtils methods after writing tests for FileUtils module.
context "Tests for the JumpStart::Base#check_local_nginx_configuration instance method. \n" do
should "run check_local_nginx_configuration method" do
@@ -241,6 +248,22 @@ class TestJumpstartBase < Test::Unit::TestCase
end
end
+
+ context "Tests for the JumpStart::#check_for_strings_to_replace instance method.\n" do
+
+ should "" do
+
+ end
+
+ end
+
+ context "Tests for the JumpStart::#check_replace_string_pairs_for_project_name_sub instance method.\n" do
+
+ should "" do
+
+ end
+
+ end
context "Tests for the JumpStart::Base.get_line_number class method.\n" do | added more tests to be written in base | i0n_jumpstart | train |
b7ce11e16495a4592eeb36b233755a426c458a17 | diff --git a/skyfield/iokit.py b/skyfield/iokit.py
index <HASH>..<HASH> 100644
--- a/skyfield/iokit.py
+++ b/skyfield/iokit.py
@@ -1,6 +1,7 @@
from __future__ import print_function
import itertools
import os
+import errno
import numpy as np
import sys
from datetime import date, datetime, timedelta
@@ -79,8 +80,11 @@ class Loader(object):
self.verbose = verbose
self.expire = expire
self.events = []
- if not os.path.exists(self.directory):
+ try:
os.makedirs(self.directory)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
# Each instance gets its own copy of these data structures,
# instead of sharing a single copy, so users can edit them
@@ -184,10 +188,14 @@ class Loader(object):
def tle(self, url, reload=False):
"""Load and parse a satellite TLE file.
- Given a URL or a local path, this loads a file of three-line
- records in the common Celestrak file format, where each first
- line gives the name of a satellite and the following two lines
- are the TLE orbital elements.
+ Given a URL or a local path, this loads a file of three-line records in
+ the common Celestrak file format, or two-line records like those from
+ space-track.org. For a three-line element set, each first line gives
+ the name of a satellite and the following two lines are the TLE orbital
+ elements. A two-line element set comprises only these last two lines.
+
+ If two-line element sets are provided, the EarthSatellite 'name'
+ attribute is set to the satellite ID number for the object.
Returns a Python dictionary whose keys are satellite names and
values are :class:`~skyfield.sgp4lib.EarthSatellite` objects.
@@ -350,10 +358,17 @@ def parse_leap_seconds(fileobj):
def parse_celestrak_tle(fileobj):
lines = iter(fileobj)
for line in lines:
- name = line.decode('ascii').strip()
- line1 = next(lines).decode('ascii')
- line2 = next(lines).decode('ascii')
- sat = EarthSatellite(line1, line2, name)
+ if line.decode('ascii').strip()[0] != '1': # three-line elset
+ name = line.decode('ascii').strip()
+ line1 = next(lines).decode('ascii')
+ line2 = next(lines).decode('ascii')
+ sat = EarthSatellite(line1, line2, name)
+ else: # two-line elset, no name provided!
+ line1 = line.decode('ascii')
+ line2 = next(lines).decode('ascii')
+ sat = EarthSatellite(line1, line2, name=None)
+ name = str(sat.model.satnum) # set to satellite number
+ sat.name = name
yield sat.model.satnum, sat
yield name, sat
if ' (' in name:
diff --git a/skyfield/tests/test_io_parsing.py b/skyfield/tests/test_io_parsing.py
index <HASH>..<HASH> 100644
--- a/skyfield/tests/test_io_parsing.py
+++ b/skyfield/tests/test_io_parsing.py
@@ -12,6 +12,13 @@ FLOCK 2E-1 \n\
2 41483 51.6270 103.3896 0004826 61.7810 298.3684 15.92672255114129
"""
+sample_spacetrack_text = b"""\
+1 29273U 06033B 18081.29838594 -.00000056 +00000-0 +00000-0 0 9993
+2 29273 000.0189 154.5198 0004980 202.4902 284.9321 01.00271755042548
+1 29274U 06033C 18081.39999693 +.00002637 +00000-0 +10299-2 0 9992
+2 29274 005.9144 244.7152 6177908 248.3941 037.5897 03.74556424124616
+"""
+
def test_celestrak():
f = BytesIO(sample_celestrak_text)
d = dict(parse_celestrak_tle(f))
@@ -19,3 +26,15 @@ def test_celestrak():
assert d[25544] is d['ISS'] is d['ISS (ZARYA)'] is d['ZARYA']
assert d[41483] is d['FLOCK 2E-1']
assert d[25544] is not d[41483]
+
+def test_spacetrack():
+ f = BytesIO(sample_spacetrack_text)
+ d = dict(parse_celestrak_tle(f))
+ assert len(d) == 4
+ assert d['29273'] is d[29273]
+ assert d['29274'] is d[29274]
+ assert d[29273] is not d[29274]
+ assert d[29273].model.satnum == 29273
+ assert d[29273].name == '29273'
+ assert d[29274].model.satnum == 29274
+ assert d[29274].name == '29274' | Parse TLE files that lack satellite names (#<I>)
Updated `parse_celestrak_tle()` to accept two-line element sets that do
not specify a satellite name, and to grab the satellite ID from the
first line to use as the name when initializing EarthSatellite objects. | skyfielders_python-skyfield | train |
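The dispatch this commit adds is compact enough to sketch on its own. Below is a hypothetical, self-contained rendering of the same two-line/three-line logic; `parse_tle_records` and its satellite-number fallback are illustrative stand-ins, not skyfield's actual API:

```python
# Editorial sketch of the dispatch described above, not skyfield's real code.
def parse_tle_records(lines):
    """Yield (name, line1, line2) from two- or three-line element sets."""
    it = iter(lines)
    for line in it:
        line = line.strip()
        if not line:
            continue
        if line[0] != '1':             # three-line set: this line is the name
            name = line
            line1 = next(it).strip()
            line2 = next(it).strip()
        else:                          # two-line set: no name provided
            line1 = line
            line2 = next(it).strip()
            name = line1[2:7].strip()  # fall back to the satellite ID number
        yield name, line1, line2
```

Fed the `sample_spacetrack_text` above, this would yield the names `'29273'` and `'29274'`, matching the assertions in the new test.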
147d8af438d60c267da4eb89e57f8b9a7afad9a6 | diff --git a/executor.go b/executor.go
index <HASH>..<HASH> 100644
--- a/executor.go
+++ b/executor.go
@@ -6,14 +6,14 @@ import (
// Execute executes a tree of *Actions sequentually in depth first order.
func Execute(a *Action) error {
- seen := make(map[*Action]bool)
+ seen := make(map[*Action]error)
return execute(seen, a)
}
-func execute(seen map[*Action]bool, a *Action) error {
+func execute(seen map[*Action]error, a *Action) error {
// step 0, have we been here before
- if seen[a] {
- return nil
+ if err, ok := seen[a]; ok {
+ return err
}
// step 1, build all dependencies
@@ -24,8 +24,9 @@ func execute(seen map[*Action]bool, a *Action) error {
}
// step 2, now execute ourselves
- seen[a] = true
- return a.Run()
+ err := a.Run()
+ seen[a] = err
+ return err
}
// ExecuteConcurrent executes all actions in a tree concurrently.
@@ -45,6 +46,9 @@ func ExecuteConcurrent(a *Action, n int) error {
permits <- true
}
+ // wg tracks all the outstanding actions
+ var wg sync.WaitGroup
+
var execute func(map[*Action]chan error, *Action) chan error
execute = func(seen map[*Action]chan error, a *Action) chan error {
@@ -68,7 +72,9 @@ func ExecuteConcurrent(a *Action, n int) error {
results = append(results, execute(seen, dep))
}
+ wg.Add(1)
go func() {
+ defer wg.Done()
// wait for dependant actions
for _, r := range results {
if err := get(r); err != nil {
@@ -84,5 +90,7 @@ func ExecuteConcurrent(a *Action, n int) error {
return result
}
- return get(execute(seen, a))
+ err := get(execute(seen, a))
+ wg.Wait()
+ return err
Fix bugs with executor
- fix concurrent executor not waiting for all actions to exit
- pass error back to caller in sync executor | constabulary_gb | train |
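The sync-executor half of the fix, caching the outcome (error included) instead of a bare visited flag, is a language-agnostic pattern. A hypothetical Python transcription, with `Action` reduced to a minimal stand-in; the Go source above remains authoritative:

```python
from dataclasses import dataclass, field
from typing import Callable, List, Optional

@dataclass(eq=False)                      # identity hashing, like Go map keys
class Action:
    run: Callable[[], Optional[Exception]]
    deps: List["Action"] = field(default_factory=list)

def execute(action: Action, seen: Optional[dict] = None) -> Optional[Exception]:
    seen = {} if seen is None else seen
    if action in seen:                    # step 0: replay the cached outcome
        return seen[action]
    for dep in action.deps:               # step 1: build all dependencies
        err = execute(dep, seen)
        if err is not None:
            return err
    err = action.run()                    # step 2: execute ourselves
    seen[action] = err                    # cache the error, not just "done"
    return err
```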
bf8fa5571138712d804dd667888061549a4f6e47 | diff --git a/lib/compare.js b/lib/compare.js
index <HASH>..<HASH> 100644
--- a/lib/compare.js
+++ b/lib/compare.js
@@ -21,12 +21,7 @@ function shortcircuitPrimitive (value) {
return false
}
-function compare (actual, expected) {
- if (actual === expected) return true
- // Primitive values should be the same, so if actual or expected is primitive
- // then the values will never compare.
- if (shortcircuitPrimitive(actual) || shortcircuitPrimitive(expected)) return false
-
+function compareDescriptors (lhs, rhs) {
const lhsLookup = new Map()
const rhsLookup = new Map()
@@ -34,9 +29,6 @@ function compare (actual, expected) {
const rhsStack = []
let topIndex = -1
- let lhs = describe(actual)
- let rhs = describe(expected)
-
do {
let result
@@ -102,4 +94,17 @@ function compare (actual, expected) {
return true
}
-module.exports = compare
+exports.compareDescriptors = compareDescriptors
+
+function compare (actual, expected) {
+ if (actual === expected) return { pass: true }
+ // Primitive values should be the same, so if actual or expected is primitive
+ // then the values will never compare.
+ if (shortcircuitPrimitive(actual) || shortcircuitPrimitive(expected)) return { pass: false }
+
+ actual = describe(actual)
+ expected = describe(expected)
+ const pass = compareDescriptors(actual, expected)
+ return { actual, expected, pass }
+}
+exports.compare = compare
diff --git a/perf/lodash-isequal-comparison.js b/perf/lodash-isequal-comparison.js
index <HASH>..<HASH> 100644
--- a/perf/lodash-isequal-comparison.js
+++ b/perf/lodash-isequal-comparison.js
@@ -37,7 +37,7 @@ Benchmarks adopted from <https://github.com/lodash/lodash/blob/3967c1e1197b72646
const Benchmark = require('benchmark')
global.isEqual = require('lodash.isequal')
-global.compare = require('../lib/compare')
+global.compare = require('../lib/compare').compare
const buildName = 'lodash.isequal'
const otherName = 'compare'
diff --git a/test/compare.js b/test/compare.js
index <HASH>..<HASH> 100644
--- a/test/compare.js
+++ b/test/compare.js
@@ -1,6 +1,6 @@
import test from 'ava'
-import compare from '../lib/compare'
+import {compare} from '../lib/compare'
test('compare functions by reference', t => {
function a () { return 1 + 2 }
@@ -8,7 +8,7 @@ test('compare functions by reference', t => {
return function a () { return 1 + 2 } // eslint-disable-line no-shadow
})()
- t.false(compare(a, a_))
+ t.false(compare(a, a_).pass)
})
test('objects compare even if symbol properties are out of order', t => {
@@ -17,7 +17,7 @@ test('objects compare even if symbol properties are out of order', t => {
const o1 = { [s1]: 1, [s2]: 2 }
const o2 = { [s2]: 2, [s1]: 1 }
- t.true(compare(o1, o2))
+ t.true(compare(o1, o2).pass)
const a1 = new Set([1, 2])
a1[s1] = 1
@@ -26,7 +26,7 @@ test('objects compare even if symbol properties are out of order', t => {
a2[s2] = 2
a2[s1] = 1
- t.true(compare(a1, a2))
+ t.true(compare(a1, a2).pass)
const a3 = new Set([3, 0])
a1[s1] = 1
@@ -35,5 +35,5 @@ test('objects compare even if symbol properties are out of order', t => {
a2[s2] = 2
a2[s1] = 1
- t.false(compare(a3, a4))
+ t.false(compare(a3, a4).pass)
})
diff --git a/test/lodash-isequal-comparison.js b/test/lodash-isequal-comparison.js
index <HASH>..<HASH> 100644
--- a/test/lodash-isequal-comparison.js
+++ b/test/lodash-isequal-comparison.js
@@ -36,7 +36,9 @@ Tests adopted from https://github.com/lodash/lodash/blob/3967c1e1197b726463246b4
import vm from 'vm'
import test from 'ava'
-import isEqual from '../lib/compare'
+import {compare} from '../lib/compare'
+
+const isEqual = (actual, expected) => compare(actual, expected).pass
const realm = vm.runInNewContext('(function () { return this })()')
const symbol1 = Symbol ? Symbol('a') : true | Change compare() to return an object
Returns the described actual and expected values, so a diff can be
generated.
Export both compare() and compareDescriptors(), the latter should be
called with descriptors rather than raw values. | concordancejs_concordance | train |
96547fcc5285dc0d650b6138216a9dbc505759f2 | diff --git a/src/view/items/element/Decorator.js b/src/view/items/element/Decorator.js
index <HASH>..<HASH> 100644
--- a/src/view/items/element/Decorator.js
+++ b/src/view/items/element/Decorator.js
@@ -72,10 +72,11 @@ export default class Decorator {
resolver = this.parentFragment.resolve( ref, model => {
this.models[i] = model;
removeFromArray( this.resolvers, resolver );
+ model.register( this );
});
this.resolvers.push( resolver );
- }
+ } else model.register( this );
return model;
});
@@ -90,6 +91,8 @@ export default class Decorator {
}
}
+ handleChange () { this.bubble(); }
+
rebind () {
if ( this.dynamicName ) this.nameFragment.rebind();
if ( this.dynamicArgs ) this.argsFragment.rebind();
@@ -139,6 +142,9 @@ export default class Decorator {
if ( this.dynamicName ) this.nameFragment.unbind();
if ( this.dynamicArgs ) this.argsFragment.unbind();
if ( this.resolvers ) this.resolvers.forEach( unbind );
+ if ( this.models ) this.models.forEach( m => {
+ if ( m ) m.unregister( this );
+ });
}
unrender ( shouldDestroy ) {
diff --git a/test/browser-tests/plugins/decorators.js b/test/browser-tests/plugins/decorators.js
index <HASH>..<HASH> 100644
--- a/test/browser-tests/plugins/decorators.js
+++ b/test/browser-tests/plugins/decorators.js
@@ -484,4 +484,31 @@ export default function() {
t.htmlEqual( fixture.innerHTML, '<div>foo baz</div>' );
});
+
+ test( 'named decorators update with their args (#2590)', t => {
+ const r = new Ractive({
+ el: fixture,
+ template: `<div as-foo="bar">bar here</div>`,
+ decorators: {
+ foo ( node, bar ) {
+ const contents = node.innerHTML;
+ node.innerHTML = bar;
+
+ return {
+ update ( bar ) {
+ node.innerHTML = bar;
+ },
+ teardown () {
+ node.innerHTML = contents;
+ }
+ };
+ }
+ },
+ data: { bar: 'foo' }
+ });
+
+ t.htmlEqual( fixture.innerHTML, '<div>foo</div>' );
+ r.set( 'bar', 'baz' );
+ t.htmlEqual( fixture.innerHTML, '<div>baz</div>' );
+ });
} | register decorators with their models so that they updated correctly - fixes #<I> | ractivejs_ractive | train |
7cde8cbbca360efbf1a19775a7fc7ba97719c34d | diff --git a/dvc/project.py b/dvc/project.py
index <HASH>..<HASH> 100644
--- a/dvc/project.py
+++ b/dvc/project.py
@@ -362,7 +362,7 @@ class Project(object):
stages = self.active_stages()
for stage in stages:
- if not stage.locked:
+ if stage.locked:
msg = 'DVC file \'{}\' is locked. Its dependecies are not ' \
'going to be shown in status output.'
self.logger.warn(msg.format(stage.relpath)) | status: print locked warning only when stage is locked
Fixes #<I> | iterative_dvc | train |
aeb27bdb5bc836e1871f9db526e6ab8d505d4442 | diff --git a/plugin/file/reload.go b/plugin/file/reload.go
index <HASH>..<HASH> 100644
--- a/plugin/file/reload.go
+++ b/plugin/file/reload.go
@@ -25,6 +25,7 @@ func (z *Zone) Reload() error {
serial := z.SOASerialIfDefined()
zone, err := Parse(reader, z.origin, zFile, serial)
+ reader.Close()
if err != nil {
if _, ok := err.(*serialErr); !ok {
log.Errorf("Parsing zone %q: %v", z.origin, err) | plugin/file: close reader for reload (#<I>)
This reloader didn't close the opened file handle. Add a close. Can't
use `defer` because this is in a endless loop. | coredns_coredns | train |
b23bba970063de6762c457c4db7f3a4436a93bd3 | diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/base.py
+++ b/pandas/core/indexes/base.py
@@ -2826,7 +2826,9 @@ class Index(IndexOpsMixin, PandasObject):
self._assert_can_do_setop(other)
other, _ = self._convert_can_do_setop(other)
- if self.equals(other) and not self.has_duplicates:
+ if self.equals(other):
+ if self.has_duplicates:
+ return self.unique()._get_reconciled_name_object(other)
return self._get_reconciled_name_object(other)
if not is_dtype_equal(self.dtype, other.dtype):
diff --git a/pandas/core/indexes/interval.py b/pandas/core/indexes/interval.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/interval.py
+++ b/pandas/core/indexes/interval.py
@@ -980,7 +980,9 @@ class IntervalIndex(IntervalMixin, ExtensionIndex):
self._assert_can_do_setop(other)
other, _ = self._convert_can_do_setop(other)
- if self.equals(other) and not self.has_duplicates:
+ if self.equals(other):
+ if self.has_duplicates:
+ return self.unique()._get_reconciled_name_object(other)
return self._get_reconciled_name_object(other)
if not isinstance(other, IntervalIndex):
diff --git a/pandas/core/indexes/multi.py b/pandas/core/indexes/multi.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/multi.py
+++ b/pandas/core/indexes/multi.py
@@ -3619,7 +3619,7 @@ class MultiIndex(Index):
if self.equals(other):
if self.has_duplicates:
return self.unique().rename(result_names)
- return self._get_reconciled_name_object(other)
+ return self.rename(result_names)
return self._intersection(other, sort=sort)
diff --git a/pandas/core/indexes/period.py b/pandas/core/indexes/period.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/period.py
+++ b/pandas/core/indexes/period.py
@@ -639,6 +639,8 @@ class PeriodIndex(DatetimeIndexOpsMixin):
other, _ = self._convert_can_do_setop(other)
if self.equals(other):
+ if self.has_duplicates:
+ return self.unique()._get_reconciled_name_object(other)
return self._get_reconciled_name_object(other)
return self._intersection(other, sort=sort)
diff --git a/pandas/tests/indexes/multi/test_setops.py b/pandas/tests/indexes/multi/test_setops.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/indexes/multi/test_setops.py
+++ b/pandas/tests/indexes/multi/test_setops.py
@@ -419,3 +419,13 @@ def test_intersect_with_duplicates(tuples, exp_tuples):
result = left.intersection(right)
expected = MultiIndex.from_tuples(exp_tuples, names=["first", "second"])
tm.assert_index_equal(result, expected)
+
+
+def test_intersection_equal_different_names():
+ # GH#30302
+ mi1 = MultiIndex.from_arrays([[1, 2], [3, 4]], names=["c", "b"])
+ mi2 = MultiIndex.from_arrays([[1, 2], [3, 4]], names=["a", "b"])
+
+ result = mi1.intersection(mi2)
+ expected = MultiIndex.from_arrays([[1, 2], [3, 4]], names=[None, "b"])
+ tm.assert_index_equal(result, expected)
diff --git a/pandas/tests/indexes/period/test_setops.py b/pandas/tests/indexes/period/test_setops.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/indexes/period/test_setops.py
+++ b/pandas/tests/indexes/period/test_setops.py
@@ -339,3 +339,10 @@ class TestPeriodIndex:
expected = PeriodIndex(["20160920", "20160921"], freq="D")
tm.assert_index_equal(idx_diff, expected)
tm.assert_attr_equal("freq", idx_diff, expected)
+
+ def test_intersection_equal_duplicates(self):
+ # GH#38302
+ idx = pd.period_range("2011-01-01", periods=2)
+ idx_dup = idx.append(idx)
+ result = idx_dup.intersection(idx_dup)
+ tm.assert_index_equal(result, idx) | BUG: Clean intersection and fix resulting names when MultiIndex are equal (#<I>) | pandas-dev_pandas | train |
c945b3e9c30329e83a4b75151f46fa94857d9e94 | diff --git a/lib/fog/cloudstack/core.rb b/lib/fog/cloudstack/core.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/cloudstack/core.rb
+++ b/lib/fog/cloudstack/core.rb
@@ -13,6 +13,8 @@ module Fog
def self.escape(string)
string = CGI::escape(string)
string = string.gsub("+","%20")
+ # Escaped asterisk will cause malformed request
+ string = string.gsub("%2A","*")
string
end | Prevent malformed request with asterisk (*) character | fog_fog | train |
f7f64550200a20103dfcbee95ffadf6b7adfdddc | diff --git a/openquake/baselib/parallel.py b/openquake/baselib/parallel.py
index <HASH>..<HASH> 100644
--- a/openquake/baselib/parallel.py
+++ b/openquake/baselib/parallel.py
@@ -553,7 +553,7 @@ class Starmap(object):
cls.dask_client = Client()
@classmethod
- def shutdown(cls, poolsize=None):
+ def shutdown(cls):
if hasattr(cls, 'pool'):
cls.pool.close()
cls.pool.terminate()
diff --git a/openquake/calculators/tests/__init__.py b/openquake/calculators/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/__init__.py
+++ b/openquake/calculators/tests/__init__.py
@@ -27,7 +27,7 @@ import sys
import numpy
from openquake.calculators import base
-from openquake.baselib import datastore, general
+from openquake.baselib import datastore, general, parallel
from openquake.commonlib import readinput, oqvalidation
@@ -69,6 +69,7 @@ class CalculatorTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.duration = general.AccumDict()
+ parallel.Starmap.shutdown = lambda: None # avoid restarting the pool
def get_calc(self, testfile, job_ini, **kw):
""" | Avoided restarting the pool in the tests [demos] | gem_oq-engine | train |
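The trick in the test patch, swapping `Starmap.shutdown` for a no-op so the worker pool survives across tests, generalizes well. A self-contained sketch with a dummy `Starmap`, plus the restoration step the patch itself skips:

```python
import unittest

class Starmap:                            # stand-in for the real distributor
    @classmethod
    def shutdown(cls):
        print("tearing down worker pool") # expensive in real life

class PoolReusingTestCase(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls._orig_shutdown = Starmap.__dict__["shutdown"]
        Starmap.shutdown = classmethod(lambda c: None)    # keep the pool alive

    @classmethod
    def tearDownClass(cls):
        Starmap.shutdown = cls._orig_shutdown             # restore for others

    def test_shutdown_is_noop(self):
        self.assertIsNone(Starmap.shutdown())
```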
d953a964cd32d3f3056c7f1ec45000ecbfc5ee8d | diff --git a/spec/homesick_spec.rb b/spec/homesick_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/homesick_spec.rb
+++ b/spec/homesick_spec.rb
@@ -94,7 +94,7 @@ describe 'homesick' do
end
it 'should clone a github repo' do
- homesick.should_receive(:git_clone).with('https://github.com/wfarr/dotfiles.git', :destination => Pathname.new('wfarr/dotfiles'))
+ homesick.should_receive(:git_clone).with('https://github.com/wfarr/dotfiles.git', :destination => Pathname.new('dotfiles'))
homesick.clone 'wfarr/dotfiles'
end | Changed the spec, bundle exec rake works | technicalpickles_homesick | train |
e00670b9018d273c67ec4fc93244c98597e4aa83 | diff --git a/service/dap/server.go b/service/dap/server.go
index <HASH>..<HASH> 100644
--- a/service/dap/server.go
+++ b/service/dap/server.go
@@ -1024,6 +1024,8 @@ func (s *Server) stopDebugSession(killProcess bool) error {
s.log.Debug("halt returned state: ", exited)
}
if exited != nil {
+ // TODO(suzmue): log exited error when the process exits, which may have been before
+ // halt was called.
s.logToConsole(exited.Error())
s.logToConsole("Detaching")
} else if killProcess {
diff --git a/service/dap/server_test.go b/service/dap/server_test.go
index <HASH>..<HASH> 100644
--- a/service/dap/server_test.go
+++ b/service/dap/server_test.go
@@ -4330,6 +4330,35 @@ func TestLaunchRequestOutputPath(t *testing.T) {
})
}
+func TestExitNonZeroStatus(t *testing.T) {
+ runTest(t, "pr1055", func(client *daptest.Client, fixture protest.Fixture) {
+ client.InitializeRequest()
+ client.ExpectInitializeResponseAndCapabilities(t)
+
+ client.LaunchRequest("exec", fixture.Path, !stopOnEntry)
+ client.ExpectInitializedEvent(t)
+ client.ExpectLaunchResponse(t)
+
+ client.ConfigurationDoneRequest()
+ client.ExpectConfigurationDoneResponse(t)
+
+ client.ExpectTerminatedEvent(t)
+
+ client.DisconnectRequest()
+ // Check that the process exit status is 2.
+ oep := client.ExpectOutputEventProcessExited(t, 2)
+ if oep.Body.Category != "console" {
+ t.Errorf("\ngot %#v\nwant Category='console'", oep)
+ }
+ oed := client.ExpectOutputEventDetaching(t)
+ if oed.Body.Category != "console" {
+ t.Errorf("\ngot %#v\nwant Category='console'", oed)
+ }
+ client.ExpectDisconnectResponse(t)
+ client.ExpectTerminatedEvent(t)
+ })
+}
+
func TestNoDebug_GoodExitStatus(t *testing.T) {
runTest(t, "increment", func(client *daptest.Client, fixture protest.Fixture) {
runNoDebugSession(t, client, func() {
diff --git a/service/debugger/debugger.go b/service/debugger/debugger.go
index <HASH>..<HASH> 100644
--- a/service/debugger/debugger.go
+++ b/service/debugger/debugger.go
@@ -1130,6 +1130,11 @@ func (d *Debugger) Command(command *api.DebuggerCommand, resumeNotify chan struc
d.recordMutex.Lock()
if d.stopRecording == nil {
err = d.target.RequestManualStop()
+ // The error returned from d.target.Valid will have more context
+ // about the exited process.
+ if _, valErr := d.target.Valid(); valErr != nil {
+ err = valErr
+ }
}
d.recordMutex.Unlock()
} | service/debugger: return correct exit status on manual halt (#<I>)
* service/dap: add test for nonzero exit status | go-delve_delve | train |
bfb70b41f9fc9540afc6550a34f8fe4f2a137bd2 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@ install_requires = ['penaltymodel>=0.15.0,<0.16.0',
'penaltymodel-maxgap>=0.4.0,<0.5.0; platform_machine == "x86" or python_version == "3.4"',
'penaltymodel-mip>=0.1.2,<0.2.0; platform_machine != "x86" and python_version != "3.4"',
'networkx>=2.0,<3.0',
- 'dimod>=0.6.7,<0.7.0'
+ 'dimod>=0.6.7,<0.7.0',
'six>=1.11.0,<2.0.0']
packages = ['dwavebinarycsp', | Fix install_requires syntax in setup.py | dwavesystems_dwavebinarycsp | train |
4fb52a38b73c7e08d35df8e23dc9c88ea67f64b7 | diff --git a/src/Support/Period.php b/src/Support/Period.php
index <HASH>..<HASH> 100644
--- a/src/Support/Period.php
+++ b/src/Support/Period.php
@@ -100,6 +100,28 @@ class Period
}
/**
+ * Create a new Period instance with only a start date time.
+ *
+ * @param \Datetime $startDateTime
+ * @return CyrildeWit\EloquentViewable\Period
+ */
+ public static function since(DateTime $startDateTime = null): self
+ {
+ return new static($startDateTime);
+ }
+
+ /**
+ * Create a new Period instance with only a end date time.
+ *
+ * @param \Datetime $endDateTime
+ * @return CyrildeWit\EloquentViewable\Period
+ */
+ public static function upto(DateTime $endDateTime = null): self
+ {
+ return new static($endDateTime);
+ }
+
+ /**
* Get the start date time.
*
* @return \DateTime|null
@@ -251,7 +273,7 @@ class Period
* @param int $weeks
* @return \CyrildeWit\EloquentViewable\Support\Period
*/
- public static function pastWeeks(int $weeks)
+ public static function pastWeeks(int $weeks): self
{
return self::subToday(self::PAST_WEEKS, $weeks);
} | feat: add static since and upto methods to Period class | cyrildewit_eloquent-viewable | train |
0e8b8649400fc2072309a76814f27e0b1dee5a8e | diff --git a/src/Symfony/Bundle/WebProfilerBundle/EventListener/WebDebugToolbarListener.php b/src/Symfony/Bundle/WebProfilerBundle/EventListener/WebDebugToolbarListener.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Bundle/WebProfilerBundle/EventListener/WebDebugToolbarListener.php
+++ b/src/Symfony/Bundle/WebProfilerBundle/EventListener/WebDebugToolbarListener.php
@@ -11,6 +11,7 @@
namespace Symfony\Bundle\WebProfilerBundle\EventListener;
+use Symfony\Bundle\FullStack;
use Symfony\Bundle\WebProfilerBundle\Csp\ContentSecurityPolicyHandler;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;
use Symfony\Component\HttpFoundation\Request;
@@ -142,6 +143,7 @@ class WebDebugToolbarListener implements EventSubscriberInterface
$toolbar = "\n".str_replace("\n", '', $this->twig->render(
'@WebProfiler/Profiler/toolbar_js.html.twig',
[
+ 'full_stack' => class_exists(FullStack::class),
'excluded_ajax_paths' => $this->excludedAjaxPaths,
'token' => $response->headers->get('X-Debug-Token'),
'request' => $request, | Fixing missing full_stack variable that's needed by toolbar.html.twig | symfony_symfony | train |
07617b65305544e2337c5d304e4a94ecccbf22c7 | diff --git a/src/passes/Pass.js b/src/passes/Pass.js
index <HASH>..<HASH> 100644
--- a/src/passes/Pass.js
+++ b/src/passes/Pass.js
@@ -226,25 +226,18 @@ export class Pass {
}
/**
- * Returns the current fullscreen material.
+ * The fullscreen material.
*
- * @return {Material} The current fullscreen material, or null if there is none.
+ * @type {Material}
*/
- getFullscreenMaterial() {
+ get fullscreenMaterial() {
return (this.screen !== null) ? this.screen.material : null;
}
- /**
- * Sets the fullscreen material.
- *
- * @protected
- * @param {Material} value - A fullscreen material.
- */
-
- setFullscreenMaterial(value) {
+ set fullscreenMaterial(value) {
let screen = this.screen;
@@ -271,6 +264,33 @@ export class Pass {
}
/**
+ * Returns the current fullscreen material.
+ *
+ * @deprecated Use fullscreenMaterial instead.
+ * @return {Material} The current fullscreen material, or null if there is none.
+ */
+
+ getFullscreenMaterial() {
+
+ return this.fullscreenMaterial;
+
+ }
+
+ /**
+ * Sets the fullscreen material.
+ *
+ * @deprecated Use fullscreenMaterial instead.
+ * @protected
+ * @param {Material} value - A fullscreen material.
+ */
+
+ setFullscreenMaterial(value) {
+
+ this.fullscreenMaterial = value;
+
+ }
+
+ /**
* Returns the current depth texture.
*
* @return {Texture} The current depth texture, or null if there is none. | Add fullscreenMaterial accessor
deprecated getFullscreenMaterial and setFullscreenMaterial. | vanruesc_postprocessing | train |
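The accessor-plus-deprecated-shim pattern used here translates directly to other languages. A hypothetical Python rendering (names transposed; not the library's API):

```python
# Sketch of the same migration: property first, old getter/setter kept as shims.
import warnings

class Pass:
    def __init__(self):
        self._fullscreen_material = None

    @property
    def fullscreen_material(self):
        return self._fullscreen_material

    @fullscreen_material.setter
    def fullscreen_material(self, value):
        self._fullscreen_material = value

    def get_fullscreen_material(self):
        warnings.warn("use fullscreen_material instead", DeprecationWarning)
        return self.fullscreen_material

    def set_fullscreen_material(self, value):
        warnings.warn("use fullscreen_material instead", DeprecationWarning)
        self.fullscreen_material = value
```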
3eaa89efa830df2681a4c88f807195d0fde847a0 | diff --git a/enricher/fabric8/src/main/java/io/fabric8/maven/enricher/fabric8/IconEnricher.java b/enricher/fabric8/src/main/java/io/fabric8/maven/enricher/fabric8/IconEnricher.java
index <HASH>..<HASH> 100644
--- a/enricher/fabric8/src/main/java/io/fabric8/maven/enricher/fabric8/IconEnricher.java
+++ b/enricher/fabric8/src/main/java/io/fabric8/maven/enricher/fabric8/IconEnricher.java
@@ -139,7 +139,8 @@ public class IconEnricher extends BaseEnricher {
if (Strings.isNullOrBlank(answer)) {
log.debug("No icon file found for this project");
} else {
- log.info("Icon URL: " + answer);
+ log.info("Adding icon");
+ log.verbose("Icon URL: " + answer);
}
return answer; | Make icon enrichment a bit less verbose | fabric8io_fabric8-maven-plugin | train |
616a22a15a3f4eef09a05a4b14263fb75a9a4e82 | diff --git a/packages/ReflectionDocBlock/src/NodeAnalyzer/DocBlockAnalyzer.php b/packages/ReflectionDocBlock/src/NodeAnalyzer/DocBlockAnalyzer.php
index <HASH>..<HASH> 100644
--- a/packages/ReflectionDocBlock/src/NodeAnalyzer/DocBlockAnalyzer.php
+++ b/packages/ReflectionDocBlock/src/NodeAnalyzer/DocBlockAnalyzer.php
@@ -6,7 +6,6 @@ use Nette\Utils\Strings;
use phpDocumentor\Reflection\DocBlock;
use phpDocumentor\Reflection\DocBlock\Tag;
use phpDocumentor\Reflection\DocBlock\Tags\Param;
-use phpDocumentor\Reflection\DocBlock\Tags\Var_;
use phpDocumentor\Reflection\Type;
use phpDocumentor\Reflection\Types\Boolean;
use phpDocumentor\Reflection\Types\Integer;
@@ -19,6 +18,7 @@ use Rector\ReflectionDocBlock\DocBlock\AnnotationRemover;
use Rector\ReflectionDocBlock\DocBlock\DocBlockFactory;
use Rector\ReflectionDocBlock\DocBlock\TidingSerializer;
use ReflectionProperty;
+use Symplify\BetterReflectionDocBlock\Tag\TolerantVar;
final class DocBlockAnalyzer
{
@@ -79,7 +79,7 @@ final class DocBlockAnalyzer
*/
public function getVarTypes(Node $node): ?array
{
- /** @var Var_[] $varTags */
+ /** @var TolerantVar[] $varTags */
$varTags = $this->getTagsByName($node, 'var');
if (! count($varTags)) {
return null;
@@ -131,7 +131,7 @@ final class DocBlockAnalyzer
$tags = $docBlock->getTags();
foreach ($tags as $tag) {
- if (! $tag instanceof Var_) {
+ if (! $tag instanceof TolerantVar) {
continue;
} | make use of TolerantVar from Symplify <I> | rectorphp_rector | train |
741af33b99b6ab90ec2ed2b91f81b3e984dbfdf8 | diff --git a/_example/memcached.go b/_example/memcached.go
index <HASH>..<HASH> 100644
--- a/_example/memcached.go
+++ b/_example/memcached.go
@@ -100,7 +100,7 @@ func readStat() (map[string]float64, error) {
if res[0] == "STAT" {
stat[res[1]], err = strconv.ParseFloat(res[2], 64)
if err != nil {
- fmt.Println("readStat:", err)
+ fmt.Fprintln(os.Stderr, "readStat:", err)
}
}
line, isPrefix, err = r.ReadLine()
diff --git a/mackerel-plugin-helper.go b/mackerel-plugin-helper.go
index <HASH>..<HASH> 100644
--- a/mackerel-plugin-helper.go
+++ b/mackerel-plugin-helper.go
@@ -52,10 +52,10 @@ func (h *MackerelPluginHelper) fetchLastValues() (map[string]float64, time.Time,
}
defer f.Close()
- r := bufio.NewReader(f)
- line, isPrefix, err := r.ReadLine()
stat := make(map[string]float64)
- for err == nil && !isPrefix {
+ scanner := bufio.NewScanner(f)
+ for scanner.Scan() {
+ line := scanner.Text()
s := string(line)
res := strings.Split(s, "\t")
if len(res) != 3 {
@@ -63,22 +63,18 @@ func (h *MackerelPluginHelper) fetchLastValues() (map[string]float64, time.Time,
}
stat[res[0]], err = strconv.ParseFloat(res[1], 64)
if err != nil {
- fmt.Println("fetchLastValues: ", err)
+ fmt.Fprintln(os.Stderr, "fetchLastValues: ", err)
}
timestamp, err := strconv.Atoi(res[2])
if err != nil {
- fmt.Println("fetchLastValues: ", err)
+ fmt.Fprintln(os.Stderr, "fetchLastValues: ", err)
}
lastTime = time.Unix(int64(timestamp), 0)
if err != nil {
- fmt.Println("fetchLastValues: ", err)
+ fmt.Fprintln(os.Stderr, "fetchLastValues: ", err)
}
- line, isPrefix, err = r.ReadLine()
}
- if isPrefix {
- return nil, lastTime, errors.New("buffer size too small")
- }
- if err != nil {
+ if err := scanner.Err(); err != nil {
return stat, lastTime, err
}
return stat, lastTime, nil
@@ -90,12 +86,12 @@ func (h *MackerelPluginHelper) saveValues(values map[string]float64, now time.Ti
return err
}
defer f.Close()
- w := bufio.NewWriter(f)
+ w := bufio.NewWriter(f)
for key, value := range values {
h.printValue(w, key, value, now)
- w.Flush()
}
+ w.Flush()
return nil
}
@@ -120,12 +116,12 @@ func (h *MackerelPluginHelper) OutputValues() {
lastStat, lastTime, err := h.fetchLastValues()
if err != nil {
- fmt.Println("fetchLastValues (ignore):", err)
+ fmt.Fprintln(os.Stderr, "fetchLastValues (ignore):", err)
}
err = h.saveValues(stat, now)
if err != nil {
- fmt.Println("saveValues: ", err)
+ fmt.Fprintln(os.Stderr, "saveValues: ", err)
return
}
@@ -141,7 +137,7 @@ func (h *MackerelPluginHelper) OutputValues() {
h.printValue(os.Stdout, key+"."+metric.Key, diff, now)
}
} else {
- fmt.Printf("%s is not exist at last fetch\n", metric.Key)
+ fmt.Fprintf(os.Stderr, "%s is not exist at last fetch\n", metric.Key)
}
} else {
h.printValue(os.Stdout, key+"."+metric.Key, stat[metric.Key], now) | Output errors to stderr, at el. | mackerelio_go-mackerel-plugin-helper | train |
ac45621df2aeda94ddd15cea990040be8ff09957 | diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index <HASH>..<HASH> 100644
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -1192,10 +1192,16 @@ def update_generated_files_for_package(
}
if update_release_notes:
git_cmd = get_git_command(previous_release)
- changes = subprocess.check_output(git_cmd, cwd=source_provider_package_path, universal_newlines=True)
- changes_table = convert_git_changes_to_table(
- changes, base_url="https://github.com/apache/airflow/commit/"
- )
+ try:
+ changes = subprocess.check_output(
+ git_cmd, cwd=source_provider_package_path, universal_newlines=True
+ )
+ changes_table = convert_git_changes_to_table(
+ changes, base_url="https://github.com/apache/airflow/commit/"
+ )
+ except subprocess.CalledProcessError:
+ # TODO(potiuk) fix me for both backport/provider package check
+ changes_table = ''
context["CURRENT_CHANGES_TABLE"] = changes_table
pip_requirements_table = convert_pip_requirements_to_table(
PROVIDERS_REQUIREMENTS[provider_package_id]
@@ -1542,6 +1548,7 @@ ERROR! Wrong first param: {sys.argv[1]}
print("Generate setup files")
print()
provider = sys.argv[2]
+ make_sure_remote_apache_exists_and_fetch()
update_generated_files_for_package(
provider, "", suffix, [], BACKPORT_PACKAGES, update_release_notes=False, update_setup=True
) | Workaround missing git commit in providers's check in CI (#<I>)
Temporary fix to unblock master PRs.
There is a problem with comparing/fetching commits during tests
and it should be investigated properly - this is temporary fix
to workaround it. | apache_airflow | train |
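Stripped of context, the workaround is a plain fallback around the git invocation; a minimal sketch (the helper name is illustrative):

```python
import subprocess

def changes_table_or_empty(git_cmd, package_path):
    """Return the git log output, or '' when the commit range can't be resolved."""
    try:
        return subprocess.check_output(
            git_cmd, cwd=package_path, universal_newlines=True
        )
    except subprocess.CalledProcessError:
        # TODO (as in the commit): investigate the missing commits properly
        return ''
```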
21a1ebe4ccd337e49328a11a43ea9cec7423350d | diff --git a/js/browser-ui.js b/js/browser-ui.js
index <HASH>..<HASH> 100644
--- a/js/browser-ui.js
+++ b/js/browser-ui.js
@@ -488,7 +488,7 @@ Browser.prototype.toggleOptsPopup = function(ev) {
b.positionRuler();
b.storeStatus();
}, false);
-
+ singleBaseHighlightButton.setAttribute('id','singleBaseHightlightButton'); // making this because access is required when the key 'u' is pressed and the options are visible
optsTable.appendChild(makeElement('tr', [makeElement('td', 'Display and highlight current genome location', {align: 'right'}), makeElement('td', singleBaseHighlightButton)]));
optsForm.appendChild(optsTable);
diff --git a/js/cbrowser.js b/js/cbrowser.js
index <HASH>..<HASH> 100644
--- a/js/cbrowser.js
+++ b/js/cbrowser.js
@@ -391,6 +391,10 @@ Browser.prototype.realInit2 = function() {
}
ev.stopPropagation(); ev.preventDefault();
} else if (ev.keyCode == 85) { // u
+ if (thisB.uiMode === 'opts') { // if the options are visible, toggle the checkbox too
+ var check = document.getElementById("singleBaseHightlightButton").checked;
+ document.getElementById("singleBaseHightlightButton").checked = !check;
+ }
thisB.singleBaseHighlight = !thisB.singleBaseHighlight;
thisB.positionRuler();
ev.stopPropagation(); ev.preventDefault(); | If a user presses the single base shortcut key (presently u), toggle the show single base checkbox if the options panel is displayed. | dasmoth_dalliance | train |
5fc1a090104a53c39f25d24a13c4bd7f5a87065a | diff --git a/test/unit/specs/popup.spec.js b/test/unit/specs/popup.spec.js
index <HASH>..<HASH> 100644
--- a/test/unit/specs/popup.spec.js
+++ b/test/unit/specs/popup.spec.js
@@ -26,22 +26,29 @@ describe('Popup', () => {
})
expect(vm.$el.className)
.to.equal('cube-popup cube-popup_mask')
+ expect(vm.$el.querySelector('.cube-popup-container').className)
+ .to.equal('cube-popup-container cube-popup-center')
})
it('should render correct contents', () => {
vm = instantiateComponent(Vue, Popup, {
props: {
type: 'xx',
- content: 'popup content'
+ content: 'popup content',
+ position: 'top',
+ maskClosable: true
}
})
vm.show()
expect(vm.$el.className)
.to.equal('cube-popup cube-popup_mask cube-xx')
expect(vm.$el.querySelector('.cube-popup-container').className)
- .to.equal('cube-popup-container cube-popup-center')
+ .to.equal('cube-popup-container cube-popup-top')
expect(vm.$el.querySelector('.cube-popup-content').innerHTML)
.to.equal('popup content')
+ vm.$el.querySelector('.cube-popup-mask').click()
+ expect(vm.isVisible)
+ .to.be.false
})
it('should toggle by change v-model visible', function (done) { | test(Popup): position and mask-closable | didi_cube-ui | train |
cd9e42ff1d1e6b7844f78a030bb76cbb68c01e0e | diff --git a/lib/websession.py b/lib/websession.py
index <HASH>..<HASH> 100644
--- a/lib/websession.py
+++ b/lib/websession.py
@@ -30,7 +30,7 @@ import cPickle
import time
from UserDict import UserDict
-from invenio.dbquery import run_sql, blob_to_string
+from invenio.dbquery import run_sql, blob_to_string, OperationalError, IntegrityError
from invenio.session import Session
class SessionNotInDb(Exception):
@@ -41,7 +41,7 @@ class SessionNotInDb(Exception):
class pSession(Session):
"""Specialisation of the class Session which adds persistence to sessions
- by using a MySQL table (it pickles itself into the corresponding row of
+ by using a database table (it pickles itself into the corresponding row of
the table). The class provides methods to save and retrieve an instance
to/from the DB and to access the main session attributes (uid). The
table in the DB must have the following structure:
@@ -71,15 +71,23 @@ class pSession(Session):
return self.__uid
def setUid( self, newUid ):
- self.__uid = int(newUid)
- self.__dirty = 1
+ if newUid:
+ self.__uid = int(newUid)
+ self.__dirty = 1
+ else:
+ # something bad happened, e.g. database down, so return user id -1
+ self.__uid = -1
+ self.__dirty = 1
def retrieve( cls, sessionId ):
"""method for retrieving a session from the DB for the given id. If the
id has no corresponding session an exception is raised
"""
sql = "select session_object from %s where session_key='%s'"%(cls.__tableName, sessionId)
- res = run_sql(sql)
+ try:
+ res = run_sql(sql)
+ except OperationalError:
+ raise SessionNotInDb("Session %s doesn't exist"%sessionId)
if len(res)==0:
raise SessionNotInDb("Session %s doesn't exist"%sessionId)
s = cPickle.loads(blob_to_string(res[0][0]))
@@ -102,14 +110,16 @@ class pSession(Session):
sql = 'INSERT INTO %s (session_key, session_expiry, session_object, uid) values ("%s","%s","%s","%s")' % \
(self.__class__.__tableName, self.id, self.get_access_time()+60*60*24*2, repr, int(self.getUid()))
res = run_sql(sql)
- # FIXME. WARNING!! it should be "except IntegrityError, e:" but this will
- # create a dependency on package MySQL. I'll leave it like this for
- # the time being but this can lead to Exception masking
- except Exception, e:
- sql = 'UPDATE %s SET uid=%s, session_expiry=%s, session_object="%s" WHERE session_key="%s"' % \
- (self.__class__.__tableName, int(self.getUid()), self.get_access_time()+60*60*24*2, repr, self.id)
- res = run_sql(sql)
+ except IntegrityError:
+ try:
+ sql = 'UPDATE %s SET uid=%s, session_expiry=%s, session_object="%s" WHERE session_key="%s"' % \
+ (self.__class__.__tableName, int(self.getUid()), self.get_access_time()+60*60*24*2, repr, self.id)
+ res = run_sql(sql)
+ except OperationalError:
+ pass
self.__dirty=0
+ except OperationalError:
+ self.__dirty=0
class pSessionMapping(UserDict):
"""Only the necessary methods to make it work with the session manager | Handle OperationalError database exception (in case the database
connection is down) in which case set user ID to -1 as for the
permission denied case. | inveniosoftware_invenio-accounts | train |
74aa0df8f7f132b62754e5159262e4a5b9b641ab | diff --git a/examples/src/main/java/org/apache/spark/examples/sql/streaming/JavaStructuredSessionization.java b/examples/src/main/java/org/apache/spark/examples/sql/streaming/JavaStructuredSessionization.java
index <HASH>..<HASH> 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/streaming/JavaStructuredSessionization.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/streaming/JavaStructuredSessionization.java
@@ -76,8 +76,6 @@ public final class JavaStructuredSessionization {
for (String word : lineWithTimestamp.getLine().split(" ")) {
eventList.add(new Event(word, lineWithTimestamp.getTimestamp()));
}
- System.out.println(
- "Number of events from " + lineWithTimestamp.getLine() + " = " + eventList.size());
return eventList.iterator();
}
};
@@ -100,7 +98,7 @@ public final class JavaStructuredSessionization {
// If timed out, then remove session and send final update
if (state.hasTimedOut()) {
SessionUpdate finalUpdate = new SessionUpdate(
- sessionId, state.get().getDurationMs(), state.get().getNumEvents(), true);
+ sessionId, state.get().calculateDuration(), state.get().getNumEvents(), true);
state.remove();
return finalUpdate;
@@ -133,7 +131,7 @@ public final class JavaStructuredSessionization {
// Set timeout such that the session will be expired if no data received for 10 seconds
state.setTimeoutDuration("10 seconds");
return new SessionUpdate(
- sessionId, state.get().getDurationMs(), state.get().getNumEvents(), false);
+ sessionId, state.get().calculateDuration(), state.get().getNumEvents(), false);
}
}
};
@@ -215,7 +213,8 @@ public final class JavaStructuredSessionization {
public long getEndTimestampMs() { return endTimestampMs; }
public void setEndTimestampMs(long endTimestampMs) { this.endTimestampMs = endTimestampMs; }
- public long getDurationMs() { return endTimestampMs - startTimestampMs; }
+ public long calculateDuration() { return endTimestampMs - startTimestampMs; }
+
@Override public String toString() {
return "SessionInfo(numEvents = " + numEvents +
", timestamps = " + startTimestampMs + " to " + endTimestampMs + ")"; | [SPARK-<I>][SS] Fix JavaStructuredSessionization example
## What changes were proposed in this pull request?
Extra accessors in java bean class causes incorrect encoder generation, which corrupted the state when using timeouts.
## How was this patch tested?
manually ran the example | apache_spark | train |
e28cc2fce05f2670eae76e96dc4c04af5c6ae42b | diff --git a/commands/story/start/command.go b/commands/story/start/command.go
index <HASH>..<HASH> 100644
--- a/commands/story/start/command.go
+++ b/commands/story/start/command.go
@@ -28,17 +28,17 @@ var Command = &gocli.Command{
UsageLine: "start [-base=BASE] [-no_branch] [-no_push]",
Short: "start a new story",
Long: `
-Start a new issue tracker story.
+ Start a new issue tracker story.
-The user is shown the list of stories that can be started.
-When they choose one, they are assigned to that story and the story
-is started in the issue tracker.
+ The user is shown the list of stories that can be started.
+ When they choose one, they are assigned to that story and the story
+ is started in the issue tracker.
-Unless -no_branch is specified, the user is asked to insert
-the branch name to be used for the branch holding the story commits.
-The branch of the given name is created on top of the trunk branch
-and checked out. A custom base branch can be set by using -base.
-The story branch is then pushed unless -no_push is specified.
+ Unless -no_branch is specified, the user is asked to insert
+ the branch name to be used for the branch holding the story commits.
+ The branch of the given name is created on top of the trunk branch
+ and checked out. A custom base branch can be set by using -base.
+ The story branch is then pushed unless -no_push is specified.
`,
Action: run,
} | story start: Fix command help formatting
The description was not indented properly.
Story-Id: SF-<I>
Change-Id: e<I>c3c<I>b | salsaflow_salsaflow | train |
f561ca2c13748211905bc2af8ec5805d13929df8 | diff --git a/packages/vuetify/src/mixins/menuable.js b/packages/vuetify/src/mixins/menuable.js
index <HASH>..<HASH> 100644
--- a/packages/vuetify/src/mixins/menuable.js
+++ b/packages/vuetify/src/mixins/menuable.js
@@ -107,11 +107,11 @@ export default Vue.extend({
computedLeft () {
const a = this.dimensions.activator
const c = this.dimensions.content
+ const activatorLeft = this.isAttached ? a.offsetLeft : a.left
const minWidth = Math.max(a.width, c.width)
let left = 0
- if (this.isAttached) left += a.offsetLeft
- else left += this.left ? a.left - (minWidth - a.width) : a.left
+ left += this.left ? activatorLeft - (minWidth - a.width) : activatorLeft
if (this.offsetX) left += this.left ? -a.width : a.width
if (this.nudgeLeft) left -= parseInt(this.nudgeLeft)
if (this.nudgeRight) left += parseInt(this.nudgeRight) | fix(menuable): set correct position with left+attach+offset-x
see a<I>, #<I> | vuetifyjs_vuetify | train |
076b3a95190213374024c69d72407a99d6c408e4 | diff --git a/dist/dhtmlx-e6.js b/dist/dhtmlx-e6.js
index <HASH>..<HASH> 100644
--- a/dist/dhtmlx-e6.js
+++ b/dist/dhtmlx-e6.js
@@ -46,8 +46,8 @@ const OBJECT_TYPE = {
TREE : 'tree',
WINDOW : 'window',
WINDOW_MANAGER : 'windowManager',
- TABBAR : 'tabbar',
- TAB : 'tab'
+ TABBAR : 'tabbar',
+ TAB : 'tab'
};
class Action {
@@ -473,8 +473,9 @@ class BaseLayout extends BaseObject {
skin: SKIN
});
- } else if (container.type === OBJECT_TYPE.LAYOUT_CELL ||
- container.type === OBJECT_TYPE.TAB) {
+ } else if (container.type === OBJECT_TYPE.LAYOUT_CELL
+ || container.type === OBJECT_TYPE.TAB
+ || container.type === OBJECT_TYPE.WINDOW) {
impl = container.impl.attachLayout(pattern);
}
return impl;
diff --git a/src/global/config.js b/src/global/config.js
index <HASH>..<HASH> 100644
--- a/src/global/config.js
+++ b/src/global/config.js
@@ -47,6 +47,6 @@ export const OBJECT_TYPE = {
TREE : 'tree',
WINDOW : 'window',
WINDOW_MANAGER : 'windowManager',
- TABBAR : 'tabbar',
- TAB : 'tab'
+ TABBAR : 'tabbar',
+ TAB : 'tab'
};
\ No newline at end of file
diff --git a/src/layout/BaseLayout.js b/src/layout/BaseLayout.js
index <HASH>..<HASH> 100644
--- a/src/layout/BaseLayout.js
+++ b/src/layout/BaseLayout.js
@@ -83,8 +83,9 @@ export class BaseLayout extends BaseObject {
skin: SKIN
});
- } else if (container.type === OBJECT_TYPE.LAYOUT_CELL ||
- container.type === OBJECT_TYPE.TAB) {
+ } else if (container.type === OBJECT_TYPE.LAYOUT_CELL
+ || container.type === OBJECT_TYPE.TAB
+ || container.type === OBJECT_TYPE.WINDOW) {
impl = container.impl.attachLayout(pattern);
}
return impl; | Added WINDOW container type to BaseLayout constructor | igalarza_dhtmlx-e6 | train |
5f03399f18991a40cf167be4ba5498a2399d7569 | diff --git a/src/js/core.js b/src/js/core.js
index <HASH>..<HASH> 100644
--- a/src/js/core.js
+++ b/src/js/core.js
@@ -622,12 +622,30 @@ s.maxTranslate = function () {
Slider/slides sizes
===========================*/
s.updateAutoHeight = function () {
- // Update Height
- var slide = s.slides.eq(s.activeIndex)[0];
- if (typeof slide !== 'undefined') {
- var newHeight = slide.offsetHeight;
- if (newHeight) s.wrapper.css('height', newHeight + 'px');
+ var activeSlides = [];
+ var newHeight = 0;
+
+ // Find slides currently in view
+ if(s.params.slidesPerView !== 'auto' && s.params.slidesPerView > 1) {
+ for (i = 0; i < Math.ceil(s.params.slidesPerView); i++) {
+ var index = s.activeIndex + i;
+ if(index > s.slides.length) break;
+ activeSlides.push(s.slides.eq(index)[0]);
+ }
+ } else {
+ activeSlides.push(s.slides.eq(s.activeIndex)[0]);
+ }
+
+ // Find new height from heighest slide in view
+ for (i = 0; i < activeSlides.length; i++) {
+ if (typeof activeSlides[i] !== 'undefined') {
+ var height = activeSlides[i].offsetHeight;
+ newHeight = height > newHeight ? height : newHeight;
+ }
}
+
+ // Update Height
+ if (newHeight) s.wrapper.css('height', newHeight + 'px');
};
s.updateContainerSize = function () {
var width, height; | Fixing autoheight if slidesPerView is more than 1 | nolimits4web_swiper | train |
bcde900fd84fa7d4a251aefde70c1b30ed293bc8 | diff --git a/src/main/java/com/jayway/maven/plugins/android/AbstractEmulatorMojo.java b/src/main/java/com/jayway/maven/plugins/android/AbstractEmulatorMojo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/jayway/maven/plugins/android/AbstractEmulatorMojo.java
+++ b/src/main/java/com/jayway/maven/plugins/android/AbstractEmulatorMojo.java
@@ -135,6 +135,8 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
*/
protected void startAndroidEmulator() throws MojoExecutionException
{
+ parseParameters();
+
CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
executor.setLogger(this.getLog());
@@ -191,6 +193,7 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
* @return absolute path name of start script
* @throws IOException
* @throws MojoExecutionException
+ * @see "http://stackoverflow.com/questions/2328776/how-do-i-write-a-pidfile-in-a-windows-batch-file"
*/
private String writeEmulatorStartScriptWindows() throws IOException, MojoExecutionException {
@@ -198,15 +201,18 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
File file = new File(filename);
PrintWriter writer = new PrintWriter(new FileWriter(file));
- writer.print("start " + assembleStartCommandLine());
- getLog().info("Creating pid file on Windows not yet implemented. ");
- // TODO write pid into pid file
- // writer.println(" echo $! > " + pidFileName);
+ // command needs to be assembled before unique window title since it parses settings and sets up parsedAvd
+ // and others.
+ String command = assembleStartCommandLine();
+ String uniqueWindowTitle = "MavenAndroidPlugin-AVD" + parsedAvd;
+ writer.print("START " + uniqueWindowTitle + " " + command);
+ writer.println();
+ writer.println("FOR /F \"tokens=2\" %%I in ('TASKLIST /NH /FI \"WINDOWTITLE eq " + uniqueWindowTitle + "\"' ) DO SET PID=%%I");
+ writer.println("ECHO %PID% > " + pidFileName);
writer.flush();
writer.close();
file.setExecutable(true);
return filename;
-
}
/**
@@ -246,6 +252,8 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
* @throws org.apache.maven.plugin.MojoExecutionException
*/
protected void stopAndroidEmulator() throws MojoExecutionException {
+ parseParameters();
+
CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
executor.setLogger(this.getLog());
@@ -276,15 +284,12 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
* @throws ExecutionException
*/
private void stopEmulatorWindows(CommandExecutor executor, String pid) throws ExecutionException {
-// TODO comment below out when pid file is implemented
-// String stopCommand = "taskkill"; // there is also tskill, this assumes that the command is on the path
-// List<String> commands = new ArrayList<String>();
-// commands.add("/PID");
-// commands.add(pid);
-// getLog().info(STOP_EMULATOR_MSG + pid);
-// executor.executeCommand(stopCommand, commands);
-
- getLog().info("Stopping emulator on windows not yet implemented. No pid file!");
+ String stopCommand = "TASKKILL"; // this assumes that the command is on the path
+ List<String> commands = new ArrayList<String>();
+ commands.add("/PID");
+ commands.add(pid);
+ getLog().info(STOP_EMULATOR_MSG + pid);
+ executor.executeCommand(stopCommand, commands);
}
/**
@@ -310,6 +315,20 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
* @see com.jayway.maven.plugins.android.Emulator
*/
private String assembleStartCommandLine() throws MojoExecutionException {
+ StringBuilder startCommandline = new StringBuilder()
+ .append(getAndroidSdk().getEmulatorPath())
+ .append(" -avd ")
+ .append(parsedAvd)
+ .append(" ");
+ if (!StringUtils.isEmpty(parsedOptions))
+ {
+ startCommandline.append(parsedOptions);
+ }
+ getLog().info("Android emulator command: " + startCommandline);
+ return startCommandline.toString();
+ }
+
+ private void parseParameters() {
// <emulator> exist in pom file
if (emulator != null)
{
@@ -348,18 +367,6 @@ public abstract class AbstractEmulatorMojo extends AbstractAndroidMojo {
parsedOptions = determineOptions();
parsedWait = determineWait();
}
-
- StringBuilder startCommandline = new StringBuilder()
- .append(getAndroidSdk().getEmulatorPath())
- .append(" -avd ")
- .append(parsedAvd)
- .append(" ");
- if (!StringUtils.isEmpty(parsedOptions))
- {
- startCommandline.append(parsedOptions);
- }
- getLog().info("Android emulator command: " + startCommandline);
- return startCommandline.toString();
}
/** | hopefully implemented working operations for starting and stopping the emulator on Windows | simpligility_android-maven-plugin | train
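Note on the commit above: the batch script captures the PID of the freshly started emulator by giving its console window a unique title and asking TASKLIST for rows matching that title. The same query as a hedged Python sketch (helper names are mine; TASKLIST/TASKKILL and their /NH, /FI, and /PID flags are the real Windows commands the script shells out to, so this runs on Windows only):

    import subprocess

    def find_pid_by_window_title(title):
        # /NH drops the header row; /FI filters by window title; the PID is
        # the second whitespace-separated column of a matching row.
        out = subprocess.check_output(
            ['TASKLIST', '/NH', '/FI', 'WINDOWTITLE eq {0}'.format(title)],
            universal_newlines=True)
        for line in out.splitlines():
            parts = line.split()
            if len(parts) >= 2 and parts[1].isdigit():
                return int(parts[1])
        return None  # no match: TASKLIST prints an INFO line instead

    def kill_pid(pid):
        subprocess.check_call(['TASKKILL', '/PID', str(pid)])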
edeaf15594eac3bdc246d959a585f427d810528a | diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -100,17 +100,18 @@ function gulpVartree(options) {
obj[options.pathProp],
obj[options.nameProp] + obj[options.extProp]
);
- // Add a reference to the parent scope
- if(options.parentProp) {
- file[options.prop][options.parentProp] = curScope;
- }
// Adding the file properties to the scope
if(options.index
&& options.index === Path.basename(file.path, Path.extname(file.path))) {
for(var prop in obj) {
curScope[prop] = obj[prop]
}
+ file[options.prop] = curScope;
} else {
+ // Add a reference to the parent scope
+ if(options.parentProp) {
+ file[options.prop][options.parentProp] = curScope;
+ }
if(!curScope[options.childsProp]) {
curScope[options.childsProp] = [];
} | Fixing the parent of indexes | nfroidure_gulp-vartree | train |
e858fd32163098d10572f5dcf4e52e73ee012991 | diff --git a/supply/lib/supply/options.rb b/supply/lib/supply/options.rb
index <HASH>..<HASH> 100755
--- a/supply/lib/supply/options.rb
+++ b/supply/lib/supply/options.rb
@@ -49,7 +49,7 @@ module Supply
end),
FastlaneCore::ConfigItem.new(key: :rollout,
short_option: "-r",
- description: "The percentage of the user fraction when uploading to the rollout track",
+ description: "The percentage of the user fraction when uploading to the rollout track (setting to 1 will complete the rollout)",
optional: true,
verify_block: proc do |value|
min = 0.0
diff --git a/supply/lib/supply/uploader.rb b/supply/lib/supply/uploader.rb
index <HASH>..<HASH> 100644
--- a/supply/lib/supply/uploader.rb
+++ b/supply/lib/supply/uploader.rb
@@ -192,9 +192,10 @@ module Supply
release = releases.first
track_to = client.tracks(Supply.config[:track_promote_to]).first
- if Supply.config[:rollout]
+ rollout = (Supply.config[:rollout] || 0).to_f
+ if rollout > 0 && rollout < 1
release.status = Supply::ReleaseStatus::IN_PROGRESS
- release.user_fraction = Supply.config[:rollout]
+ release.user_fraction = rollout
else
release.status = Supply::ReleaseStatus::COMPLETED
release.user_fraction = nil | [supply] skip sending user fraction if it's <I> when promoting track (#<I>)
* Skip sending user fraction if it's <I>.
* Updated rollout description to match new logic | fastlane_fastlane | train |
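Note on the commit above: the boundary semantics are the interesting part. Only a fraction strictly between 0 and 1 keeps the release staged; 0, nil, or 1 completes it. A tiny Python sketch of the same branch (status strings are illustrative):

    def promote(rollout):
        r = float(rollout or 0)
        if 0 < r < 1:
            return ('inProgress', r)    # staged rollout keeps the fraction
        return ('completed', None)      # 1.0 (or unset) finishes the rollout

    assert promote(0.2) == ('inProgress', 0.2)
    assert promote(1) == ('completed', None)
    assert promote(None) == ('completed', None)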
3647361a5277967f2a8a62ed394b5c952c82b7c5 | diff --git a/lems/model/model.py b/lems/model/model.py
index <HASH>..<HASH> 100644
--- a/lems/model/model.py
+++ b/lems/model/model.py
@@ -7,7 +7,11 @@ Model storage.
import os
from os.path import dirname
-from typing import List, Dict
+# For python versions where typing isn't available at all
+try:
+ from typing import List, Dict
+except ImportError:
+ pass
from lems import __schema_location__, __schema_version__
from lems.base.base import LEMSBase | fix: silently continue if typing module is not available | LEMS_pylems | train |
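Note on the commit above: the guarded import is safe only if the imported names are never needed at runtime; with the pass branch, any executed reference to List or Dict on an interpreter without typing would raise NameError. The pattern in isolation:

    try:
        from typing import List, Dict  # noqa: F401 (used in type comments only)
    except ImportError:
        # typing unavailable: fine as long as nothing at runtime uses the names
        pass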
5b6c8baf4ad9a7260c28c3e46b99acd5de37efc4 | diff --git a/state/watcher.go b/state/watcher.go
index <HASH>..<HASH> 100644
--- a/state/watcher.go
+++ b/state/watcher.go
@@ -954,8 +954,6 @@ func (w *modelFieldChangeWatcher) merge(watchSet set.Strings, change watcher.Cha
logger.Tracef("stopped field change watching for %q", docId)
}
delete(w.known, docId)
- // TODO - check this correct.
- watchSet.Remove(docId)
return nil
} | set.Remove() isn't needed because the set is created each time | juju_juju | train |
8727721fa5b7de8216ca245c17a39887bafbf06e | diff --git a/about_time/about_time.py b/about_time/about_time.py
index <HASH>..<HASH> 100644
--- a/about_time/about_time.py
+++ b/about_time/about_time.py
@@ -80,8 +80,8 @@ def about_time(fn=None, it=None):
return HandleResult(timings, result)
# use as counter/throughput iterator.
- if not fn:
- raise UserWarning('fn is required in counter mode')
+ if not fn or not callable(fn): # handles inversion of parameters.
+ raise UserWarning('use as about_time(callback, iterable) in counter/throughput mode.')
def counter():
i = -1
diff --git a/tests/test_about_time.py b/tests/test_about_time.py
index <HASH>..<HASH> 100644
--- a/tests/test_about_time.py
+++ b/tests/test_about_time.py
@@ -167,3 +167,8 @@ def test_throughput_human(end, count, expected, rand_offset):
t = HandleStats([rand_offset, end + rand_offset], count)
assert t.throughput_human == expected
+
+
+def test_counter_throughput_protect_against_inverted_params():
+ with pytest.raises(UserWarning):
+ about_time([], lambda x: 0) | feat(*) protects against inversion of parameters in counter/throughput mode | rsalmei_about-time | train |
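Note on the commit above: the guard is a general pattern; an API taking (callback, iterable) can detect swapped arguments up front with callable() and fail fast with a message naming the expected order. A self-contained Python sketch (names are mine, and I raise TypeError where the library raises UserWarning):

    def process(fn, it):
        if not callable(fn):
            raise TypeError('expected process(callback, iterable); got a '
                            'non-callable first argument: inverted parameters?')
        return [fn(x) for x in it]

    assert process(lambda x: x * 2, [1, 2]) == [2, 4]
    # process([1, 2], lambda x: x * 2) -> TypeError, caught immediately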
0e032f7cc750cae8c64e8c07a1a25d7a9cc87ef2 | diff --git a/validator/sawtooth_validator/networking/dispatch.py b/validator/sawtooth_validator/networking/dispatch.py
index <HASH>..<HASH> 100644
--- a/validator/sawtooth_validator/networking/dispatch.py
+++ b/validator/sawtooth_validator/networking/dispatch.py
@@ -60,6 +60,7 @@ class Dispatcher(InstrumentedThread):
self._condition = Condition()
self._dispatch_timers = {}
self._priority = {}
+ self._preprocessors = {}
def _get_dispatch_timer(self, tag):
if tag not in self._dispatch_timers:
@@ -174,6 +175,17 @@ class Dispatcher(InstrumentedThread):
if priority is not None:
self._priority[message_type] = priority
+ def set_preprocessor(self, message_type, preprocessor, executor):
+ '''
+ Sets PREPROCESSOR to run on MESSAGE_TYPE in EXECUTOR.
+
+ PREPROCESSOR: fn(message_content: bytes) -> PreprocessorResult
+ '''
+ self._preprocessors[message_type] = \
+ _PreprocessorManager(
+ executor=executor,
+ preprocessor=preprocessor)
+
def set_message_priority(self, message_type, priority):
self._priority[message_type] = priority
@@ -181,6 +193,71 @@ class Dispatcher(InstrumentedThread):
message_info = self._message_information[message_id]
try:
+ preprocessor = self._preprocessors[message_info.message_type]
+ except KeyError:
+ self._process_next(message_id)
+ return
+
+ def do_next(result):
+ message_info = self._message_information[message_id]
+
+ try:
+ # check for a None result
+ if result is None:
+ LOGGER.error(
+ "%s preprocessor returned None result for messsage %s",
+ preprocessor,
+ message_id)
+ return
+
+ # check for result status
+ if result.status == HandlerStatus.RETURN:
+ del self._message_information[message_id]
+
+ message = validator_pb2.Message(
+ content=result.message_out.SerializeToString(),
+ correlation_id=message_info.correlation_id,
+ message_type=result.message_type)
+
+ try:
+ self._send_message[message_info.connection](
+ msg=message,
+ connection_id=message_info.connection_id)
+ except KeyError:
+ LOGGER.warning(
+ "Can't send message %s back to "
+ "%s because connection %s not in dispatcher",
+ get_enum_name(message.message_type),
+ message_info.connection_id,
+ message_info.connection)
+
+ return
+
+ # store the preprocessor result
+ self._message_information[message_id] = \
+ _MessageInformation(
+ connection=message_info.connection,
+ connection_id=message_info.connection_id,
+ content=result.content,
+ correlation_id=message_info.correlation_id,
+ collection=message_info.collection,
+ message_type=message_info.message_type)
+
+ self._process_next(message_id)
+
+ except Exception: # pylint: disable=broad-except
+ LOGGER.exception(
+ "Unhandled exception after preprocessing")
+
+ preprocessor.execute(
+ connection_id=message_info.connection_id,
+ message_content=message_info.content,
+ callback=do_next)
+
+ def _process_next(self, message_id):
+ message_info = self._message_information[message_id]
+
+ try:
handler_manager = next(message_info.collection)
except IndexError:
# IndexError is raised if done with handlers
@@ -213,7 +290,7 @@ class Dispatcher(InstrumentedThread):
del self._message_information[message_id]
elif result.status == HandlerStatus.PASS:
- self._process(message_id)
+ self._process_next(message_id)
elif result.status == HandlerStatus.RETURN_AND_PASS:
message_info = self._message_information[message_id]
@@ -235,7 +312,7 @@ class Dispatcher(InstrumentedThread):
message_info.connection_id,
message_info.connection)
- self._process(message_id)
+ self._process_next(message_id)
else:
LOGGER.error("HandlerResult with status of RETURN_AND_PASS "
"is missing message_out or message_type")
@@ -319,6 +396,18 @@ class Dispatcher(InstrumentedThread):
self._condition.wait()
+class _PreprocessorManager:
+ def __init__(self, executor, preprocessor):
+ self._executor = executor
+ self._preprocessor = preprocessor
+
+ def execute(self, connection_id, message_content, callback):
+ def wrapped(message_content):
+ return callback(self._preprocessor(message_content))
+
+ return self._executor.submit(wrapped, message_content)
+
+
class _HandlerManager(object):
def __init__(self, executor, handler):
"""
@@ -374,6 +463,19 @@ class HandlerStatus(enum.Enum):
RETURN_AND_CLOSE = 5 # Send the message out and close connection
+class PreprocessorResult(HandlerResult):
+ def __init__(self, content=None, status=None,
+ message_out=None, message_type=None):
+ """
+ :param content: the content returned if preprocessing is successful
+ :param status HandlerStatus: informs the dispatcher on how to proceed
+ :param message_out protobuf Python class:
+ :param message_type: validator_pb2.Message.* enum value
+ """
+ self.content = content
+ super().__init__(status, message_out, message_type)
+
+
class Handler(object, metaclass=abc.ABCMeta):
@abc.abstractmethod
def handle(self, connection_id, message_content): | Add Dispatcher.set_preprocessor
This commit sets up the plumbing needed for running message
preprocessors but does not actually add any preprocessors. | hyperledger_sawtooth-core | train |
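Note on the commit above: stripped to its skeleton, the plumbing is a per-message-type registry whose preprocessor runs on an executor and hands its result to a continuation. A runnable Python sketch of that shape (all names are illustrative, not Sawtooth's):

    from concurrent.futures import ThreadPoolExecutor

    class Dispatcher:
        def __init__(self):
            self._preprocessors = {}

        def set_preprocessor(self, message_type, preprocessor, executor):
            self._preprocessors[message_type] = (executor, preprocessor)

        def process(self, message_type, content, callback):
            entry = self._preprocessors.get(message_type)
            if entry is None:
                callback(content)       # nothing registered: pass through
                return None
            executor, preprocessor = entry

            def run():
                callback(preprocessor(content))

            return executor.submit(run)

    pool = ThreadPoolExecutor(max_workers=2)
    d = Dispatcher()
    d.set_preprocessor('ping', lambda data: data.upper(), pool)
    d.process('ping', 'hello', print).result()  # prints HELLO from a worker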
2effe741ae974fda5c2bb995d43b9610dd020047 | diff --git a/test/config.js b/test/config.js
index <HASH>..<HASH> 100644
--- a/test/config.js
+++ b/test/config.js
@@ -41,6 +41,7 @@
,'issues/alias-shortcuts'
,'issues/anonymous'
,'issues/anywhere'
+ ,'issues/auto-transport'
,'issues/charset'
,'issues/circular-detect'
,'issues/combo-use'
diff --git a/test/modules/missing/program.js b/test/modules/missing/program.js
index <HASH>..<HASH> 100644
--- a/test/modules/missing/program.js
+++ b/test/modules/missing/program.js
@@ -16,10 +16,10 @@ define(function(require) {
var bogus = require('bogus')
}
catch (ex) { // for node
- bogus = null
+ bogus = {}
}
- test.assert(bogus === null, 'return null when module missing')
+ test.assert(bogus, 'return {} when module missing')
test.done()
}) | Return empty object instead of null when a file fails to be fetched. | seajs_seajs | train
978cbce220bc171520dba58ad765b983b1b4a97a | diff --git a/components/Templates/includes/functions-view_template.php b/components/Templates/includes/functions-view_template.php
index <HASH>..<HASH> 100644
--- a/components/Templates/includes/functions-view_template.php
+++ b/components/Templates/includes/functions-view_template.php
@@ -395,8 +395,7 @@ function frontier_template_once_blocks( $atts, $code ) {
* @since 2.4.0
*/
function frontier_do_subtemplate( $atts, $content ) {
-
- $out = null;
+ $out = '';
$field_name = $atts['field'];
$pod = Pods_Templates::get_obj( $atts['pod'], $atts['id'] );
@@ -405,21 +404,49 @@ function frontier_do_subtemplate( $atts, $content ) {
return '';
}
- $entries = $pod->field( $field_name );
-
$field = $pod->fields( $field_name );
- if ( ! empty( $entries ) && $field ) {
+ $is_repeatable_field = $field && $field->is_repeatable();
+
+ $entries = $pod->field( [
+ 'name' => $field_name,
+ 'display' => $is_repeatable_field,
+ 'display_process_individually' => $is_repeatable_field,
+ ] );
+
+ if ( $field && ! empty( $entries ) ) {
$entries = (array) $entries;
// Force array even for single items since the logic below is using loops.
- if ( 'single' === pods_v( $field['type'] . '_format_type', $field, 'single' ) && ! isset( $entries[0] ) ) {
+ if (
+ (
+ $is_repeatable_field
+ || 'single' === $field->get_single_multi()
+ )
+ && ! isset( $entries[0] )
+ ) {
$entries = array( $entries );
}
// Object types that could be Pods
$object_types = array( 'post_type', 'pod' );
+ if ( $is_repeatable_field ) {
+ foreach ( $entries as $key => $entry ) {
+ $template = frontier_decode_template( $content, $atts );
+
+ $template = str_replace( '{_key}', '{@_index}', $template );
+ $template = str_replace( '{@_key}', '{@_index}', $template );
+ $template = str_replace( '{_index}', '{@_index}', $template );
+
+ $entry = array(
+ '_index' => $key,
+ '_value' => $entry,
+ );
+
+ $out .= frontier_pseudo_magic_tags( $template, $entry, $pod, true );
+ }
+ }
/**
* Note on the change below for issue #3018:
* ... || 'taxonomy' == $pod->fields[ $atts[ 'field' ] ][ 'type' ]
@@ -431,7 +458,7 @@ function frontier_do_subtemplate( $atts, $content ) {
* the $pod->fields array and is something to not expect to be there in
* 3.0 as this was unintentional.
*/
- if ( 'taxonomy' === $field['type'] || in_array( $field['pick_object'], $object_types, true ) ) {
+ elseif ( 'taxonomy' === $field['type'] || in_array( $field['pick_object'], $object_types, true ) ) {
// Match any Pod object or taxonomy
foreach ( $entries as $key => $entry ) {
$subpod = pods( $field['pick_val'] ); | Support repeatable fields in [each] | pods-framework_pods | train |
3dd5ec63d2e8ab16b43b1778e6254931d3b2ba0c | diff --git a/src/plugins/https.test.js b/src/plugins/https.test.js
index <HASH>..<HASH> 100644
--- a/src/plugins/https.test.js
+++ b/src/plugins/https.test.js
@@ -25,6 +25,9 @@ function iopipeComExpect(
Object.keys(reqHeaders).forEach(header => {
expect(obj[`request.${header}`]).toBe(reqHeaders[header]);
});
+ expect(obj['request.headers.Authorization']).toBeUndefined();
+ expect(obj['request.headers.authorization']).toBeUndefined();
+
expect(obj['response.headers.content-type']).toBe(contentType);
expect(obj['response.statusCode']).toBe(statusCode);
diff --git a/src/plugins/ioredis.js b/src/plugins/ioredis.js
index <HASH>..<HASH> 100644
--- a/src/plugins/ioredis.js
+++ b/src/plugins/ioredis.js
@@ -39,9 +39,14 @@ function wrap({ timeline, data = {} } = {}) {
}
if (!Redis.__iopipeShimmer) {
- //Redis.Command &&
- shimmer.wrap(Redis.Command.prototype, 'initPromise', wrapPromise);
- shimmer.wrap(Redis.prototype, 'sendCommand', wrapSendCommand);
+ if (process.env.IOPIPE_TRACE_IOREDIS_INITPROMISE) {
+ shimmer.wrap(
+ Redis.Command && Redis.Command.prototype,
+ 'initPromise',
+ wrapPromise
+ );
+ }
+ shimmer.wrap(Redis && Redis.prototype, 'sendCommand', wrapSendCommand);
Redis.__iopipeShimmer = true;
}
@@ -66,7 +71,6 @@ function wrap({ timeline, data = {} } = {}) {
data[id].error = err.message;
data[id].errorStack = err.stack;
}
-
timeline.mark(`end:${id}`);
return cb.apply(this, arguments);
};
@@ -126,8 +130,10 @@ function wrap({ timeline, data = {} } = {}) {
}
function unwrap() {
- shimmer.unwrap(Redis.Command.prototype, 'initPromise');
- shimmer.unwrap(Redis.prototype, 'sendCommand');
+ if (process.env.IOPIPE_TRACE_IOREDIS_INITPROMISE) {
+ shimmer.unwrap(Redis.Command && Redis.Command.prototype, 'initPromise');
+ }
+ shimmer.unwrap(Redis && Redis.prototype, 'sendCommand');
delete Redis.__iopipeShimmer;
} | Wrapping the wrapping (inception!) of initPromise in a conditional for the beta. There's value in wrapping callbacks, but in this version, the wrap of initPromise generates effectively content-free traces. There's more actionable information from wrapping Redis.prototype.sendCommand.
Also includes explicit tests of http headers, to ensure that we're not capturing authorization headers. | iopipe_iopipe-js-trace | train |
7d4d2e74691b8b92fa858bc15e54215515501dbd | diff --git a/superset-frontend/src/explore/controlUtils.js b/superset-frontend/src/explore/controlUtils.js
index <HASH>..<HASH> 100644
--- a/superset-frontend/src/explore/controlUtils.js
+++ b/superset-frontend/src/explore/controlUtils.js
@@ -20,6 +20,7 @@ import memoizeOne from 'memoize-one';
import { getChartControlPanelRegistry } from '@superset-ui/chart';
import { expandControlConfig } from '@superset-ui/chart-controls';
import { controls as SHARED_CONTROLS } from './controls';
+import * as exploreActions from './actions/exploreActions';
import * as SECTIONS from './controlPanels/sections';
export function getFormDataFromControls(controlsState) {
@@ -93,7 +94,7 @@ export function applyMapStateToPropsToControl(controlState, controlPanelState) {
if (mapStateToProps && controlPanelState) {
return {
...controlState,
- ...mapStateToProps(controlPanelState, controlState),
+ ...mapStateToProps(controlPanelState, controlState, exploreActions),
};
}
return controlState; | fix(explore): 'Edit Datasource' is missing from btn-dropdown (#<I>)
Not sure how it got broken but here's a fix. I'm thinking this is related
to the controls refactor.
closes <URL> | apache_incubator-superset | train |
e29858ac78c2eb5b353225a4260c12220063676b | diff --git a/src/Field/Composite/PositionField.php b/src/Field/Composite/PositionField.php
index <HASH>..<HASH> 100644
--- a/src/Field/Composite/PositionField.php
+++ b/src/Field/Composite/PositionField.php
@@ -100,7 +100,7 @@ class PositionField extends Field
*/
public function getFields()
{
- return [(new IntegerField('position', 0))->unsigned(true)->setShouldBeAddedToModel(false)];
+ return [(new IntegerField('position', 0))->unsigned(true)];
}
/**
diff --git a/test/src/CodeBuilderTest.php b/test/src/CodeBuilderTest.php
index <HASH>..<HASH> 100644
--- a/test/src/CodeBuilderTest.php
+++ b/test/src/CodeBuilderTest.php
@@ -119,7 +119,7 @@ class CodeBuilderTest extends TestCase
*/
public function testChapterClassFields()
{
- $this->assertEquals(['id', 'book_id', 'title'], $this->base_chapter_reflection->getDefaultProperties()['fields']);
+ $this->assertEquals(['id', 'book_id', 'title','position'], $this->base_chapter_reflection->getDefaultProperties()['fields']);
$this->assertInstanceOf(ReflectionMethod::class, $this->base_chapter_reflection->getMethod('getId'));
$this->assertInstanceOf(ReflectionMethod::class, $this->base_chapter_reflection->getMethod('setId'));
@@ -162,9 +162,11 @@ class CodeBuilderTest extends TestCase
{
$default_field_values = $this->base_chapter_reflection->getDefaultProperties()['default_field_values'];
- $this->assertCount(1, $default_field_values);
+ $this->assertCount(2, $default_field_values);
$this->assertArrayHasKey('title', $default_field_values);
$this->assertSame('', $default_field_values['title']);
+ $this->assertArrayHasKey('position', $default_field_values);
+ $this->assertSame(0, $default_field_values['position']);
}
}
\ No newline at end of file | Position field adds methods to the base class | activecollab_databasestructure | train |
2c14a8d51f6e8d0e85ade58c8b234a20e93cdabb | diff --git a/cloudcontrol/library/components/CmsComponent.php b/cloudcontrol/library/components/CmsComponent.php
index <HASH>..<HASH> 100644
--- a/cloudcontrol/library/components/CmsComponent.php
+++ b/cloudcontrol/library/components/CmsComponent.php
@@ -375,7 +375,11 @@ namespace library\components
$this->parameters['documentType'] = $this->storage->getDocumentTypeBySlug($request::$get['documentType'], true);
$this->parameters['bricks'] = $this->storage->getBricks();
} else {
- $this->parameters['documentTypes'] = $this->storage->getDocumentTypes();
+ $documentTypes = $this->storage->getDocumentTypes();
+ if (count($documentTypes) < 1) {
+ throw new \Exception('No Document Types defined yet. <a href="' . $request::$subfolders . $this->parameters['cmsPrefix'] . '/configuration/document-types/new">Please do so first.</a>');
+ }
+ $this->parameters['documentTypes'] = $documentTypes;
}
} elseif ($relativeCmsUri == '/documents/edit-document' && isset($request::$get['slug'])) {
$this->subTemplate = 'cms/documents/document-form'; | Added exception when creating document while no document types exist | jenskooij_cloudcontrol | train |
71a55167aa259f3c25cab24d1f7766ba395c062d | diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -36,12 +36,16 @@ module.exports = class MochaWrapper extends Mocha {
stdio: ['ipc']
})
+ cp.on('exit', () => {
+ console.log('exited')
+ })
+
return new Promise(resolve => {
cp.once('message', () => { resolve(cp) })
})
},
destroy (cp) {
- cp.kill()
+ cp.disconnect()
}
}, {
max: options.maxParallel || os.cpus().length
@@ -87,7 +91,12 @@ module.exports = class MochaWrapper extends Mocha {
stdout += data
}
+ let firstInactivityInterval = true
const inactivityInterval = setInterval(() => {
+ if (firstInactivityInterval) {
+ firstInactivityInterval = false
+ process.stdout.write('\n')
+ }
console.log('still running...')
}, 2 * 60 * 1000) | add a \n before logging 'still running' | fabiosantoscode_mochallel | train |
0bdc881f532edff4da3256040236f37f9a25dba2 | diff --git a/ui/src/data_explorer/components/GroupByTimeDropdown.js b/ui/src/data_explorer/components/GroupByTimeDropdown.js
index <HASH>..<HASH> 100644
--- a/ui/src/data_explorer/components/GroupByTimeDropdown.js
+++ b/ui/src/data_explorer/components/GroupByTimeDropdown.js
@@ -25,7 +25,7 @@ const GroupByTimeDropdown = React.createClass({
} = this.props
let validOptions = groupByTimeOptions
- if (isInDataExplorer) {
+ if (isInDataExplorer || isInRuleBuilder) {
validOptions = validOptions.filter(
({menuOption}) => menuOption !== DEFAULT_DASHBOARD_GROUP_BY_INTERVAL
)
diff --git a/ui/src/kapacitor/components/DataSection.js b/ui/src/kapacitor/components/DataSection.js
index <HASH>..<HASH> 100644
--- a/ui/src/kapacitor/components/DataSection.js
+++ b/ui/src/kapacitor/components/DataSection.js
@@ -32,6 +32,7 @@ export const DataSection = React.createClass({
onAddEvery: PropTypes.func.isRequired,
onRemoveEvery: PropTypes.func.isRequired,
timeRange: PropTypes.shape({}).isRequired,
+ isKapacitorRule: PropTypes.bool,
},
childContextTypes: {
@@ -69,7 +70,13 @@ export const DataSection = React.createClass({
},
handleApplyFuncsToField(fieldFunc) {
- this.props.actions.applyFuncsToField(this.props.query.id, fieldFunc)
+ this.props.actions.applyFuncsToField(
+ this.props.query.id,
+ fieldFunc,
+ // this 3rd arg (isKapacitorRule) makes sure 'auto' is not added as
+ // default group by in Kapacitor rule
+ this.props.isKapacitorRule
+ )
this.props.onAddEvery(defaultEveryFrequency)
},
@@ -109,7 +116,7 @@ export const DataSection = React.createClass({
},
renderQueryBuilder() {
- const {query} = this.props
+ const {query, isKapacitorRule} = this.props
return (
<div className="query-builder">
@@ -129,7 +136,7 @@ export const DataSection = React.createClass({
onToggleField={this.handleToggleField}
onGroupByTime={this.handleGroupByTime}
applyFuncsToField={this.handleApplyFuncsToField}
- isKapacitorRule={true}
+ isKapacitorRule={isKapacitorRule}
/>
</div>
)
diff --git a/ui/src/kapacitor/components/KapacitorRule.js b/ui/src/kapacitor/components/KapacitorRule.js
index <HASH>..<HASH> 100644
--- a/ui/src/kapacitor/components/KapacitorRule.js
+++ b/ui/src/kapacitor/components/KapacitorRule.js
@@ -71,6 +71,7 @@ export const KapacitorRule = React.createClass({
actions={queryActions}
onAddEvery={this.handleAddEvery}
onRemoveEvery={this.handleRemoveEvery}
+ isKapacitorRule={true}
/>
<ValuesSection
rule={rule}
diff --git a/ui/src/utils/queryTransitions.js b/ui/src/utils/queryTransitions.js
index <HASH>..<HASH> 100644
--- a/ui/src/utils/queryTransitions.js
+++ b/ui/src/utils/queryTransitions.js
@@ -86,7 +86,7 @@ export function toggleTagAcceptance(query) {
export function applyFuncsToField(
query,
{field, funcs},
- isInDataExplorer = false
+ preventAutoGroupBy = false
) {
const shouldRemoveFuncs = funcs.length === 0
const nextFields = query.fields.map(f => {
@@ -103,7 +103,7 @@ export function applyFuncsToField(
return f
})
- const defaultGroupBy = isInDataExplorer
+ const defaultGroupBy = preventAutoGroupBy
? DEFAULT_DATA_EXPLORER_GROUP_BY_INTERVAL
: DEFAULT_DASHBOARD_GROUP_BY_INTERVAL
// If there are no functions, then there should be no GROUP BY time | Prevent 'auto' GROUP BY option in Kapacitor rule builder | influxdata_influxdb | train |
b5f7e697565cf2fe8a4e5dc42c8495f9f8a9444f | diff --git a/org/xbill/DNS/BitString.java b/org/xbill/DNS/BitString.java
index <HASH>..<HASH> 100644
--- a/org/xbill/DNS/BitString.java
+++ b/org/xbill/DNS/BitString.java
@@ -181,9 +181,6 @@ equals(Object o) {
/** Compare two bitstrings. */
public int
compareTo(Object o) {
- if (!(o instanceof BitString))
- throw new IllegalArgumentException();
-
BitString b = (BitString) o;
for (int i = 0; i < data.length && i < b.data.length; i++)
diff --git a/org/xbill/DNS/Name.java b/org/xbill/DNS/Name.java
index <HASH>..<HASH> 100644
--- a/org/xbill/DNS/Name.java
+++ b/org/xbill/DNS/Name.java
@@ -499,9 +499,6 @@ hashCode() {
public int
compareTo(Object o) {
- if (!(o instanceof Name))
- throw new IllegalArgumentException();
-
Name arg = (Name) o;
int compares = labels > arg.labels ? arg.labels : labels; | According to the Comparator documentation (even though Name and BitString
don't implement Comparable, since it's not in JDK <I>), compareTo() should
throw a ClassCastException when the argument is not a compatible type.
Allow this to happen as a result of the explicit cast, rather than incorrectly
throwing an IllegalArgumentException.
git-svn-id: <URL> | dnsjava_dnsjava | train |
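Note on the commit above: the fix embraces the comparator contract by letting the failing cast raise ClassCastException instead of pre-checking the type. The equivalent EAFP style in a Python sketch (the Name class here is a stand-in, not dnsjava's):

    class Name:
        def __init__(self, labels):
            self.labels = labels

        def compare_to(self, other):
            # no isinstance pre-check: a non-Name 'other' raises on its own,
            # just as the bare Java cast now raises ClassCastException
            return (self.labels > other.labels) - (self.labels < other.labels)

    assert Name(3).compare_to(Name(2)) == 1
    # Name(3).compare_to('oops') raises AttributeError naturally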
f5aaded3343e8bdbbd20cb87db77e389e48ddaaf | diff --git a/src/predictionio/BaseClient.php b/src/predictionio/BaseClient.php
index <HASH>..<HASH> 100644
--- a/src/predictionio/BaseClient.php
+++ b/src/predictionio/BaseClient.php
@@ -4,6 +4,7 @@ namespace predictionio;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\ClientException;
+use GuzzleHttp\Promise\PromiseInterface;
/**
* Base client for Event and Engine client
@@ -43,17 +44,21 @@ abstract class BaseClient {
* @param string $method HTTP request method
* @param string $url Relative or absolute url
* @param string $body HTTP request body
- *
- * @return array JSON response
- * @throws PredictionIOAPIError Request error
+ * @param boolean $async Send request asynchronously and return a promise
+ * @return array|PromiseInterface JSON response
+ * @throws PredictionIOAPIError
*/
- protected function sendRequest($method, $url, $body) {
+ protected function sendRequest($method, $url, $body, $async = false) {
$options = ['headers' => ['Content-Type' => 'application/json'],
'body' => $body];
try {
- $response = $this->client->request($method, $url, $options);
- return json_decode($response->getBody(), true);
+ if ($async) {
+ return $this->client->requestAsync($method, $url, $options);
+ } else {
+ $response = $this->client->request($method, $url, $options);
+ return json_decode($response->getBody(), true);
+ }
} catch (ClientException $e) {
throw new PredictionIOAPIError($e->getMessage());
}
diff --git a/src/predictionio/EventClient.php b/src/predictionio/EventClient.php
index <HASH>..<HASH> 100644
--- a/src/predictionio/EventClient.php
+++ b/src/predictionio/EventClient.php
@@ -3,7 +3,8 @@
namespace predictionio;
use GuzzleHttp\Client;
use \DateTime;
-
+use GuzzleHttp\Promise\PromiseInterface;
+
/**
* Client for connecting to an Event Server
*
@@ -211,14 +212,14 @@ class EventClient extends BaseClient {
* @param string Time of the event in ISO 8601 format
* (e.g. 2014-09-09T16:17:42.937-08:00).
* Default is the current time.
- *
- * @return string JSON response
+ * @param bool Send request asynchronously
+ * @return array|PromiseInterface JSON response or PromiseInterface object if async
*
* @throws PredictionIOAPIError Request error
*/
public function recordUserActionOnItem($event, $uid, $iid,
array $properties=array(),
- $eventTime=null) {
+ $eventTime=null, $async = false) {
$eventTime = $this->getEventTime($eventTime);
if (empty($properties)) $properties = (object)$properties;
$json = json_encode([
@@ -231,7 +232,7 @@ class EventClient extends BaseClient {
'eventTime' => $eventTime,
]);
- return $this->sendRequest('POST', $this->eventUrl, $json);
+ return $this->sendRequest('POST', $this->eventUrl, $json, $async);
}
/** | Async option for logging user actions on item events | apache_predictionio-sdk-php | train |
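Note on the commit above: the $async flag gives one method two return shapes, a decoded response when blocking and a promise otherwise. The same API design in a short Python sketch using futures (class and method names are mine):

    from concurrent.futures import ThreadPoolExecutor

    class Client:
        def __init__(self):
            self._pool = ThreadPoolExecutor(max_workers=4)

        def _do_request(self, method, url, body):
            return {'echo': body}   # stand-in for the real HTTP round trip

        def send_request(self, method, url, body, async_=False):
            if async_:
                return self._pool.submit(self._do_request, method, url, body)
            return self._do_request(method, url, body)

    c = Client()
    fut = c.send_request('POST', '/events', {'event': 'rate'}, async_=True)
    print(fut.result())             # caller decides when to block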
7b3bb8c00f6dcf8ac9613cd2f3cdc71d57b0c275 | diff --git a/pytorch_pretrained_bert/modeling.py b/pytorch_pretrained_bert/modeling.py
index <HASH>..<HASH> 100644
--- a/pytorch_pretrained_bert/modeling.py
+++ b/pytorch_pretrained_bert/modeling.py
@@ -678,7 +678,7 @@ class BertForPreTraining(PreTrainedBertModel):
if masked_lm_labels is not None and next_sentence_label is not None:
loss_fct = CrossEntropyLoss(ignore_index=-1)
- masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels(-1))
+ masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1))
next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1))
total_loss = masked_lm_loss + next_sentence_loss
return total_loss | fix typo in input for masked lm loss function | huggingface_pytorch-pretrained-BERT | train |
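Note on the commit above: the buggy line called the tensor (masked_lm_labels(-1)) where a reshape (.view(-1)) was intended. What the corrected loss computes, as an isolated PyTorch snippet (shapes invented for illustration):

    import torch
    import torch.nn as nn

    vocab_size, batch, seq = 10, 2, 4
    scores = torch.randn(batch, seq, vocab_size)
    labels = torch.randint(0, vocab_size, (batch, seq))
    labels[0, 0] = -1                       # masked-out position

    loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
    loss = loss_fct(scores.view(-1, vocab_size),  # (batch*seq, vocab)
                    labels.view(-1))              # (batch*seq,)
    print(loss.item())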
9cff61a2be0c9ff6b7d3056bc3b5a14107c227b9 | diff --git a/lib/ReactViews/Map/Panels/SharePanel/BuildShareLink.js b/lib/ReactViews/Map/Panels/SharePanel/BuildShareLink.js
index <HASH>..<HASH> 100644
--- a/lib/ReactViews/Map/Panels/SharePanel/BuildShareLink.js
+++ b/lib/ReactViews/Map/Panels/SharePanel/BuildShareLink.js
@@ -135,7 +135,11 @@ function addSharedMembers(terria, initSources) {
terria.catalog.serializeToJson({
itemFilter: combineFilters([
function(item) {
- return !defined(item.data) || item.isCsvForCharting;
+ if (CatalogMember.itemFilters.noLocalData(item)) {
+ return true;
+ } else if (item.isCsvForCharting) {
+ return true;
+ }
}
]),
propertyFilter: combineFilters([ | Make BuildShareLink addSharedMembers more verbose | TerriaJS_terriajs | train |
250c9beff9b5d9f43f94e379d318c579238d942f | diff --git a/cypress/integration/rendering/flowchart-v2.spec.js b/cypress/integration/rendering/flowchart-v2.spec.js
index <HASH>..<HASH> 100644
--- a/cypress/integration/rendering/flowchart-v2.spec.js
+++ b/cypress/integration/rendering/flowchart-v2.spec.js
@@ -611,6 +611,16 @@ flowchart RL
);
});
+ it('76: handle unicode encoded character with HTML labels true', () => {
+ imgSnapshotTest(
+ `flowchart TB
+ a{{"Lorem 'ipsum' dolor 'sit' amet, 'consectetur' adipiscing 'elit'."}}
+ --> b{{"Lorem #quot;ipsum#quot; dolor #quot;sit#quot; amet,#quot;consectetur#quot; adipiscing #quot;elit#quot;."}}
+ `,
+ {htmlLabels: true, flowchart: {htmlLabels: true}, securityLevel: 'loose'}
+ );
+ });
+
it('2050: handling of different rendering direction in subgraphs', () => {
imgSnapshotTest(
` | #<I> Added new test case | knsv_mermaid | train |
c075722b70501129f2a55b6730fb8d14712260d6 | diff --git a/salt/modules/win_system.py b/salt/modules/win_system.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_system.py
+++ b/salt/modules/win_system.py
@@ -419,12 +419,11 @@ def set_computer_desc(desc=None):
system_info = win32net.NetServerGetInfo(None, 101)
# If desc is passed, decode it for unicode
- if desc:
- if not isinstance(desc, str):
- desc = desc.decode('utf-8')
- system_info['comment'] = desc
- else:
+ if desc is None:
return False
+ if not isinstance(desc, str):
+ desc = desc.decode('utf-8')
+ system_info['comment'] = desc
# Apply new settings
try:
@@ -505,7 +504,7 @@ def get_computer_desc():
salt 'minion-id' system.get_computer_desc
'''
desc = get_system_info()['description']
- return desc if desc else False
+ return False if desc is None else desc
get_computer_description = salt.utils.alias_function(get_computer_desc, 'get_computer_description') # pylint: disable=invalid-name | win_system: Consider an empty computer description valid
Calling {get,set}_computer_desc should allow for setting and
getting an empty string as the computer description without
returning the error value of False. | saltstack_salt | train |
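Note on the commit above: the whole point is that '' is falsy but still a legitimate description, so only an `is None` check should mean "nothing to set". The distinction in a few lines of runnable Python:

    def set_desc(desc):
        if desc is None:            # reject only the truly absent value...
            return False
        return desc                 # ...so '' passes through as a real value

    assert set_desc(None) is False
    assert set_desc('') == ''       # empty description is accepted
    assert set_desc('lab box') == 'lab box'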
1bc1401448026e1ab8aeb54f4e01d928b0af4462 | diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
index <HASH>..<HASH> 100644
--- a/salt/utils/parsers.py
+++ b/salt/utils/parsers.py
@@ -9,6 +9,7 @@
import os
import sys
+import logging
import optparse
from functools import partial
from salt import config, log, version
@@ -119,6 +120,10 @@ class OptionParser(optparse.OptionParser):
for mixin_after_parsed_func in self._mixin_after_parsed_funcs:
mixin_after_parsed_func(self)
+ if self.config.get('conf_file', None) is not None:
+ logging.getLogger(__name__).info(
+ "Loaded configuration file: %s", self.config['conf_file']
+ )
# Retain the standard behaviour of optparse to return options and args
return options, args
@@ -210,7 +215,12 @@ class ConfigDirMixIn(DeprecatedConfigMessage):
# XXX: Remove deprecation warning in next release
self.print_config_warning()
elif not os.path.isdir(self.options.config_dir):
- self.error("{0} is not a directory".format(self.options.config_dir))
+ # No logging is configured yet
+ sys.stderr.write(
+ "WARNING: \"{0}\" directory does not exist.\n".format(
+ self.options.config_dir
+ )
+ )
# Make sure we have an absolute path
self.options.config_dir = os.path.abspath(self.options.config_dir)
@@ -691,7 +701,6 @@ class SaltCMDOptionParser(OptionParser, ConfigDirMixIn, TimeoutMixIn,
self.config['fun'] = self.args[1]
self.config['arg'] = self.args[2:]
-
def setup_config(self):
return config.master_config(self.get_config_file_path('master')) | Complete fix saltstack/salt#<I>
Turn the exit error into a warning. Added a logging message telling us what configuration file, if any, was loaded. | saltstack_salt | train |
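Note on the commit above: this is two-phase reporting. Before logging is configured, problems can only go to stderr; once the config is parsed, an info record names the file that was actually loaded. A compact sketch of the same flow (paths and message text are illustrative):

    import logging
    import os
    import sys

    logging.basicConfig(level=logging.INFO)

    def load_config(config_dir):
        if not os.path.isdir(config_dir):
            # logging is not trustworthy yet, so warn on stderr directly
            sys.stderr.write(
                'WARNING: "{0}" directory does not exist.\n'.format(config_dir))
        conf_file = os.path.join(os.path.abspath(config_dir), 'master')
        # ... parse conf_file here ...
        logging.getLogger(__name__).info(
            'Loaded configuration file: %s', conf_file)
        return conf_file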
694dbc76359a95f6e11e747e7150b7808b83296d | diff --git a/addons/devops/src/main/java/io/fabric8/forge/devops/springboot/SpringBootNewProjectCommand.java b/addons/devops/src/main/java/io/fabric8/forge/devops/springboot/SpringBootNewProjectCommand.java
index <HASH>..<HASH> 100644
--- a/addons/devops/src/main/java/io/fabric8/forge/devops/springboot/SpringBootNewProjectCommand.java
+++ b/addons/devops/src/main/java/io/fabric8/forge/devops/springboot/SpringBootNewProjectCommand.java
@@ -66,7 +66,7 @@ public class SpringBootNewProjectCommand extends AbstractDevOpsCommand implement
private static final transient Logger LOG = LoggerFactory.getLogger(SpringBootNewProjectCommand.class);
// lets use a different category for this command
- private static final String CATEGORY = "Spring-Boot";
+ private static final String CATEGORY = "Spring Boot";
// lets use 1.3.x which currently fabric8 works best with
private static final String SPRING_BOOT_DEFAULT_VERSION = "1.4.1"; | Fixes #<I> to use category for Spring Boot command. Also let's use SB <I> as the default | fabric8io_fabric8-forge | train
832dc7bc1e3a2cdd89508c5aa2dd4839b76bd848 | diff --git a/lib/syclink/formatter.rb b/lib/syclink/formatter.rb
index <HASH>..<HASH> 100644
--- a/lib/syclink/formatter.rb
+++ b/lib/syclink/formatter.rb
@@ -55,18 +55,17 @@ module SycLink
# Prints the table's header
def print_header(header, formatter)
- puts cut(sprintf(formatter, *header), 80)
+ puts sprintf(formatter, *header)
end
# Prints a horizontal line below the header
def print_horizontal_line(line, separator, widths)
- puts cut(widths.map { |width| line * width }.join(separator), 80)
+ puts widths.map { |width| line * width }.join(separator)
end
# Prints columns in a table format
def print_table(columns, formatter)
- columns.transpose.each { |row| puts cut(sprintf(formatter, *row),
- 80) }
+ columns.transpose.each { |row| puts sprintf(formatter, *row) }
end
# Cuts the string down to the specified size | Remove the cut calls that trimmed rows to size. This is now replaced by the scale_widths method | sugaryourcoffee_syclink | train
9076e854af86540b9bca1eb18d916a0f4d3d573a | diff --git a/packages/blueprint-mongodb/tests/unit-tests/lib/resource-controller-test.js b/packages/blueprint-mongodb/tests/unit-tests/lib/resource-controller-test.js
index <HASH>..<HASH> 100644
--- a/packages/blueprint-mongodb/tests/unit-tests/lib/resource-controller-test.js
+++ b/packages/blueprint-mongodb/tests/unit-tests/lib/resource-controller-test.js
@@ -161,7 +161,7 @@ describe ('lib | ResourceController', function () {
});
});
- it.only ('should get a single resource', function () {
+ it ('should get a single resource', function () {
const {authors: [author]} = seed ('$default');
return request () | fix: Remove the only keyword from test cases | onehilltech_blueprint | train |
05ad9c18c3e3d0067992417023232966bfe15704 | diff --git a/examples/tagtool.py b/examples/tagtool.py
index <HASH>..<HASH> 100755
--- a/examples/tagtool.py
+++ b/examples/tagtool.py
@@ -42,14 +42,6 @@ tt1_card_map = {
"\x11\x48": "Topaz-96 (IRT-5011)",
"\x12\x4C": "Topaz-512 (TPZ-505-016)"
}
-tt3_card_map = {
- "\x00\xF0": "FeliCa Lite RC-S965",
- "\x00\xF1": "FeliCa Lite-S RC-S966",
- "\x01\xE0": "FeliCa Plug RC-S801/RC-S802",
- "\x01\x20": "FeliCa Card RC-S962 [424 kbps, 4KB FRAM]",
- "\x03\x01": "FeliCa Card RC-S860 [212 kbps, 4KB FEPROM]",
- "\x0f\x0d": "FeliCa Card RC-S889 [424 kbps, 9KB FRAM]",
- }
def format_data(data, w=16):
printable = string.digits + string.letters + string.punctuation + ' '
@@ -301,16 +293,9 @@ class TagTool(CommandLineInterface):
if tag.type == "Type1Tag":
tag._hr = tag.read_id()[0:2]
print(" " + tt1_card_map.get(str(tag._hr), "unknown card"))
- elif tag.type == "Type2Tag":
- pass
- elif tag.type == "Type3Tag":
- icc = str(tag.pmm[0:2]) # ic code
- print(" " + tt3_card_map.get(icc, "unknown card"))
- elif tag.type == "Type4Tag":
- pass
if tag.ndef:
- print("NDEF capabilities:")
+ print("NDEF Capabilities:")
if self.options.verbose and tag.type == "Type3Tag":
print(" [%s]" % tag.ndef.attr.pretty())
print(" version = %s" % tag.ndef.version)
@@ -319,31 +304,12 @@ class TagTool(CommandLineInterface):
print(" capacity = %d byte" % tag.ndef.capacity)
print(" message = %d byte" % tag.ndef.length)
if tag.ndef.length > 0:
- if self.options.verbose:
- print("NDEF message dump:")
- print(format_data(tag.ndef.message))
- print("NDEF record list:")
+ print("NDEF Message:")
print(tag.ndef.message.pretty())
if self.options.verbose:
- if tag.type == "Type1Tag":
- mem_size = {0x11: 120, 0x12: 512}.get(tag._hr[0], 2048)
- mem_data = bytearray()
- for offset in range(0, mem_size, 8):
- try: mem_data += tag[offset:offset+8]
- except nfc.clf.DigitalProtocolError as error:
- log.error(repr(error)); break
- print("TAG memory dump:")
- print(format_data(mem_data, w=8))
- tag.clf.sense([nfc.clf.TTA(uid=tag.uid)])
- elif tag.type == "Type2Tag":
- print("TAG memory dump:")
- print('\n'.join(tag.dump()))
- elif tag.type == "Type3Tag":
- print("TAG memory dump:")
- print('\n'.join(tag.dump()))
- elif tag.type == "Type4Tag":
- pass
+ print("Memory Dump:")
+ print(' ' + '\n '.join(tag.dump()))
def dump_tag(self, tag):
if tag.ndef:
@@ -420,6 +386,8 @@ class TagTool(CommandLineInterface):
return True
def format_tt3_tag(self, tag):
+ #tag.format(wipe=True)
+ #return True
block_count = tt3_determine_block_count(tag)
if tag.pmm[0:2] in ("\x00\xF0", "\x00\xF1"):
block_count -= 1 # last block on FeliCa Lite/S is unusable | Use tag.dump() method for printing a memory dump | nfcpy_nfcpy | train |
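Note on the commit above: dump() returns a list of printable lines, which is why the patch can simply join them. A hypothetical end-to-end use with a recent nfcpy (the connect()/rdwr options shown reflect later nfcpy releases and may not match the version this commit targets):

    import nfc

    def on_connect(tag):
        print('Memory Dump:')
        print('  ' + '\n  '.join(tag.dump()))  # dump() yields formatted rows
        return True

    clf = nfc.ContactlessFrontend('usb')
    try:
        clf.connect(rdwr={'on-connect': on_connect})
    finally:
        clf.close()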
95df4a4222d645db8bdc3e3446b1419cffc3cd61 | diff --git a/src/calmjs/runtime.py b/src/calmjs/runtime.py
index <HASH>..<HASH> 100644
--- a/src/calmjs/runtime.py
+++ b/src/calmjs/runtime.py
@@ -903,12 +903,22 @@ class BaseArtifactRegistryRuntime(BaseRuntime):
def init_argparser(self, argparser):
super(BaseArtifactRegistryRuntime, self).init_argparser(argparser)
+ self.init_argparser_package_names(argparser)
+
+ def init_argparser_package_names(self, argparser, help=(
+ 'names of the python package to generate artifacts for; '
+ 'note that the metadata directory for the specified '
+ 'packages must be writable')):
+ """
+ Default helper for setting up the package_names option.
+
+ This is separate so that subclasses are not assumed for the
+ purposes of artifact creation; they should consider modifying
+ the default help message to reflect the fact.
+ """
+
argparser.add_argument(
- 'package_names', metavar=metavar('package'), nargs='+',
- help='names of the python package to generate artifacts for; '
- 'note that the metadata directory for the specified '
- 'packages must be writable',
- )
+ 'package_names', metavar=metavar('package'), nargs='+', help=help)
def run(self, argparser=None, package_names=[], *a, **kwargs):
for package_name in package_names: | Minor init_argparser change
- In the base artifact runtime, split out the package name setup to a
separate method to follow the same convention in other runtime classes
that allow subclasses alternative behaviors. | calmjs_calmjs | train |
402920454303936830d7a498e64b06ff739c4068 | diff --git a/gremlin-groovy/src/main/java/org/apache/tinkerpop/gremlin/groovy/engine/ScriptEngines.java b/gremlin-groovy/src/main/java/org/apache/tinkerpop/gremlin/groovy/engine/ScriptEngines.java
index <HASH>..<HASH> 100644
--- a/gremlin-groovy/src/main/java/org/apache/tinkerpop/gremlin/groovy/engine/ScriptEngines.java
+++ b/gremlin-groovy/src/main/java/org/apache/tinkerpop/gremlin/groovy/engine/ScriptEngines.java
@@ -56,6 +56,8 @@ import java.util.stream.Collectors;
public class ScriptEngines implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(ScriptEngines.class);
+ private final static ScriptEngineManager SCRIPT_ENGINE_MANAGER = new ScriptEngineManager();
+
/**
* {@code ScriptEngine} objects configured for the server keyed on the language name.
*/
@@ -294,8 +296,7 @@ public class ScriptEngines implements AutoCloseable {
return Optional.of((ScriptEngine) new GremlinGroovyScriptEngine(
new DefaultImportCustomizerProvider(imports, staticImports), securityCustomizerProvider));
} else {
- final ScriptEngineManager manager = new ScriptEngineManager();
- return Optional.ofNullable(manager.getEngineByName(language));
+ return Optional.ofNullable(SCRIPT_ENGINE_MANAGER.getEngineByName(language));
}
} | Re-use the ScriptEngineManager in the ScriptEngines class. | apache_tinkerpop | train |
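Note on the commit above: constructing a ScriptEngineManager typically triggers engine-factory discovery, so the change builds it once and reuses it for every lookup. The generic shape of that fix in Python (the expensive constructor is simulated):

    class EngineManager:
        def __init__(self):
            print('expensive factory scan...')   # stand-in for the real cost

        def engine_by_name(self, name):
            return 'engine:' + name

    _MANAGER = EngineManager()                   # built once at import time

    def get_engine(name):
        return _MANAGER.engine_by_name(name)     # every lookup reuses it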
cbdc7a736d48fe71746c810f74c059dc4ed68c29 | diff --git a/lib/engineyard-api-client.rb b/lib/engineyard-api-client.rb
index <HASH>..<HASH> 100644
--- a/lib/engineyard-api-client.rb
+++ b/lib/engineyard-api-client.rb
@@ -11,7 +11,6 @@ require 'engineyard-api-client/resolver'
require 'engineyard-api-client/version'
require 'engineyard-api-client/errors'
require 'json'
-require 'engineyard/eyrc'
require 'pp'
module EY
@@ -37,10 +36,15 @@ module EY
end
default_endpoint!
- def initialize(token = nil)
- @token = token
- @token ||= EY::EYRC.load.api_token
- raise ArgumentError, "EY Cloud API token required" unless @token
+ def initialize(token)
+ self.token = token
+ end
+
+ def token=(new_token)
+ unless new_token
+ raise ArgumentError, "EY Cloud API token required"
+ end
+ @token = new_token
end
def request(url, opts={})
@@ -85,10 +89,6 @@ module EY
EY::APIClient::User.from_hash(self, request('/current_user')['user'])
end
- class InvalidCredentials < EY::APIClient::Error; end
- class RequestFailed < EY::APIClient::Error; end
- class ResourceNotFound < RequestFailed; end
-
def self.request(path, opts={})
url = self.endpoint + "api/v2#{path}"
method = (opts.delete(:method) || 'get').to_s.downcase.to_sym
@@ -138,10 +138,7 @@ module EY
end
def self.authenticate(email, password)
- api_token = request("/authenticate", :method => "post",
- :params => { :email => email, :password => password })["api_token"]
- EY::EYRC.load.api_token = api_token
- api_token
+ request("/authenticate", :method => "post", :params => { :email => email, :password => password })["api_token"]
end
end # API
diff --git a/lib/engineyard-api-client/errors.rb b/lib/engineyard-api-client/errors.rb
index <HASH>..<HASH> 100644
--- a/lib/engineyard-api-client/errors.rb
+++ b/lib/engineyard-api-client/errors.rb
@@ -3,6 +3,10 @@ module EY
class Error < RuntimeError
end
+ class RequestFailed < Error; end
+ class InvalidCredentials < RequestFailed; end
+ class ResourceNotFound < RequestFailed; end
+
class BadEndpointError < Error
def initialize(endpoint)
super "#{endpoint.inspect} is not a valid endpoint URI. Endpoint must be an absolute URI."
@@ -17,8 +21,8 @@ module EY
end
end
- class ResolverError < Error; end
- class NoMatchesError < ResolverError; end
+ class ResolverError < Error; end
+ class NoMatchesError < ResolverError; end
class MultipleMatchesError < ResolverError; end
class NoAppError < Error
diff --git a/lib/engineyard/cli/api.rb b/lib/engineyard/cli/api.rb
index <HASH>..<HASH> 100644
--- a/lib/engineyard/cli/api.rb
+++ b/lib/engineyard/cli/api.rb
@@ -1,5 +1,6 @@
require 'highline'
require 'engineyard-api-client'
+require 'engineyard/eyrc'
module EY
class CLI
diff --git a/spec/engineyard-api-client/api_spec.rb b/spec/engineyard-api-client/api_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/engineyard-api-client/api_spec.rb
+++ b/spec/engineyard-api-client/api_spec.rb
@@ -18,23 +18,17 @@ describe EY::APIClient do
end
end
- it "gets the api token from ~/.eyrc if possible" do
- write_eyrc({"api_token" => "asdf"})
- EY::APIClient.new.token.should == "asdf"
+ it "gets the api token from initialize" do
+ EY::APIClient.new('asdf').token.should == "asdf"
end
- context "fetching the token from EY cloud" do
+ describe ".authenticate" do
before(:each) do
FakeWeb.register_uri(:post, "https://cloud.engineyard.com/api/v2/authenticate", :body => %|{"api_token": "asdf"}|, :content_type => 'application/json')
- @token = EY::APIClient.authenticate("[email protected]", "foo")
end
- it "returns an EY::APIClient" do
- @token.should == "asdf"
- end
-
- it "puts the api token into .eyrc" do
- read_eyrc["api_token"].should == "asdf"
+ it "returns the token" do
+ EY::APIClient.authenticate("[email protected]", "foo").should == "asdf"
end
end
@@ -43,7 +37,7 @@ describe EY::APIClient do
lambda {
EY::APIClient.authenticate("[email protected]", "foo")
- }.should raise_error(EY::APIClient::Error)
+ }.should raise_error(EY::APIClient::InvalidCredentials)
end
it "raises RequestFailed with a friendly error when cloud is under maintenance" do | Remove api requirement on EYRC | engineyard_engineyard | train |
cb54b66430983daa3ce7419fde97c600406343ce | diff --git a/tasks/lab.js b/tasks/lab.js
index <HASH>..<HASH> 100644
--- a/tasks/lab.js
+++ b/tasks/lab.js
@@ -17,7 +17,8 @@ module.exports = function (grunt) {
var done = this.async();
var defaultConfig = {
- files : [ "test/**/*.js" ]
+ files : [ "test/**/*.js" ],
+ cmd: []
};
var labOptions = [
@@ -55,6 +56,10 @@ module.exports = function (grunt) {
}
});
+ _.forIn(config.cmd, function(cmd){
+ args.push(cmd);
+ });
+
args.push(grunt.file.expand(config.files));
var binName = [
diff --git a/test/lab_spec.js b/test/lab_spec.js
index <HASH>..<HASH> 100644
--- a/test/lab_spec.js
+++ b/test/lab_spec.js
@@ -84,7 +84,8 @@ describe("grunt-lab plugin", function () {
disableLeakDetection : true,
reporter : "console",
minCoverage : 100,
- timeout : 0
+ timeout : 0,
+ cmd : ['-v']
});
spawn = sinon.stub(task.grunt.util, "spawn").callsArg(1);
@@ -118,7 +119,7 @@ describe("grunt-lab plugin", function () {
cmd : path.join(__dirname, "..", "node_modules", "lab", "bin", "lab.cmd"),
args : [
"-c", "-C", "-l", "-r",
- "console", "-t", 100, "-m", 0, "test/lab_spec.js"
+ "console", "-t", 100, "-m", 0, "-v", "test/lab_spec.js"
],
opts : { stdio : "inherit" }
});
@@ -149,7 +150,7 @@ describe("grunt-lab plugin", function () {
cmd : path.join(__dirname, "..", "node_modules", "lab", "bin", "lab"),
args : [
"-c", "-C", "-l", "-r",
- "console", "-t", 100, "-m", 0, "test/lab_spec.js"
+ "console", "-t", 100, "-m", 0, "-v", "test/lab_spec.js"
],
opts : { stdio : "inherit" }
}); | Added support for command line calls. | wtcross_grunt-lab | train |
9cc428bd728712605dab5014c922a10969f430b3 | diff --git a/packages/node_modules/@webex/plugin-webhooks/test/integration/spec/webhooks.js b/packages/node_modules/@webex/plugin-webhooks/test/integration/spec/webhooks.js
index <HASH>..<HASH> 100644
--- a/packages/node_modules/@webex/plugin-webhooks/test/integration/spec/webhooks.js
+++ b/packages/node_modules/@webex/plugin-webhooks/test/integration/spec/webhooks.js
@@ -25,7 +25,7 @@ describe('plugin-webhooks', function () {
let room;
before(() => webex.rooms.create({
- title: 'Webex Teams Webhook Test Room'
+ title: 'Webex Webhook Test Room'
})
.then((r) => { room = r; })); | test(plugin-webhooks): webex teams -> webex rebranding | webex_spark-js-sdk | train |
eb710ff0464bf7ec7e996f98a355cbaeeec9efb8 | diff --git a/tornado/util.py b/tornado/util.py
index <HASH>..<HASH> 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -17,8 +17,6 @@ import sys
import zlib
-import sys
-
class ObjectDict(dict):
"""Makes a dictionary behave like an object, with attribute-style access.
"""
@@ -75,10 +73,6 @@ def import_object(name):
True
>>> import_object('tornado') is tornado
True
- >>> import_object('missing_module')
- Traceback (most recent call last):
- ...
- ImportError: No module named missing_module
>>> import_object('tornado.missing_module')
Traceback (most recent call last):
...
@@ -92,8 +86,8 @@ def import_object(name):
try:
return getattr(obj, parts[-1])
except AttributeError:
- exc_info = sys.exc_info()
- raise ImportError, "No module named %s" % parts[-1], exc_info[2]
+ raise ImportError("No module named %s" % parts[-1])
+
# Fake unicode literal support: Python 3.2 doesn't have the u'' marker for
# literal strings, and alternative solutions like "from __future__ import | Fix the import_object ImportError on Python 3.
Python 3 also changed exception messages slightly so remove one case
from the doctest. | tornadoweb_tornado | train |
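Note on the commit above: the removed line used Python 2's three-argument raise statement (`raise Type, value, traceback`), which is a syntax error under Python 3, so the fix falls back to a plain constructor call. In Python-3-only code, chaining with `from` would keep the original cause attached:

    def import_attr(obj, name):
        try:
            return getattr(obj, name)
        except AttributeError as err:
            raise ImportError('No module named %s' % name) from err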
b017863388b3e6bc5f498320293f33b500fd7e32 | diff --git a/maven-plugin/src/main/java/hudson/maven/Maven3Builder.java b/maven-plugin/src/main/java/hudson/maven/Maven3Builder.java
index <HASH>..<HASH> 100644
--- a/maven-plugin/src/main/java/hudson/maven/Maven3Builder.java
+++ b/maven-plugin/src/main/java/hudson/maven/Maven3Builder.java
@@ -32,6 +32,7 @@ import hudson.model.Result;
import hudson.remoting.Channel;
import hudson.remoting.DelegatingCallable;
import hudson.remoting.Future;
+import hudson.util.CopyOnWriteList;
import hudson.util.IOException2;
import java.io.IOException;
@@ -98,7 +99,7 @@ public class Maven3Builder extends AbstractMavenBuilder implements DelegatingCal
sourceProxies = new HashMap<ModuleName, ProxyImpl2>(proxies);
this.proxies = new HashMap<ModuleName, MavenBuildProxy2>(proxies);
for (Entry<ModuleName,MavenBuildProxy2> e : this.proxies.entrySet())
- e.setValue(new FilterImpl(e.getValue(), this.mavenBuildInformation));
+ e.setValue(new FilterImpl(e.getValue(), this.mavenBuildInformation,Channel.current()));
this.reporters.putAll( reporters );
}
@@ -107,7 +108,7 @@ public class Maven3Builder extends AbstractMavenBuilder implements DelegatingCal
MavenExecutionListener mavenExecutionListener = new MavenExecutionListener( this );
try {
- futures = new ArrayList<Future<?>>();
+ futures = new CopyOnWriteArrayList<Future<?>>( );
Maven3Launcher.setMavenExecutionListener( mavenExecutionListener );
@@ -217,15 +218,18 @@ public class Maven3Builder extends AbstractMavenBuilder implements DelegatingCal
private class FilterImpl extends MavenBuildProxy2.Filter<MavenBuildProxy2> implements Serializable {
private MavenBuildInformation mavenBuildInformation;
+
+ private Channel channel;
- public FilterImpl(MavenBuildProxy2 core, MavenBuildInformation mavenBuildInformation) {
+ public FilterImpl(MavenBuildProxy2 core, MavenBuildInformation mavenBuildInformation, Channel channel) {
super(core);
this.mavenBuildInformation = mavenBuildInformation;
+ this.channel = channel;
}
@Override
public void executeAsync(final BuildCallable<?,?> program) throws IOException {
- futures.add(Channel.current().callAsync(new AsyncInvoker(core,program)));
+ futures.add(channel.callAsync(new AsyncInvoker(core,program)));
}
private static final long serialVersionUID = 1L;
@@ -251,7 +255,7 @@ public class Maven3Builder extends AbstractMavenBuilder implements DelegatingCal
private final Map<ModuleName,List<ExecutedMojo>> executedMojosPerModule = new ConcurrentHashMap<ModuleName, List<ExecutedMojo>>();
- private final Map<ModuleName,List<MavenReporter>> reporters = new HashMap<ModuleName,List<MavenReporter>>();
+ private final Map<ModuleName,List<MavenReporter>> reporters = new ConcurrentHashMap<ModuleName,List<MavenReporter>>();
private final Map<ModuleName, Long> currentMojoStartPerModuleName = new ConcurrentHashMap<ModuleName, Long>();
@@ -259,13 +263,13 @@ public class Maven3Builder extends AbstractMavenBuilder implements DelegatingCal
public MavenExecutionListener(Maven3Builder maven3Builder) {
this.maven3Builder = maven3Builder;
- this.proxies = new HashMap<ModuleName, MavenBuildProxy2>(maven3Builder.proxies);
+ this.proxies = new ConcurrentHashMap<ModuleName, MavenBuildProxy2>(maven3Builder.proxies);
for (Entry<ModuleName,MavenBuildProxy2> e : this.proxies.entrySet())
{
- e.setValue(maven3Builder.new FilterImpl(e.getValue(), maven3Builder.mavenBuildInformation));
+ e.setValue(maven3Builder.new FilterImpl(e.getValue(), maven3Builder.mavenBuildInformation, Channel.current()));
executedMojosPerModule.put( e.getKey(), new CopyOnWriteArrayList<ExecutedMojo>() );
}
- this.reporters.putAll( new HashMap<ModuleName, List<MavenReporter>>(maven3Builder.reporters) );
+ this.reporters.putAll( new ConcurrentHashMap<ModuleName, List<MavenReporter>>(maven3Builder.reporters) );
this.eventLogger = new ExecutionEventLogger( new PrintStreamLogger( maven3Builder.listener.getLogger() ) );
} | [FIXED JENKINS-<I>] Maven3 with multiple threads does not work in Jenkins | jenkinsci_jenkins | train
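Note on the commit above: the key move is resolving Channel.current() on the thread where it is defined and handing the captured value to FilterImpl, rather than calling current() later from a worker thread where it would presumably resolve to null. The same hazard and fix in a Python sketch built on thread-local state (names are illustrative):

    import threading

    _current = threading.local()

    def current_channel():
        return getattr(_current, 'channel', None)

    class Filter:
        def __init__(self):
            self._channel = current_channel()   # capture on the owning thread

        def execute_async(self, fn):
            t = threading.Thread(target=fn, args=(self._channel,))
            t.start()
            return t

    _current.channel = 'main-channel'
    Filter().execute_async(print).join()        # prints main-channel, not None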
a5b6dbc89608491ca6e9e457e34fc8bf959104c4 | diff --git a/.ruby-version b/.ruby-version
index <HASH>..<HASH> 100644
--- a/.ruby-version
+++ b/.ruby-version
@@ -1 +1 @@
-2.1.2
+2.2.2
diff --git a/hab.gemspec b/hab.gemspec
index <HASH>..<HASH> 100644
--- a/hab.gemspec
+++ b/hab.gemspec
@@ -22,7 +22,7 @@ Gem::Specification.new do |spec|
spec.add_development_dependency 'vcr', '~> 2.9'
spec.add_development_dependency 'pry', '~> 0.10'
spec.add_development_dependency 'rubocop', '~> 0.32'
- spec.add_runtime_dependency 'habitica_client', '0.0.7'
+ spec.add_runtime_dependency 'habitica_client', '1.0.0'
spec.add_runtime_dependency 'rumoji', '~> 0.4'
spec.add_runtime_dependency 'colorize', '~> 0.7'
spec.add_runtime_dependency 'commander', '~> 4.3'
diff --git a/lib/hab.rb b/lib/hab.rb
index <HASH>..<HASH> 100644
--- a/lib/hab.rb
+++ b/lib/hab.rb
@@ -17,7 +17,7 @@ module Hab
end
def self.status
- Formatter.status(client.user)
+ Formatter.status(client)
end
def self.stats
@@ -25,36 +25,35 @@ module Hab
end
def self.add_task(task, type)
- client.user.tasks.create({
+ client.tasks.create(
text: task,
type: type
- })
+ )
end
def self.add_tasks(tasks, type)
tasks.each do |task|
- self.add_task(task, type)
+ add_task(task, type)
end
end
def self.stdin_tasks
- !STDIN.tty? ? STDIN.read.split("\n") : []
+ !STDIN.tty? ? STDIN.read.split("\n") : []
end
def self.habits(options)
- Formatter.tasks(client.user.tasks.habits, emoji: options.emoji)
+ Formatter.tasks(client.tasks.habits, emoji: options.emoji)
end
-
def self.dailies(options)
- tasks = Filter.by_status(client.user.tasks.dailies,
+ tasks = Filter.by_status(client.tasks.dailies,
options)
Formatter.tasks(tasks, emoji: options.emoji)
end
def self.todos(options)
- tasks = Filter.by_status(client.user.tasks.todos,
+ tasks = Filter.by_status(client.tasks.todos,
options)
Formatter.tasks(tasks, emoji: options.emoji)
diff --git a/lib/hab/filter.rb b/lib/hab/filter.rb
index <HASH>..<HASH> 100644
--- a/lib/hab/filter.rb
+++ b/lib/hab/filter.rb
@@ -5,9 +5,9 @@ module Hab
if options.all
tasks
elsif options.completed
- self.completed(tasks)
+ completed(tasks)
else
- self.uncompleted(tasks)
+ uncompleted(tasks)
end
end
diff --git a/lib/hab/formatter.rb b/lib/hab/formatter.rb
index <HASH>..<HASH> 100644
--- a/lib/hab/formatter.rb
+++ b/lib/hab/formatter.rb
@@ -10,10 +10,10 @@ module Hab
module ClassMethods
- def status(user)
- stats = user.stats
- dailies = user.tasks.dailies
- todos = user.tasks.todos
+ def status(client)
+ stats = client.user.stats
+ dailies = client.tasks.dailies
+ todos = client.tasks.todos
dailies_complete_count = dailies.count(&:completed?)
todos_complete_count = todos.count(&:completed?)
<<-BLOCK
@@ -58,7 +58,6 @@ BLOCK
.strip
.colorize(value_color(task.value))
end
-
end
end
diff --git a/lib/hab/formatter/colors.rb b/lib/hab/formatter/colors.rb
index <HASH>..<HASH> 100644
--- a/lib/hab/formatter/colors.rb
+++ b/lib/hab/formatter/colors.rb
@@ -8,7 +8,7 @@ module Hab::Formatter
-1..1 => :light_yellow,
-5..-1 => :yellow,
-10..-5 => :light_red
- }
+ }.freeze
STAT_COLORS = {
HP: :red,
@@ -16,10 +16,10 @@ module Hab::Formatter
EXP: :yellow,
DAILIES: :light_magenta,
TODOS: :light_blue
- }
+ }.freeze
def value_color(value)
- VALUE_COLORS.select { |key, color| key.cover? value }.values.first
+ VALUE_COLORS.select { |key, _color| key.cover? value }.values.first
end
def stat_color(label)
diff --git a/lib/hab/version.rb b/lib/hab/version.rb
index <HASH>..<HASH> 100644
--- a/lib/hab/version.rb
+++ b/lib/hab/version.rb
@@ -1,3 +1,3 @@
module Hab
- VERSION = '0.0.7'
+ VERSION = '0.0.9'.freeze
end | Updated to use latest version of habitica_client gem | wwqrd_hab | train |
fbf3d7dd3a19d218664aba556bf5d8a4f0038ca1 | diff --git a/src/main/java/org/mapdb/DataOutput2.java b/src/main/java/org/mapdb/DataOutput2.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/mapdb/DataOutput2.java
+++ b/src/main/java/org/mapdb/DataOutput2.java
@@ -31,17 +31,16 @@ import java.util.Arrays;
public final class DataOutput2 extends OutputStream implements DataOutput {
public byte[] buf;
- volatile public int pos;
+ public int pos;
+ public int sizeMask;
+
public DataOutput2(){
pos = 0;
buf = new byte[16]; //TODO take hint from serializer for initial size
+ sizeMask = 0xFFFFFFFF-(buf.length-1);
}
- public DataOutput2(byte[] buf){
- pos=0;
- this.buf = buf;
- }
public byte[] copyBytes(){
return Arrays.copyOf(buf, pos);
@@ -50,13 +49,29 @@ public final class DataOutput2 extends OutputStream implements DataOutput {
/**
* make sure there will be enough space in buffer to write N bytes
*/
- public void ensureAvail(final int n) {
- if (pos + n >= buf.length) {
- int newSize = Math.max(pos + n, buf.length * 2);
+ public void ensureAvail(int n) {
+
+ n+=pos;
+ if ((n&sizeMask)!=0) {
+ int newSize = buf.length;
+ while(newSize<n){
+ newSize<<=2;
+ sizeMask<<=2;
+ }
buf = Arrays.copyOf(buf, newSize);
}
}
+ public static int nextPowTwo(final int a)
+ {
+ int b = 1;
+ while (b < a)
+ {
+ b = b << 1;
+ }
+ return b;
+ }
+
@Override
public void write(final int b) throws IOException { | DataOutput2: small performance improvement | jankotek_mapdb | train |
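
The `ensureAvail` rewrite above swaps an exact-size comparison for a bit-mask test against a power-of-two capacity. A self-contained sketch of the same idea, recomputing the mask from `buf.length` so capacity and mask cannot drift apart (names are illustrative, not mapdb's actual fields):

    import java.util.Arrays;

    final class GrowingBuffer {
        private byte[] buf = new byte[16]; // capacity stays a power of two
        private int pos;

        // (required & ~(buf.length - 1)) != 0  <=>  required >= buf.length,
        // whenever buf.length is a power of two
        void ensureAvail(int n) {
            int required = pos + n;
            if ((required & ~(buf.length - 1)) != 0) {
                int newSize = buf.length;
                do {
                    newSize <<= 1; // keep doubling until it fits
                } while (newSize < required);
                buf = Arrays.copyOf(buf, newSize);
            }
        }
    }
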
681d4437d908218b26b79da7b84008d8b72d9431 | diff --git a/plugins/CoreHome/templates/datatable.js b/plugins/CoreHome/templates/datatable.js
index <HASH>..<HASH> 100644
--- a/plugins/CoreHome/templates/datatable.js
+++ b/plugins/CoreHome/templates/datatable.js
@@ -234,12 +234,17 @@ dataTable.prototype =
handleLimit: function(domElem)
{
var self = this;
+ if( self.parentId != '')
+ {
+ // no limit selector for subtables
+ return;
+ }
$('.limitSelection', domElem).append('<div><span>'+self.param.filter_limit+'</span></div><ul></ul>');
if(self.param.viewDataTable == 'table' || self.param.viewDataTable == 'tableAllColumns' || self.param.viewDataTable == 'tableGoals' || self.param.viewDataTable == 'ecommerceOrder' || self.param.viewDataTable == 'ecommerceAbandonedCart') {
$('.limitSelection ul', domElem).hide();
- var numbers = [5, 10, 25, 50, 100, 250];
+ var numbers = [5, 10, 25, 50, 100, 250, 500];
for(var i=0; i<numbers.length; i++) {
$('.limitSelection ul', domElem).append('<li value="'+numbers[i]+'"><span>'+numbers[i]+'</span></li>');
}
diff --git a/plugins/UserSettings/Controller.php b/plugins/UserSettings/Controller.php
index <HASH>..<HASH> 100644
--- a/plugins/UserSettings/Controller.php
+++ b/plugins/UserSettings/Controller.php
@@ -104,6 +104,7 @@ class Piwik_UserSettings_Controller extends Piwik_Controller
);
$view->disableShowAllViewsIcons();
$view->disableShowAllColumns();
+ $view->disableOffsetInformationAndPaginationControls();
$view->setColumnsToDisplay( array('label','nb_visits_percentage','nb_visits') );
$view->setColumnTranslation('label', Piwik_Translate('UserSettings_ColumnPlugin'));
$view->setColumnTranslation('nb_visits_percentage', str_replace(' ', ' ', Piwik_Translate('General_ColumnPercentageVisits'))); | Refs #<I>
* Fixing bug for Pages > tables, where a click to load a sub directory would trigger handleLimit and add the controller multiple times below each other
* Adding possibility to select <I> rows
* Disabling controller for the plugins report
git-svn-id: <URL> | matomo-org_matomo | train |
e1aae97696a019e7f61d87ec7779538714a2a3fb | diff --git a/src/main/java/zmq/ZMQ.java b/src/main/java/zmq/ZMQ.java
index <HASH>..<HASH> 100644
--- a/src/main/java/zmq/ZMQ.java
+++ b/src/main/java/zmq/ZMQ.java
@@ -633,7 +633,7 @@ public class ZMQ {
throw new IllegalArgumentException();
}
if (count == 0) {
- if (timeout_ == 0)
+ if (timeout_ <= 0)
return 0;
try {
Thread.sleep (timeout_); | fix bug where poll does not accept -1 as argument | zeromq_jeromq | train |
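
The one-character change matters because of `Thread.sleep` semantics: with no poll items registered, the 0MQ convention of `-1` meaning "block forever" used to fall through to `Thread.sleep(-1)`, which throws `IllegalArgumentException`; and with nothing registered there is also nothing that could ever wake the poller, so any non-positive timeout can simply return. The corrected branch in isolation (illustrative, not jeromq's actual code):

    final class PollSketch {
        /** timeout in milliseconds; <= 0 includes the 0MQ "infinite" value -1 */
        static int poll(int registeredItems, long timeout) throws InterruptedException {
            if (registeredItems == 0) {
                if (timeout <= 0) {
                    return 0; // Thread.sleep(-1) would throw IllegalArgumentException
                }
                Thread.sleep(timeout);
                return 0;
            }
            // a real implementation would block on a selector here
            return 0;
        }
    }
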
84d901aa1897f9983589b29d6aa81eca3774dced | diff --git a/discovery.go b/discovery.go
index <HASH>..<HASH> 100644
--- a/discovery.go
+++ b/discovery.go
@@ -1,7 +1,7 @@
/*
- * This file is part of arduino-create-agent.
+ * This file is part of board-discovery.
*
- * arduino-create-agent is free software; you can redistribute it and/or modify
+ * board-discovery is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
diff --git a/discovery_test.go b/discovery_test.go
index <HASH>..<HASH> 100644
--- a/discovery_test.go
+++ b/discovery_test.go
@@ -1,7 +1,7 @@
/*
- * This file is part of arduino-create-agent.
+ * This file is part of board-discovery.
*
- * arduino-create-agent is free software; you can redistribute it and/or modify
+ * board-discovery is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
diff --git a/network.go b/network.go
index <HASH>..<HASH> 100644
--- a/network.go
+++ b/network.go
@@ -1,7 +1,7 @@
/*
- * This file is part of arduino-create-agent.
+ * This file is part of board-discovery.
*
- * arduino-create-agent is free software; you can redistribute it and/or modify
+ * board-discovery is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
diff --git a/serial.go b/serial.go
index <HASH>..<HASH> 100644
--- a/serial.go
+++ b/serial.go
@@ -1,7 +1,7 @@
/*
- * This file is part of arduino-create-agent.
+ * This file is part of board-discovery.
*
- * arduino-create-agent is free software; you can redistribute it and/or modify
+ * board-discovery is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version. | fixing header arduino-create-agent -> board-discovery | arduino_board-discovery | train |
0b4485a06b9583dfe4a443b39854a874f9281180 | diff --git a/openshift/openshift/src/main/java/org/arquillian/cube/openshift/impl/client/OpenShiftSuiteLifecycleController.java b/openshift/openshift/src/main/java/org/arquillian/cube/openshift/impl/client/OpenShiftSuiteLifecycleController.java
index <HASH>..<HASH> 100644
--- a/openshift/openshift/src/main/java/org/arquillian/cube/openshift/impl/client/OpenShiftSuiteLifecycleController.java
+++ b/openshift/openshift/src/main/java/org/arquillian/cube/openshift/impl/client/OpenShiftSuiteLifecycleController.java
@@ -1,5 +1,6 @@
package org.arquillian.cube.openshift.impl.client;
+import org.arquillian.cube.kubernetes.api.Configuration;
import org.arquillian.cube.spi.event.CreateCube;
import org.arquillian.cube.spi.event.CubeControlEvent;
import org.arquillian.cube.spi.event.DestroyCube;
@@ -17,7 +18,12 @@ public class OpenShiftSuiteLifecycleController {
private Event<CubeControlEvent> controlEvent;
public void startAutoContainers(@Observes(precedence = 99) BeforeSuite event,
- CubeOpenShiftConfiguration openshiftConfiguration) {
+ Configuration conf) {
+ if (!(conf instanceof CubeOpenShiftConfiguration)) {
+ return;
+ }
+ CubeOpenShiftConfiguration openshiftConfiguration = (CubeOpenShiftConfiguration) conf;
+
for (String cubeId : openshiftConfiguration.getAutoStartContainers()) {
controlEvent.fire(new CreateCube(cubeId));
controlEvent.fire(new StartCube(cubeId));
@@ -25,7 +31,12 @@ public class OpenShiftSuiteLifecycleController {
}
public void stopAutoContainers(@Observes(precedence = -99) AfterSuite event,
- CubeOpenShiftConfiguration openshiftConfiguration) {
+ Configuration conf) {
+ if (!(conf instanceof CubeOpenShiftConfiguration)) {
+ return;
+ }
+ CubeOpenShiftConfiguration openshiftConfiguration = (CubeOpenShiftConfiguration) conf;
+
String[] autostart = openshiftConfiguration.getAutoStartContainers();
for (int i = autostart.length - 1; i > -1; i--) {
String cubeId = autostart[i]; | Openshift: Fix @Observer parameter (#<I>)
It's the Configuration class that gets injected, not
CubeOpenShiftConfiguration, so this second parameter will
never be resolved and thus the method will never be invoked.
This suppresses the following warning:
WARNING: Argument 1 for OpenShiftSuiteLifecycleController.startAutoContainers is null. It won't be invoked. | arquillian_arquillian-cube | train |
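
The message describes a resolution rule worth making concrete: an Arquillian-style dispatcher only invokes an observer when every parameter resolves to a produced instance, and nothing ever produces `CubeOpenShiftConfiguration` — only `Configuration` — so the stricter signature is skipped without an error. A toy resolver showing the failure mode (hypothetical code, not Arquillian's actual implementation):

    import java.lang.reflect.Method;
    import java.util.Map;

    final class MiniResolver {
        // Only Configuration is ever produced; an observer parameter typed
        // CubeOpenShiftConfiguration therefore never resolves, and the
        // observer method is skipped rather than failing loudly.
        static boolean canInvoke(Method observer, Map<Class<?>, Object> produced) {
            for (Class<?> param : observer.getParameterTypes()) {
                if (!produced.containsKey(param)) {
                    return false; // exact-type lookup in this toy version
                }
            }
            return true;
        }
    }
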
19486b4907b497d1a8c6ffea027d6f343192cd79 | diff --git a/doc/issue-service/sample.js b/doc/issue-service/sample.js
index <HASH>..<HASH> 100644
--- a/doc/issue-service/sample.js
+++ b/doc/issue-service/sample.js
@@ -15,7 +15,6 @@ var _ = require("lodash");
var ask = require("../ask");
// Configuration layer API, should be useful to automatically save your credentials
-// Remember to enable options in lib/config.js
var config = require("../config");
@@ -23,7 +22,8 @@ var config = require("../config");
module.exports = {
"meta": {
"desc": "Sample issue service",
- "repo": "user/repository"
+ "repo": "user/repository",
+ "conf": ["my-host.token"]
},
"connect": connect,
@@ -107,7 +107,6 @@ function guessRepoFromUrl (url) {
return match && match[1];
}
-// Assuming option "my-host.token" has been enabled in "../config.js"
// Assuming client.setToken sets authorization token for next calls
function connect (conf) {
// Instantiating API client, this is the one that will be passed to other methods
@@ -155,6 +154,7 @@ function createToken (client) {
});
}
+// Persist credentials to service-specific option
function saveToken (token) {
return config.set("my-host.token", token).then(_.constant(token));
} | Add 'conf' option in sample service | naholyr_github-todos | train |
d921ca3a17c7c677968bc68839e7405419a0f33f | diff --git a/src/JsonCompiler.php b/src/JsonCompiler.php
index <HASH>..<HASH> 100644
--- a/src/JsonCompiler.php
+++ b/src/JsonCompiler.php
@@ -73,7 +73,7 @@ class JsonCompiler
return json_decode(file_get_contents($path), true);
}
- $real = realpath($path);
+ $real = $this->normalize($path);
$cache = str_replace($this->stripPath, '', $real);
$cache = str_replace(['\\', '/'], '_', $cache);
$cache = $this->cacheDir . DIRECTORY_SEPARATOR . $cache . '.php';
@@ -93,4 +93,44 @@ class JsonCompiler
return $data;
}
+
+ /**
+ * Resolve relative paths without using realpath (which causes an
+ * unnecessary fstat).
+ *
+ * @param $path
+ *
+ * @return string
+ */
+ private function normalize($path)
+ {
+ static $replace = ['/', '\\'];
+
+ $parts = explode(
+ DIRECTORY_SEPARATOR,
+ // Normalize path separators
+ str_replace($replace, DIRECTORY_SEPARATOR, $path)
+ );
+
+ $segments = [];
+ foreach ($parts as $part) {
+ if ($part === '' || $part === '.') {
+ continue;
+ }
+ if ($part === '..') {
+ array_pop($segments);
+ } else {
+ $segments[] = $part;
+ }
+ }
+
+ $resolved = implode(DIRECTORY_SEPARATOR, $segments);
+
+ // Add a leading slash if necessary.
+ if (isset($parts[0]) && $parts[0] === '') {
+ $resolved = DIRECTORY_SEPARATOR . $resolved;
+ }
+
+ return $resolved;
+ }
}
diff --git a/tests/JsonCompilerTest.php b/tests/JsonCompilerTest.php
index <HASH>..<HASH> 100644
--- a/tests/JsonCompilerTest.php
+++ b/tests/JsonCompilerTest.php
@@ -73,4 +73,31 @@ class JsonCompilerTest extends \PHPUnit_Framework_TestCase
$entries = array_diff(scandir($c->getCacheDir()), ['.', '..']);
$this->assertEmpty($entries);
}
+
+ public function pathProvider()
+ {
+ return [
+ ['/foo/baz/bar.qux', '/foo/baz/bar.qux'],
+ ['/foo/baz/../bar.qux', '/foo/bar.qux'],
+ ['/foo/baz/./bar.qux', '/foo/baz/bar.qux'],
+ ['/foo/baz/../../bar.qux', '/bar.qux'],
+ ['/../../bar.qux', '/bar.qux'],
+ // Extra slashes
+ ['/foo//baz///bar.qux', '/foo/baz/bar.qux'],
+ // Relative with no leading slash
+ ['foo/baz/../bar.qux', 'foo/bar.qux'],
+ ['\\foo\\baz\\..\\.\\bar.qux', '/foo/bar.qux'],
+ ];
+ }
+
+ /**
+ * @dataProvider pathProvider
+ */
+ public function testResolvesRelativePaths($path, $resolved)
+ {
+ $j = new JsonCompiler();
+ $meth = new \ReflectionMethod('Aws\JsonCompiler', 'normalize');
+ $meth->setAccessible(true);
+ $this->assertEquals($resolved, $meth->invoke($j, $path));
+ }
} | Avoiding stat of file with realpath | aws_aws-sdk-php | train |
3a320724100ab05531d8d18ca8cb279a8e4f5c7f | diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index <HASH>..<HASH> 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -34,6 +34,9 @@ Bug fixes
allowing the ``encoding`` and ``unlimited_dims`` options with ``save_mfdataset``.
(:issue:`6684`)
By `Travis A. O'Brien <https://github.com/taobrienlbl>`_.
+- :py:meth:`Dataset.where` with ``drop=True`` now behaves correctly with mixed dimensions.
+ (:issue:`6227`, :pull:`6690`)
+ By `Michael Niklas <https://github.com/headtr1ck>`_.
Documentation
~~~~~~~~~~~~~
diff --git a/xarray/core/common.py b/xarray/core/common.py
index <HASH>..<HASH> 100644
--- a/xarray/core/common.py
+++ b/xarray/core/common.py
@@ -1362,18 +1362,23 @@ class DataWithCoords(AttrAccessMixin):
f"cond argument is {cond!r} but must be a {Dataset!r} or {DataArray!r}"
)
- # align so we can use integer indexing
self, cond = align(self, cond) # type: ignore[assignment]
- # get cond with the minimal size needed for the Dataset
- if isinstance(cond, Dataset):
- clipcond = cond.to_array().any("variable")
- else:
- clipcond = cond
+ def _dataarray_indexer(dim: Hashable) -> DataArray:
+ return cond.any(dim=(d for d in cond.dims if d != dim))
+
+ def _dataset_indexer(dim: Hashable) -> DataArray:
+ cond_wdim = cond.drop(var for var in cond if dim not in cond[var].dims)
+ keepany = cond_wdim.any(dim=(d for d in cond.dims.keys() if d != dim))
+ return keepany.to_array().any("variable")
+
+ _get_indexer = (
+ _dataarray_indexer if isinstance(cond, DataArray) else _dataset_indexer
+ )
- # clip the data corresponding to coordinate dims that are not used
- nonzeros = zip(clipcond.dims, np.nonzero(clipcond.values))
- indexers = {k: np.unique(v) for k, v in nonzeros}
+ indexers = {}
+ for dim in cond.sizes.keys():
+ indexers[dim] = _get_indexer(dim)
self = self.isel(**indexers)
cond = cond.isel(**indexers)
diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py
index <HASH>..<HASH> 100644
--- a/xarray/tests/test_dataset.py
+++ b/xarray/tests/test_dataset.py
@@ -4715,6 +4715,25 @@ class TestDataset:
actual8 = ds.where(ds > 0, drop=True)
assert_identical(expected8, actual8)
+ # mixed dimensions: PR#6690, Issue#6227
+ ds = xr.Dataset(
+ {
+ "a": ("x", [1, 2, 3]),
+ "b": ("y", [2, 3, 4]),
+ "c": (("x", "y"), np.arange(9).reshape((3, 3))),
+ }
+ )
+ expected9 = xr.Dataset(
+ {
+ "a": ("x", [np.nan, 3]),
+ "b": ("y", [np.nan, 3, 4]),
+ "c": (("x", "y"), np.arange(3.0, 9.0).reshape((2, 3))),
+ }
+ )
+ actual9 = ds.where(ds > 2, drop=True)
+ assert actual9.sizes["x"] == 2
+ assert_identical(expected9, actual9)
+
def test_where_drop_empty(self) -> None:
# regression test for GH1341
array = DataArray(np.random.rand(100, 10), dims=["nCells", "nVertLevels"]) | Fix Dataset.where with drop=True and mixed dims (#<I>)
* support for where with drop=True and mixed dims
* add PR and issue nbr to test
* add changes to whats-new | pydata_xarray | train |
038a1e4e3b912170c7a21ea5605c60a95a3ad82f | diff --git a/lib/array.js b/lib/array.js
index <HASH>..<HASH> 100644
--- a/lib/array.js
+++ b/lib/array.js
@@ -12,6 +12,13 @@ module.exports.max = function(array)
return Math.max.apply(Math, array);
};
+//Get the min value in an array
+module.exports.min = function(obj)
+{
+ //Return the min value
+ return Math.min.apply(Math, obj);
+};
+
//Return a range of values
module.exports.range = function(start, end, step)
{ | lib/array.js: added array.min method to get the minimum value in an array | jmjuanes_utily | train |
4eaddfe0f17cee9d7da6ec70a9684ca8c97e6b41 | diff --git a/core/client/src/main/java/alluxio/client/block/LocalBlockInStream.java b/core/client/src/main/java/alluxio/client/block/LocalBlockInStream.java
index <HASH>..<HASH> 100644
--- a/core/client/src/main/java/alluxio/client/block/LocalBlockInStream.java
+++ b/core/client/src/main/java/alluxio/client/block/LocalBlockInStream.java
@@ -63,8 +63,7 @@ public final class LocalBlockInStream extends BufferedBlockInStream {
if (result == null) {
throw new IOException(ExceptionMessage.BLOCK_NOT_LOCALLY_AVAILABLE.getMessage(mBlockId));
}
- mReader = new LocalFileBlockReader(result.getBlockPath());
- mCloser.register(mReader);
+ mReader = mCloser.register(new LocalFileBlockReader(result.getBlockPath()));
} catch (IOException e) {
mCloser.close();
throw e;
diff --git a/core/client/src/main/java/alluxio/client/block/LocalBlockOutStream.java b/core/client/src/main/java/alluxio/client/block/LocalBlockOutStream.java
index <HASH>..<HASH> 100644
--- a/core/client/src/main/java/alluxio/client/block/LocalBlockOutStream.java
+++ b/core/client/src/main/java/alluxio/client/block/LocalBlockOutStream.java
@@ -85,6 +85,8 @@ public final class LocalBlockOutStream extends BufferedBlockOutStream {
mBlockWorkerClient.cancelBlock(mBlockId);
} catch (AlluxioException e) {
throw mCloser.rethrow(new IOException(e));
+ } catch (Throwable e) { // must catch Throwable
+ throw mCloser.rethrow(e); // IOException will be thrown as-is
} finally {
mClosed = true;
mCloser.close();
diff --git a/core/client/src/main/java/alluxio/client/file/FileSystemContext.java b/core/client/src/main/java/alluxio/client/file/FileSystemContext.java
index <HASH>..<HASH> 100644
--- a/core/client/src/main/java/alluxio/client/file/FileSystemContext.java
+++ b/core/client/src/main/java/alluxio/client/file/FileSystemContext.java
@@ -68,7 +68,7 @@ public enum FileSystemContext {
/**
* Acquires a file system master client from the file system master client pool.
*
- * @return the acquired block master client
+ * @return the acquired file system master client
*/
public FileSystemMasterClient acquireMasterClient() {
return mFileSystemMasterClientPool.acquire();
@@ -78,7 +78,7 @@ public enum FileSystemContext {
* Acquires a file system master client from the file system master client pool. The resource is
* {@code Closeable}.
*
- * @return the acquired block master client resource
+ * @return the acquired file system master client resource
*/
public CloseableResource<FileSystemMasterClient> acquireMasterClientResource() {
return new CloseableResource<FileSystemMasterClient>(mFileSystemMasterClientPool.acquire()) {
diff --git a/core/client/src/main/java/alluxio/client/file/UnderFileSystemFileOutStream.java b/core/client/src/main/java/alluxio/client/file/UnderFileSystemFileOutStream.java
index <HASH>..<HASH> 100644
--- a/core/client/src/main/java/alluxio/client/file/UnderFileSystemFileOutStream.java
+++ b/core/client/src/main/java/alluxio/client/file/UnderFileSystemFileOutStream.java
@@ -18,6 +18,7 @@ import alluxio.exception.PreconditionMessage;
import alluxio.util.io.BufferUtils;
import com.google.common.base.Preconditions;
+import com.google.common.io.Closer;
import java.io.IOException;
import java.io.OutputStream;
@@ -42,7 +43,8 @@ public final class UnderFileSystemFileOutStream extends OutputStream {
private final InetSocketAddress mAddress;
/** Worker file id referencing the file to write to. */
private final long mUfsFileId;
-
+ /** Used to manage closeable resources. */
+ private final Closer mCloser;
/** If the stream is closed, this can only go from false to true. */
private boolean mClosed;
/** Number of bytes flushed to the worker. */
@@ -88,10 +90,11 @@ public final class UnderFileSystemFileOutStream extends OutputStream {
mBuffer = allocateBuffer();
mAddress = address;
mUfsFileId = ufsFileId;
- mWriter = UnderFileSystemFileWriter.Factory.create();
mFlushedBytes = 0;
mWrittenBytes = 0;
mClosed = false;
+ mCloser = Closer.create();
+ mWriter = mCloser.register(UnderFileSystemFileWriter.Factory.create());
}
@Override
@@ -99,10 +102,16 @@ public final class UnderFileSystemFileOutStream extends OutputStream {
if (mClosed) {
return;
}
- if (mFlushedBytes < mWrittenBytes) {
- flush();
+ try {
+ if (mFlushedBytes < mWrittenBytes) {
+ flush();
+ }
+ } catch (Throwable e) { // must catch Throwable
+ throw mCloser.rethrow(e); // IOException will be thrown as-is
+ } finally {
+ mClosed = true;
+ mCloser.close();
}
- mClosed = true;
}
@Override | [SMALLFIX] More cleanup on using Closer | Alluxio_alluxio | train |
b22908a423d8f0806f94153aa15aa3d23fa47f9e | diff --git a/lib/crash_log/configuration.rb b/lib/crash_log/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/crash_log/configuration.rb
+++ b/lib/crash_log/configuration.rb
@@ -239,7 +239,9 @@ module CrashLog
def development_mode=(flag)
self[:development_mode] = flag
self.level = Logger::DEBUG
- new_logger.level = self.level if self.logger.respond_to?(:level=)
+ if new_logger
+ new_logger.level = self.level if self.logger.respond_to?(:level=)
+ end
end
private | Ensure we have a logger if setting logger level | crashlog_crashlog | train |
4b57ee9deb80889871f3399d5a6c98e8ccd95224 | diff --git a/src/main/java/nl/topicus/jdbc/statement/CloudSpannerPreparedStatement.java b/src/main/java/nl/topicus/jdbc/statement/CloudSpannerPreparedStatement.java
index <HASH>..<HASH> 100644
--- a/src/main/java/nl/topicus/jdbc/statement/CloudSpannerPreparedStatement.java
+++ b/src/main/java/nl/topicus/jdbc/statement/CloudSpannerPreparedStatement.java
@@ -29,6 +29,7 @@ import nl.topicus.jdbc.resultset.CloudSpannerResultSet;
import com.google.cloud.spanner.DatabaseClient;
import com.google.cloud.spanner.Key;
+import com.google.cloud.spanner.KeySet;
import com.google.cloud.spanner.Mutation;
import com.google.cloud.spanner.Mutation.WriteBuilder;
@@ -279,9 +280,18 @@ public class CloudSpannerPreparedStatement extends AbstractCloudSpannerPreparedS
{
String table = delete.getTable().getFullyQualifiedName();
Expression where = delete.getWhere();
- Key.Builder keyBuilder = Key.newBuilder();
- visitDeleteWhereClause(where, keyBuilder);
- writeMutation(Mutation.delete(table, keyBuilder.build()));
+ if (where == null)
+ {
+ // Delete all
+ writeMutation(Mutation.delete(table, KeySet.all()));
+ }
+ else
+ {
+ // Delete one
+ Key.Builder keyBuilder = Key.newBuilder();
+ visitDeleteWhereClause(where, keyBuilder);
+ writeMutation(Mutation.delete(table, keyBuilder.build()));
+ }
return 1;
} | added support for delete all ("DELETE FROM <TABLE>") | olavloite_spanner-jdbc | train |
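
Seen from the JDBC side, the new branch means an unqualified DELETE maps onto a single `Mutation.delete(table, KeySet.all())` rather than failing for lack of a key. A hedged usage sketch (URL and table names are placeholders; the constant return of 1 mirrors the method above, which does not count deleted rows):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;

    final class DeleteAllExample {
        static void clearTable(String url) throws SQLException {
            try (Connection conn = DriverManager.getConnection(url);
                 Statement stmt = conn.createStatement()) {
                stmt.executeUpdate("DELETE FROM Singers");                   // all rows: KeySet.all()
                stmt.executeUpdate("DELETE FROM Singers WHERE SingerId = 1"); // single-key delete
            }
        }
    }
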
6856a9baca709c9f5f10e66199ca3a67e76c1fd1 | diff --git a/timeprovider/faketimeprovider/fake_time_provider.go b/timeprovider/faketimeprovider/fake_time_provider.go
index <HASH>..<HASH> 100644
--- a/timeprovider/faketimeprovider/fake_time_provider.go
+++ b/timeprovider/faketimeprovider/fake_time_provider.go
@@ -71,6 +71,13 @@ func (provider *FakeTimeProvider) NewTicker(d time.Duration) timeprovider.Ticker
return NewFakeTicker(provider, d)
}
+func (provider *FakeTimeProvider) WatcherCount() int {
+ provider.Mutex.Lock()
+ defer provider.Mutex.Unlock()
+
+ return len(provider.watchers)
+}
+
func (provider *FakeTimeProvider) addTimeWatcher(tw timeWatcher) {
provider.Mutex.Lock()
provider.watchers[tw] = struct{}{}
diff --git a/timeprovider/faketimeprovider/fake_time_provider_test.go b/timeprovider/faketimeprovider/fake_time_provider_test.go
index <HASH>..<HASH> 100644
--- a/timeprovider/faketimeprovider/fake_time_provider_test.go
+++ b/timeprovider/faketimeprovider/fake_time_provider_test.go
@@ -49,4 +49,26 @@ var _ = Describe("FakeTimeProvider", func() {
Eventually(doneSleeping).Should(BeClosed())
})
})
+
+ Describe("WatcherCount", func() {
+ Context("when a timer is created", func() {
+ It("increments the watcher count", func() {
+ timeProvider.NewTimer(time.Second)
+ Ω(timeProvider.WatcherCount()).Should(Equal(1))
+
+ timeProvider.NewTimer(2 * time.Second)
+ Ω(timeProvider.WatcherCount()).Should(Equal(2))
+ })
+ })
+
+ Context("when a timer fires", func() {
+ It("increments the watcher count", func() {
+ timeProvider.NewTimer(time.Second)
+ Ω(timeProvider.WatcherCount()).Should(Equal(1))
+
+ timeProvider.Increment(time.Second)
+ Ω(timeProvider.WatcherCount()).Should(Equal(0))
+ })
+ })
+ })
}) | Add WatcherCount to help with synchronization
[#<I>] | cloudfoundry-attic_gunk | train |
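
`WatcherCount` gives tests a deterministic rendezvous: instead of sleeping and hoping the code under test has armed its timer, a test polls the count and only then advances the fake clock. The same hook sketched in Java (the Go original is in the diff; everything below is hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    final class FakeClock {
        private final List<Runnable> watchers = new ArrayList<>();

        synchronized void newTimer(Runnable onFire) { watchers.add(onFire); }

        synchronized int watcherCount() { return watchers.size(); }

        void advance() {
            List<Runnable> toFire;
            synchronized (this) {          // snapshot under the lock,
                toFire = new ArrayList<>(watchers);
                watchers.clear();
            }
            toFire.forEach(Runnable::run); // fire outside it
        }
    }

    // test side: spin until clock.watcherCount() == 1, then clock.advance()
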
11fdfd1670e4bb3fd0e6a3ba64784ce3bf935367 | diff --git a/src/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepository.php b/src/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepository.php
index <HASH>..<HASH> 100644
--- a/src/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepository.php
+++ b/src/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepository.php
@@ -85,7 +85,7 @@ class DBALReadRepository extends AbstractDBALRepository implements ReadRepositor
* @param Query $query
* @return Natural
*/
- public function searchTotalItems(Query $query)
+ public function searchTotalLabels(Query $query)
{
$like = $this->createLike($this->getQueryBuilder());
diff --git a/src/Label/ReadModels/JSON/Repository/ReadRepositoryInterface.php b/src/Label/ReadModels/JSON/Repository/ReadRepositoryInterface.php
index <HASH>..<HASH> 100644
--- a/src/Label/ReadModels/JSON/Repository/ReadRepositoryInterface.php
+++ b/src/Label/ReadModels/JSON/Repository/ReadRepositoryInterface.php
@@ -30,5 +30,5 @@ interface ReadRepositoryInterface
* @param Query $query
* @return Natural
*/
- public function searchTotalItems(Query $query);
+ public function searchTotalLabels(Query $query);
}
diff --git a/test/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepositoryTest.php b/test/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepositoryTest.php
index <HASH>..<HASH> 100644
--- a/test/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepositoryTest.php
+++ b/test/Label/ReadModels/JSON/Repository/Doctrine/DBALReadRepositoryTest.php
@@ -222,9 +222,9 @@ class DBALReadRepositoryTest extends BaseDBALRepositoryTest
{
$search = new Query(new StringLiteral('lab'));
- $totalItems = $this->dbalReadRepository->searchTotalItems($search);
+ $totalLabels = $this->dbalReadRepository->searchTotalLabels($search);
- $this->assertEquals(10, $totalItems);
+ $this->assertEquals(10, $totalLabels);
}
/**
@@ -234,8 +234,8 @@ class DBALReadRepositoryTest extends BaseDBALRepositoryTest
{
$search = new Query(new StringLiteral('nothing'));
- $totalItems = $this->dbalReadRepository->searchTotalItems($search);
+ $totalLabels = $this->dbalReadRepository->searchTotalLabels($search);
- $this->assertEquals(0, $totalItems);
+ $this->assertEquals(0, $totalLabels);
}
} | III-<I> Rename from searchTotaltems to searchTotalLabels for label query count. | cultuurnet_udb3-php | train |
eaa96376db1296db998281cb8e2ae3efdd20c1f6 | diff --git a/lib/sfrp/mono/type.rb b/lib/sfrp/mono/type.rb
index <HASH>..<HASH> 100644
--- a/lib/sfrp/mono/type.rb
+++ b/lib/sfrp/mono/type.rb
@@ -113,6 +113,7 @@ module SFRP
# Generate statement to clean up objects of this types.
def gen_mark_cleanup_stmt(src_set, stmts)
+ return unless need_mark?(src_set)
return if src_set.memory(@str) == 0
stmts << L.stmt("#{low_allocator_str}(1)")
end | fix bug in mark-reset statement | sfrp_sfrp | train
c6344a4f1091a9f02ee18646a05bd2128459afd7 | diff --git a/django_zappa/management/commands/deploy.py b/django_zappa/management/commands/deploy.py
index <HASH>..<HASH> 100644
--- a/django_zappa/management/commands/deploy.py
+++ b/django_zappa/management/commands/deploy.py
@@ -84,19 +84,17 @@ class Command(BaseCommand):
# Add this environment's Django settings to that zipfile
with open(settings_file, 'r') as f:
contents = f.read()
- all_contents = contents + '\n# Automatically added by Zappa: \nSCRIPT_NAME=\'/' + api_stage + '\''
+ all_contents = contents + '\n# Automatically added by Zappa:\nSCRIPT_NAME=\'/' + api_stage + '\'\n'
+ f.close()
- temp = tempfile.NamedTemporaryFile(delete=False)
- temp.write(all_contents)
- temp.flush()
- temp.seek(0)
- temp.close()
- settings_file = temp.name
+ with open('zappa_settings.py', 'w') as f:
+ f.write(all_contents)
with zipfile.ZipFile(zip_path, 'a') as lambda_zip:
- lambda_zip.write(settings_file, 'zappa_settings.py')
+ lambda_zip.write('zappa_settings.py', 'zappa_settings.py')
lambda_zip.close()
- os.unlink(temp.name)
+
+ os.unlink('zappa_settings.py')
# Upload it to S3
zip_arn = zappa.upload_to_s3(zip_path, s3_bucket_name)
diff --git a/django_zappa/management/commands/update.py b/django_zappa/management/commands/update.py
index <HASH>..<HASH> 100644
--- a/django_zappa/management/commands/update.py
+++ b/django_zappa/management/commands/update.py
@@ -77,22 +77,20 @@ class Command(BaseCommand):
lambda_name = project_name + '-' + api_stage
zip_path = zappa.create_lambda_zip(lambda_name, handler_file=handler_file)
- # Add this environment's Django settings to that zipfile
+ #Add this environment's Django settings to that zipfile
with open(settings_file, 'r') as f:
contents = f.read()
- all_contents = contents + '\n# Automatically added by Zappa: \nSCRIPT_NAME=\'/' + api_stage + '\''
+ all_contents = contents + '\n# Automatically added by Zappa:\nSCRIPT_NAME=\'/' + api_stage + '\'\n'
+ f.close()
- temp = tempfile.NamedTemporaryFile(delete=False)
- temp.write(all_contents)
- temp.flush()
- temp.seek(0)
- temp.close()
- settings_file = temp.name
+ with open('zappa_settings.py', 'w') as f:
+ f.write(all_contents)
with zipfile.ZipFile(zip_path, 'a') as lambda_zip:
- lambda_zip.write(settings_file, 'zappa_settings.py')
+ lambda_zip.write('zappa_settings.py', 'zappa_settings.py')
lambda_zip.close()
- os.unlink(temp.name)
+
+ os.unlink('zappa_settings.py')
# Upload it to S3
zip_arn = zappa.upload_to_s3(zip_path, s3_bucket_name)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ with open(os.path.join(os.path.dirname(__file__), 'requirements.txt')) as f:
setup(
name='django-zappa',
- version='0.7.2',
+ version='0.7.3',
packages=['django_zappa'],
install_requires=required,
include_package_data=True, | <I> - tempfile was acting funny | Miserlou_django-zappa | train |
21d511c1971b2c83afbf29127447d29725eee95a | diff --git a/beakerx/beakerx/install.py b/beakerx/beakerx/install.py
index <HASH>..<HASH> 100644
--- a/beakerx/beakerx/install.py
+++ b/beakerx/beakerx/install.py
@@ -26,6 +26,7 @@ import tempfile
from string import Template
from jupyter_client.kernelspecapp import KernelSpecManager
+from jupyter_core import paths
from traitlets.config.manager import BaseJSONConfigManager
from distutils import log
@@ -127,6 +128,11 @@ def _install_magics():
file.write("c.InteractiveShellApp.extensions = ['beakerx.groovy_magic']")
file.close()
+def _set_conf_privileges():
+ config_path = os.path.join(paths.jupyter_config_dir(), 'beakerx.json')
+ if pathlib.Path(config_path).exists():
+ os.chmod(config_path, 0o600)
+
def _pretty(it):
return json.dumps(it, indent=2)
@@ -187,6 +193,7 @@ def _install_beakerx(args):
_copy_icons()
_install_kernelspec_manager(args.prefix)
_install_magics()
+ _set_conf_privileges()
def install(): | #<I> beaker config is moved to .bak by beakerx-install (#<I>)
* #<I> beaker config is moved to .bak by beakerx-install
* #<I> changing beaker.json privileges on beakerx-install | twosigma_beakerx | train |
534bedc848c3eb1ed35035455c7adad56ff17d9f | diff --git a/springfox-swagger-ui/src/web/js/springfox.js b/springfox-swagger-ui/src/web/js/springfox.js
index <HASH>..<HASH> 100644
--- a/springfox-swagger-ui/src/web/js/springfox.js
+++ b/springfox-swagger-ui/src/web/js/springfox.js
@@ -16,6 +16,7 @@ $(function() {
}
};
window.springfox = springfox;
+ window.oAuthRedirectUrl = springfox.baseUrl() + '/o2c.html'
$('#select_baseUrl').change(function() {
window.swaggerUi.headerView.trigger('update-swagger-ui', { | Added the oAuthRedirectUrl location relative to the baseUrl
fixes #<I> | springfox_springfox | train |
b3ae959c76666fbe81fecd859db9b81df8960fa4 | diff --git a/src/buildfile.js b/src/buildfile.js
index <HASH>..<HASH> 100644
--- a/src/buildfile.js
+++ b/src/buildfile.js
@@ -3,11 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
-const { createModuleDescription } = require('./vs/base/buildfile');
-
-function entrypoint(name) {
- return [{ name: name, include: [], exclude: ['vs/css', 'vs/nls'] }];
-}
+const { createModuleDescription, createEditorWorkerModuleDescription } = require('./vs/base/buildfile');
exports.base = [{
name: 'vs/base/common/worker/simpleWorker',
@@ -17,9 +13,9 @@ exports.base = [{
dest: 'vs/base/worker/workerMain.js'
}];
-exports.workerExtensionHost = [createModuleDescription('vs/workbench/services/extensions/worker/extensionHostWorker')];
-exports.workerNotebook = [createModuleDescription('vs/workbench/contrib/notebook/common/services/notebookSimpleWorker')];
-exports.workerLanguageDetection = [createModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionSimpleWorker')];
+exports.workerExtensionHost = [createEditorWorkerModuleDescription('vs/workbench/services/extensions/worker/extensionHostWorker')];
+exports.workerNotebook = [createEditorWorkerModuleDescription('vs/workbench/contrib/notebook/common/services/notebookSimpleWorker')];
+exports.workerLanguageDetection = [createEditorWorkerModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionSimpleWorker')];
exports.workbenchDesktop = require('./vs/workbench/buildfile.desktop').collectModules();
exports.workbenchWeb = require('./vs/workbench/buildfile.web').collectModules(); | Avoid duplicating modules in worker bundles | Microsoft_vscode | train |
a7763704db2db8528f3d3052dbc9a0c4ff0a1b27 | diff --git a/test/freeze-html.js b/test/freeze-html.js
index <HASH>..<HASH> 100644
--- a/test/freeze-html.js
+++ b/test/freeze-html.js
@@ -98,6 +98,28 @@ describe('freeze-html', function() {
}
})
.fail(cb);
+ });
+
+ it('process as string should process ' + test.name, function(cb) {
+
+ // proccess it
+ BORSCHIK
+ .api({
+ 'freeze': true,
+ 'inputString': test.in,
+ 'basePath': PATH.dirname(fakeFile),
+ 'minimize': true,
+ 'tech': 'html'
+ })
+ .then(function(result) {
+ try {
+ ASSERT.equal(result, test.out);
+ cb();
+ } catch(e) {
+ cb(e);
+ }
+ })
+ .fail(cb);
})
});
diff --git a/test/freeze-inline.js b/test/freeze-inline.js
index <HASH>..<HASH> 100644
--- a/test/freeze-inline.js
+++ b/test/freeze-inline.js
@@ -95,6 +95,28 @@ describe('freeze-inline:', function() {
.fail(cb);
});
+ it('process as string ' + test.name, function(cb) {
+
+ // proccess it
+ BORSCHIK
+ .api({
+ 'comments': false,
+ 'freeze': true,
+ 'inputString': FS.readFileSync(input, 'utf-8'),
+ 'basePath': basePath,
+ 'minimize': false,
+ 'tech': ext.replace('.', '')
+ })
+ .then(function(result) {
+ ASSERT.equal(
+ result,
+ FS.readFileSync(expect, 'utf-8')
+ );
+ cb();
+ }, cb)
+ .fail(cb);
+ });
+
});
} | Borschik should support processing strings as input or output #<I>
add more tests | borschik_borschik | train |
4733bbecbba7042aff6af60972111b6f87d858fc | diff --git a/config.js b/config.js
index <HASH>..<HASH> 100644
--- a/config.js
+++ b/config.js
@@ -16,8 +16,8 @@ config.history = {
config.tradingAdvisor = {
enabled: true,
- method: 'Exponential Moving Average Crossovers',
- methodSlug: 'EMA',
+ method: 'moving average convergence divergence',
+ methodSlug: 'MACD',
candleSize: 2
}
diff --git a/core/candleManager.js b/core/candleManager.js
index <HASH>..<HASH> 100644
--- a/core/candleManager.js
+++ b/core/candleManager.js
@@ -92,7 +92,7 @@ Manager.prototype.processHistory = function(history) {
} else {
// we have complete history
log.info('Full history available');
- this.emit('prepared', history);
+ this.emit('history', history);
}
}
}
diff --git a/gekko.js b/gekko.js
index <HASH>..<HASH> 100644
--- a/gekko.js
+++ b/gekko.js
@@ -142,7 +142,7 @@ var watchFeeds = function(next) {
if(actor.processTrade)
market.on('trade', actor.processTrade);
if(actor.init)
- advisor.on('history', actor.init);
+ market.on('history', actor.init);
}
if(_.contains(subscriptions, 'advice feed')) {
diff --git a/methods/moving-average-convergence-divergence.js b/methods/moving-average-convergence-divergence.js
index <HASH>..<HASH> 100644
--- a/methods/moving-average-convergence-divergence.js
+++ b/methods/moving-average-convergence-divergence.js
@@ -41,13 +41,13 @@ var Util = require('util');
var EventEmitter = require('events').EventEmitter;
Util.inherits(TradingMethod, EventEmitter);
-TradingMethod.prototype.init = function (history) {
+TradingMethod.prototype.init = function(history) {
_.each(history.candles, function (candle) {
this.calculateEMAs(candle);
}, this);
this.lastCandle = _.last(history.candles);
-
+ this.log();
this.calculateAdvice();
}
@@ -62,7 +62,6 @@ TradingMethod.prototype.update = function (candle) {
// add a price and calculate the EMAs and
// the diff for that price
TradingMethod.prototype.calculateEMAs = function (candle) {
-
_.each(['short', 'long'], function (type) {
this.ema[type].update(candle.p);
}, this);
@@ -74,19 +73,11 @@ TradingMethod.prototype.calculateEMAs = function (candle) {
// for debugging purposes: log the last calculated
// EMAs and diff.
TradingMethod.prototype.log = function () {
-
- // Use local MACD debug flag rather than global.
-
- if(settings.debug) {
- log.info('calced EMA properties for candle:');
- _.each(['short', 'long', 'signal'], function (e) {
- if(config.watch.exchange === 'cexio')
- log.info('\t', e, 'ema', this.ema[e].result.toFixed(8));
- else
- log.info('\t', e, 'ema', this.ema[e].result.toFixed(3));
- }, this);
- log.info('\t diff', this.diff.toFixed(4));
- }
+ log.debug('calced EMA properties for candle:');
+ _.each(['short', 'long', 'signal'], function (e) {
+ log.debug('\t', e, 'ema', this.ema[e].result.toFixed(8));
+ }, this);
+ log.debug('\t macd', this.diff.toFixed(4));
}
@@ -101,8 +92,9 @@ TradingMethod.prototype.calculateAdvice = function () {
// @ cexio we need to be more precise due to low prices
// and low margins on trade. All others use 3 digits.
+
var digits = 3;
- if(config.watch.exchange === 'cexio')
+ if(config.normal.exchange === 'cexio')
digits = 8;
var macd = this.diff.toFixed(3),
@@ -119,7 +111,7 @@ TradingMethod.prototype.calculateAdvice = function () {
if(typeof price === 'string')
price = parseFloat(price);
- if(config.watch.exchange !== 'cexio')
+ if(config.normal.exchange !== 'cexio')
price = price.toFixed(3);
var message = '@ P:' + price + ' (L:' + long + ', S:' + short + ', M:' + macd + ', s:' + signal + ', D:' + macddiff + ')';
@@ -152,20 +144,19 @@ TradingMethod.prototype.calculateAdvice = function () {
else {
this.currentTrend = 'down';
this.advice('short');
- if(settings.verbose) log.info('advice - SELL' + message);
+ // if(settings.verbose) log.info('advice - SELL' + message);
this.trendDuration = 1;
}
} else
this.advice();
message = message + ', DT: ' + this.trendDuration;
} else {
- if(settings.debug) log.info('we are currently not in an up or down trend', message);
+ // if(settings.debug) log.info('we are currently not in an up or down trend', message);
this.advice();
// Trend has ended so reset counter
this.trendDuration = 1;
message = message + ', NT: ' + this.trendDuration;
}
- if(settings.verbose) log.info('MACD ' + message);
} | history is propagated to trading methods again | askmike_gekko | train
1a7266cd6fb3e5ff92cab4918fbe8d7af805968c | diff --git a/asyncio_xmpp/node.py b/asyncio_xmpp/node.py
index <HASH>..<HASH> 100644
--- a/asyncio_xmpp/node.py
+++ b/asyncio_xmpp/node.py
@@ -358,7 +358,7 @@ class Client:
# not available.
# both stops us from continuing, let’s put a policy violation on the
# stream and let it bubble up.
- self.xmlstream.stream_error(
+ self._xmlstream.stream_error(
"policy-violation",
str(err),
custom_error="{{{}}}tls-failure".format(namespaces.asyncio_xmpp)
@@ -368,7 +368,7 @@ class Client:
# special form of SASL error telling us that SASL failed due to
# mismatch of our and the servers preferences. we let the server
# know about that and re-raise
- self.xmlstream.stream_error(
+ self._xmlstream.stream_error(
"policy-violation",
str(err),
custom_error="{{{}}}sasl-failure".format(
@@ -378,14 +378,14 @@ class Client:
except errors.SASLFailure as err:
# other, generic SASL failure. this can be an issue e.g. with SCRAM,
# if the server replies with an odd value
- self.xmlstream.stream_error(
+ self._xmlstream.stream_error(
"undefined-condition",
str(err),
custom_error="{{{}}}sasl-failure".format(namespaces.asyncio_xmpp)
)
raise
except Exception as err:
- self.xmlstream.stream_error("internal-server-error")
+ self._xmlstream.stream_error("internal-server-error")
raise
self._stanza_broker.start().add_done_callback(
diff --git a/tests/mocks.py b/tests/mocks.py
index <HASH>..<HASH> 100644
--- a/tests/mocks.py
+++ b/tests/mocks.py
@@ -370,5 +370,5 @@ class TestableClient(node.Client):
self.__mocked_stream.mock_receive_node(self.__initial_node)
self.__mocked_stream.on_connection_lost = \
self._handle_xmlstream_connection_lost
- self.xmlstream = self.__mocked_stream
+ self._xmlstream = self.__mocked_stream
return self.__mocked_transport, self.__mocked_stream | Consistently use Client._xmlstream in tests
Also, fixes AttributeError in the real world. | horazont_aioxmpp | train |
470ffd45354a67d583bca9262184ee5f97c50a85 | diff --git a/grpc/src/main/java/com/linecorp/armeria/server/grpc/GrpcDecoratingService.java b/grpc/src/main/java/com/linecorp/armeria/server/grpc/GrpcDecoratingService.java
index <HASH>..<HASH> 100644
--- a/grpc/src/main/java/com/linecorp/armeria/server/grpc/GrpcDecoratingService.java
+++ b/grpc/src/main/java/com/linecorp/armeria/server/grpc/GrpcDecoratingService.java
@@ -95,6 +95,11 @@ final class GrpcDecoratingService extends SimpleDecoratingHttpService implements
}
@Override
+ public Map<Route, ServerMethodDefinition<?, ?>> methodsByRoute() {
+ return delegate.methodsByRoute();
+ }
+
+ @Override
public Set<SerializationFormat> supportedSerializationFormats() {
return delegate.supportedSerializationFormats();
} | Override methodsByRoute in GrpcDecoratingService
Motivation
A `methodsByRoute` method was added to the `GrpcService` interface in #<I>.
`GrpcDecoratingService`, which was added in #<I>, should also override the method but doesn't.
Modification:
- Override `methodsByRoute` in `GrpcDecoratingService`
Result:
- Build passes. | line_armeria | train |
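
The underlying hazard is generic to the decorator pattern: a decorating service implements the interface itself, so any method it fails to forward is answered by the wrapper (or an interface default) instead of the delegate, and decorated services silently diverge. Stripped to its essentials, with hypothetical types:

    import java.util.Map;

    interface RoutedService {
        Map<String, String> methodsByRoute();
    }

    final class DecoratingService implements RoutedService {
        private final RoutedService delegate;

        DecoratingService(RoutedService delegate) { this.delegate = delegate; }

        @Override
        public Map<String, String> methodsByRoute() {
            return delegate.methodsByRoute(); // the forwarding this commit adds
        }
    }
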
5805324a177a8805b93d12acd766570142dad6ca | diff --git a/src/builder/server.js b/src/builder/server.js
index <HASH>..<HASH> 100644
--- a/src/builder/server.js
+++ b/src/builder/server.js
@@ -204,7 +204,7 @@ export default (options = {}) => {
throw err
}
- if (port !== devHost) {
+ if (port !== devPort) {
log(`Port ${ devPort } is not available. Using port ${ port } instead.`)
}
server.listen(port, devHost, (err) => { | fix: Wrong log message when port is avaiable (#<I>) | phenomic_phenomic | train |
781340ce449961bfdf3a8d67754d00fe0999c87e | diff --git a/src/main/java/nl/hsac/fitnesse/fixture/slim/web/SeleniumDriverSetup.java b/src/main/java/nl/hsac/fitnesse/fixture/slim/web/SeleniumDriverSetup.java
index <HASH>..<HASH> 100644
--- a/src/main/java/nl/hsac/fitnesse/fixture/slim/web/SeleniumDriverSetup.java
+++ b/src/main/java/nl/hsac/fitnesse/fixture/slim/web/SeleniumDriverSetup.java
@@ -12,6 +12,7 @@ import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.edge.EdgeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
+import org.openqa.selenium.firefox.FirefoxOptions;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.phantomjs.PhantomJSDriver;
@@ -75,7 +76,8 @@ public class SeleniumDriverSetup extends SlimFixture {
if ("firefoxdriver".equalsIgnoreCase(driverClass.getSimpleName())) {
FirefoxProfile fxProfile = getFirefoxProfile(profile);
- driver = new FirefoxDriver(fxProfile);
+ FirefoxOptions options = new FirefoxOptions().setProfile(fxProfile);
+ driver = new FirefoxDriver(options);
}
else if("chromedriver".equalsIgnoreCase(driverClass.getSimpleName())) {
DesiredCapabilities capabilities = getChromeMobileCapabilities(profile); | Remove usage of deprecated constructor | fhoeben_hsac-fitnesse-fixtures | train |
2c64d0dc7baff5b3ff9c2e83eb32545f7da34856 | diff --git a/src/components/networked-video-source.js b/src/components/networked-video-source.js
index <HASH>..<HASH> 100644
--- a/src/components/networked-video-source.js
+++ b/src/components/networked-video-source.js
@@ -42,13 +42,19 @@ AFRAME.registerComponent('networked-video-source', {
if (newStream) {
this.video.srcObject = newStream;
- var playResult = this.video.play();
+ const playResult = this.video.play();
if (playResult instanceof Promise) {
playResult.catch((e) => naf.log.error(`Error play video stream`, e));
}
+ if (this.videoTexture) {
+ this.videoTexture.dispose();
+ }
+
+ this.videoTexture = new THREE.VideoTexture(this.video);
+
const mesh = this.el.getObject3D('mesh');
- mesh.material.map = new THREE.VideoTexture(this.video);
+ mesh.material.map = this.videoTexture;
mesh.material.needsUpdate = true;
}
@@ -57,31 +63,35 @@ AFRAME.registerComponent('networked-video-source', {
},
_clearMediaStream() {
- if (this.video) {
- this.video.srcObject = null;
- this.video = null;
- this.stream = null;
+
+ this.stream = null;
+
+ if (this.videoTexture) {
+
+ if (this.videoTexture.image instanceof HTMLVideoElement) {
+ // Note: this.videoTexture.image === this.video
+ const video = this.videoTexture.image;
+ video.pause();
+ video.srcObject = null;
+ video.load();
+ }
+
+ this.videoTexture.dispose();
+ this.videoTexture = null;
}
},
remove: function() {
- if (!this.videoTexture) return;
-
- if (this.stream) {
- this._clearMediaStream();
- }
+ this._clearMediaStream();
},
setupVideo: function() {
- var el = this.el;
-
if (!this.video) {
- var video = document.createElement('video');
+ const video = document.createElement('video');
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
video.setAttribute('muted', true);
+ this.video = video;
}
-
- this.video = video;
}
});
\ No newline at end of file | improve networked-video-source component videoTexture cleanup and reuse video | networked-aframe_networked-aframe | train |
d81cdc989a212f4430228dba6a66687a13c9364d | diff --git a/hawkular-wildfly-agent/src/main/java/org/hawkular/agent/monitor/service/MonitorService.java b/hawkular-wildfly-agent/src/main/java/org/hawkular/agent/monitor/service/MonitorService.java
index <HASH>..<HASH> 100644
--- a/hawkular-wildfly-agent/src/main/java/org/hawkular/agent/monitor/service/MonitorService.java
+++ b/hawkular-wildfly-agent/src/main/java/org/hawkular/agent/monitor/service/MonitorService.java
@@ -21,6 +21,7 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
+import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
@@ -106,6 +107,7 @@ import org.jboss.msc.value.InjectedValue;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
+import com.fasterxml.jackson.databind.ObjectMapper;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
@@ -909,8 +911,13 @@ public class MonitorService implements Service<MonitorService> {
if (response.code() != 200) {
log.debugf("Hawkular Metrics is not ready yet: %d/%s", response.code(), response.message());
} else {
- log.debugf("Hawkular Metrics is ready: %s", response.body().string());
- break;
+ String bodyString = response.body().string();
+ if (checkReallyUp(bodyString)) {
+ log.debugf("Hawkular Metrics is ready: %s", bodyString);
+ break;
+ } else {
+ log.debugf("Hawkular Metrics is still starting: %s", bodyString);
+ }
}
} catch (Exception e) {
log.debugf("Hawkular Metrics is not ready yet: %s", e.toString());
@@ -958,6 +965,26 @@ public class MonitorService implements Service<MonitorService> {
}
/**
+ * If the server returns a 200 OK, we still need to check the content if the server
+ * is really up. This is explained here: https://twitter.com/heiglandreas/status/801137903149654017
+ * @param bodyString String representation of the body
+ * @return true if it is really up, false otherwise (still starting).
+ */
+ private boolean checkReallyUp(String bodyString) {
+
+ ObjectMapper mapper = new ObjectMapper(); // We don't need it later
+ Map result = null;
+ try {
+ result = mapper.readValue(bodyString, Map.class);
+ } catch (IOException e) {
+ return false;
+ }
+ String status = (String) result.get("MetricsService");
+
+ return "STARTED".equals(status);
+ }
+
+ /**
* Registers the feed with the Hawkular system under the given tenant.
* Note, it is OK to re-register the same feed/tenant combinations.
* | If metrics returns <I> OK, we still need to check if it is still starting. (#<I>)
HWKAGENT-<I> | hawkular_hawkular-agent | train |
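
`checkReallyUp` exists because the status endpoint answers 200 while Hawkular Metrics is still booting: readiness is encoded in the JSON body, not the status code. The same check as a self-contained unit (Jackson's `ObjectMapper#readValue` is the real API; the `MetricsService`/`STARTED` keys come from the diff):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.IOException;
    import java.util.Map;

    final class ReadinessCheck {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        static boolean checkReallyUp(String body) {
            try {
                Map<?, ?> status = MAPPER.readValue(body, Map.class);
                return "STARTED".equals(status.get("MetricsService"));
            } catch (IOException e) {
                return false; // not valid JSON yet: treat as still starting
            }
        }
        // checkReallyUp("{\"MetricsService\":\"STARTED\"}") -> true
    }
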
b4ba6a84b9e31612b8630e04f37aceb6debcbb1b | diff --git a/ui/fields/plupload.php b/ui/fields/plupload.php
index <HASH>..<HASH> 100644
--- a/ui/fields/plupload.php
+++ b/ui/fields/plupload.php
@@ -149,9 +149,9 @@
var html = tmpl(binding);
var list = $('#<?php echo esc_js( $css_id ); ?>-pods-files');
- list.append( html );
+ list.prepend( html );
- var items = list.find( 'li' ),
+ var items = list.find( 'ul.pods-file-meta' ),
itemCount = items.size();
if ( 0 < maxFiles || itemCount > maxFiles ) { | Fixed limiting code on plupload field so that new list items append correctly | pods-framework_pods | train |
3302812e98faa0076d1db9e3dd9bffc485ef9a75 | diff --git a/grandalf/utils.py b/grandalf/utils.py
index <HASH>..<HASH> 100644
--- a/grandalf/utils.py
+++ b/grandalf/utils.py
@@ -5,7 +5,7 @@
# published under GPLv2 license
from numpy import array,matrix,linalg
-from math import atan2,cos,sin
+from math import atan2,cos,sin,sqrt
#------------------------------------------------------------------------------
class Poset(object):
@@ -202,56 +202,41 @@ def intersectC(view, r, topt):
#------------------------------------------------------------------------------
-# setcontrols find the bezier control points associated to the rhs vector:
-def setcontrols(rhs):
- b=2.0
- n=len(rhs)
- x=range(n)
- t=range(n)
- x[0]=rhs[0]/b
- for i in range(1,n-1):
- t[i]=1./b
- b = 4.0-t[i]
- x[i] = (rhs[i]-x[i-1])/b
- t[n-1]=1./b
- b = 3.5-t[n-1]
- x[n-1]=(rhs[n-1]-x[n-2])/b
- for i in range(1,n):
- x[n-i-1] -= t[n-i]*x[n-i]
- return x
-
-
-#------------------------------------------------------------------------------
# setcurve returns the spline curve that path through the list of points P.
# The spline curve is a list of cubic bezier curves (nurbs) that have
# matching tangents at their extreme points.
-def setcurve(P):
- n = len(P)-1
- assert n>0
- if n==1:
- P0=P[0]
- P1=P[1]
- C1=((2*P0[0]+P1[0])/3.,(2*P0[1]+P1[1])/3.)
- C2=(2*C1[0]-P0[0],2*C1[1]-P0[1])
- return [[P0,C1,C2,P1]]
- else:
- rhs = range(n)
- rhs[0] = P[0][0]+2.*P[1][0]
- for i in range(1,n-1):
- rhs[i]=4*P[i][0]+2*P[i+1][0]
- rhs[n-1] = (8*P[n-1][0]+P[n][0])/2.
- x = setcontrols(rhs)
- rhs[0] = P[0][1]+2.*P[1][1]
- for i in range(1,n-1):
- rhs[i]=4*P[i][1]+2*P[i+1][1]
- rhs[n-1] = (8*P[n-1][1]+P[n][1])/2.
- y = setcontrols(rhs)
- C1 = zip(x,y)
- C2 = []
- for i in range(0,n-1):
- C2.append((2*P[i+1][0]-x[i+1],2*P[i+1][1]-y[i+1]))
- C2.append(((P[n][0]+x[n-1])/2.,(P[n][1]+y[n-1])/2.))
- splines=[]
- for i in range(n):
- splines.append([P[i],C1[i],C2[i],P[i+1]])
- return splines
+# The method considered here is taken from "The NURBS book" (Les A. Piegl,
+# Wayne Tiller, Springer, 1997) and implements a local interpolation rather
+# than a global interpolation.
+def setcurve(self,pts):
+ P = map(array,pts)
+ n = len(P)
+ assert n>=2
+ # tangent estimation
+ Q = [P[1]-P[0]]
+ T = [array((0.,1.))]
+ for k in xrange(1,n-1):
+ q = P[k+1]-P[k]
+ t = q/sqrt(q.dot(q))
+ Q.append(q)
+ T.append(t)
+ Q.append(P[n-1]-P[n-2])
+ T.append(array((0.,1.)))
+ splines=[]
+ for k in xrange(n-1):
+ t = T[k]+T[k+1]
+ a = 16. - (t.dot(t))
+ b = 12.*(Q[k].dot(t))
+ c = -36. * Q[k].dot(Q[k])
+ D = (b*b) - 4.*a*c
+ assert D>=0
+ sd = sqrt(D)
+ s1,s2 = (-b-sd)/(2.*a),(-b+sd)/(2.*a)
+ s = s2
+ if s1>=0: s=s1
+ C0 = tuple(P[k])
+ C1 = tuple(P[k] + (s/3.)*T[k])
+ C2 = tuple(P[k+1] -(s/3.)*T[k+1])
+ C3 = tuple(P[k+1])
+ splines.append([C0,C1,C2,C3])
+ return splines | change setcurve NURBS interpolation method | bdcht_grandalf | train |
3665801ba60bb04a28737fd7e79e8fa595ba99bc | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -31,17 +31,19 @@ function resolveFilenameOptimized(request, parent) {
}
function loadModuleList() {
- try {
- if (fs.existsSync(options.cacheFile)) {
- var readFileNameLookup = JSON.parse(fs.readFileSync(options.cacheFile, 'utf-8'));
- if ((!options.cacheKiller) || (readFileNameLookup._cacheKiller === options.cacheKiller))
- filenameLookup = readFileNameLookup;
- }
- else if (fs.existsSync(options.startupFile)) {
- var readFileNameLookup = JSON.parse(fs.readFileSync(options.startupFile, 'utf-8'));
+
+ function tryLoadingFile(file) {
+ if (fs.existsSync(file)) {
+ var readFileNameLookup = JSON.parse(fs.readFileSync(file, 'utf-8'));
if ((!options.cacheKiller) || (readFileNameLookup._cacheKiller === options.cacheKiller))
filenameLookup = readFileNameLookup;
+ return true;
}
+ return false;
+ }
+
+ try {
+ tryLoadingFile(options.cacheFile) || tryLoadingFile(options.startupFile);
}
catch (e) {
console.log(e); | small refactoring of load module list | wix_fast-boot | train |
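A rough Python analogue may make the short-circuit clearer: `tryLoadingFile(a) || tryLoadingFile(b)` means the first *existing* file wins, even when its cacheKiller mismatches (the lookup is simply left unset in that case). The names and return convention below are assumptions for illustration, not fast-boot's API.

```python
# Hypothetical sketch of the fallback load; mirrors the JS semantics where
# an existing-but-stale cache file still stops the fall-through.
import json
import os

def try_loading_file(path, cache_killer=None):
    """Return (hit, lookup): hit is True when the file exists; lookup is
    the parsed table only when the cacheKiller matches (or none is set)."""
    if not os.path.exists(path):
        return False, None
    with open(path, encoding='utf-8') as fh:
        lookup = json.load(fh)
    if cache_killer and lookup.get('_cacheKiller') != cache_killer:
        return True, None
    return True, lookup

def load_module_list(cache_file, startup_file, cache_killer=None):
    for candidate in (cache_file, startup_file):
        hit, lookup = try_loading_file(candidate, cache_killer)
        if hit:
            return lookup  # first existing file wins, fresh or stale
    return None
```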
de4e1e60b8886a4498046df5a7304813a811976e | diff --git a/pysos/sos_script.py b/pysos/sos_script.py
index <HASH>..<HASH> 100755
--- a/pysos/sos_script.py
+++ b/pysos/sos_script.py
@@ -394,8 +394,8 @@ class SoS_Workflow:
if item.isdigit():
# pipeline:100
all_steps[int(item)] = True
- elif '-' in item and item.count('-') == 1:
- l, u = item.split('-')
+ elif ':' in item and item.count(':') == 1:
+ l, u = item.split(':')
if (l and not l.isdigit()) or (u and not u.isdigit()) or \
(l and u and int(l) > int(u)):
raise ValueError('Invalid pipeline step item {}'.format(item))
@@ -1091,7 +1091,7 @@ for __n, __v in {}.items():
combined_wf.name = workflow_name
return combined_wf
# if a single workflow
- # workflow_10-15 etc
+ # workflow_10:15 etc
mo = SOS_SUBWORKFLOW.match(workflow_name)
if not mo:
raise ValueError('Incorrect workflow name {}'.format(workflow_name))
diff --git a/pysos/sos_syntax.py b/pysos/sos_syntax.py
index <HASH>..<HASH> 100644
--- a/pysos/sos_syntax.py
+++ b/pysos/sos_syntax.py
@@ -138,7 +138,7 @@ _SUBWORKFLOW_TMPL = '''
[a-zA-Z*] # cannot start with _ etc
([\w\d_]*?)) # can have _ and digit
(_(?P<steps> # index start from _
- [\d\s-]+))? # with - and digit
+        [\d\s:]+))?         # with : and digit
\s*$ # end
'''
diff --git a/test/test_parser.py b/test/test_parser.py
index <HASH>..<HASH> 100644
--- a/test/test_parser.py
+++ b/test/test_parser.py
@@ -781,7 +781,7 @@ executed.append(step_name)
self.assertEqual(env.sos_dict['a'], 1)
self.assertEqual(env.sos_dict['input_b1'], ['out_a_4'])
#
- wf = script.workflow('a_ 1-2 + a_4 + b_3-')
+ wf = script.workflow('a_ 1:2 + a_4 + b_3:')
Base_Executor(wf).prepare()
self.assertEqual(env.sos_dict['executed'], ['a_1', 'a_2', 'a_4',
            'b_3', 'b_4']) | Use x:y instead of x-y syntax for subworkflow | vatlab_SoS | train
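To make the syntax change concrete, here is a hedged sketch of `x:y` step-range parsing in the style of the patched `SoS_Workflow` code. `parse_steps` is an illustrative stand-in, and treating a missing bound as "from the first step" or "to the last step" is an assumption about how items like `b_3:` behave.

```python
# Hypothetical parser for step specs such as '1 3:5' or '3:'; not SoS API.
def parse_steps(spec, n_steps):
    """Expand digit and 'l:u' items into a step-index selection mask."""
    selected = [False] * (n_steps + 1)
    for item in spec.split():
        if item.isdigit():                      # e.g. 'workflow_100'
            selected[int(item)] = True
        elif item.count(':') == 1:              # e.g. '1:2', '3:', ':4'
            l, u = item.split(':')
            if (l and not l.isdigit()) or (u and not u.isdigit()) or \
                    (l and u and int(l) > int(u)):
                raise ValueError('Invalid pipeline step item {}'.format(item))
            lo = int(l) if l else 0             # open lower bound: first step
            hi = int(u) if u else n_steps       # open upper bound: last step
            for i in range(lo, hi + 1):
                selected[i] = True
        else:
            raise ValueError('Invalid pipeline step item {}'.format(item))
    return selected

print(parse_steps('1 3:5', 6))   # steps 1, 3, 4, 5 selected
```

The `SOS_SUBWORKFLOW` regex changes in step so that `workflow_10:15`-style names keep matching the new separator.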
fba406e02f5731b201e28fb57fcb85f1838a65f9 | diff --git a/new_english/bin/restintercept.py b/new_english/bin/restintercept.py
index <HASH>..<HASH> 100755
--- a/new_english/bin/restintercept.py
+++ b/new_english/bin/restintercept.py
@@ -665,6 +665,14 @@ class JsonProxyRestHandler(splunk.rest.BaseRestHandler):
if atom_root:
timings.append(('app.xml_parse_start', time.time()))
root = et.fromstring(atom_root)
+
+ # service may return messages in the body; try to parse them
+ try:
+ msg = splunk.rest.extractMessages(root)
+ if msg:
+ messages.extend(msg)
+ except:
+ pass
timings.append(('app.odata_create_start', time.time()))
@@ -726,8 +734,12 @@ class JsonProxyRestHandler(splunk.rest.BaseRestHandler):
tmpEntity.data = {}
content_xpath = node.xpath('a:content', namespaces={'a': ATOM_NS})
if (len(content_xpath) > 0):
- content_node = content_xpath[0][0]
- tmpEntity.data = splunk.rest.format.nodeToPrimitive(content_node)
+ if (len(content_xpath[0]) > 0):
+ content_node = content_xpath[0][0]
+ tmpEntity.data = splunk.rest.format.nodeToPrimitive(content_node)
+ else:
+ logger.info(content_xpath[0].text)
+ tmpEntity.data = {"data": content_xpath[0].text}
# move the metadata around
if isinstance(tmpEntity.data, dict): | Add support for search/fields/* in new_english
Part of SPL-<I> | splunk_splunk-sdk-javascript | train |
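The notable branch in this patch is the fallback when an Atom `<content>` element has no child nodes: rather than indexing `content_xpath[0][0]` and failing, the text payload is wrapped as `{"data": ...}`, which is what endpoints like `search/fields/*` return. Below is a hedged lxml sketch of that branch, with `to_primitive` standing in for `splunk.rest.format.nodeToPrimitive` (not reproduced here).

```python
# Illustrative only; the real handler runs inside Splunk's REST framework.
from lxml import etree as et

ATOM_NS = 'http://www.w3.org/2005/Atom'

def to_primitive(node):
    # Placeholder for nodeToPrimitive: shallow tag-to-text mapping.
    return {child.tag: child.text for child in node}

def extract_entry_data(entry_node):
    """Structured content when the node has children, else wrap bare text."""
    content = entry_node.xpath('a:content', namespaces={'a': ATOM_NS})
    if not content:
        return {}
    if len(content[0]) > 0:            # element children: structured payload
        return to_primitive(content[0][0])
    return {'data': content[0].text}   # plain-text body, per the patch

entry = et.fromstring(
    '<entry xmlns="%s"><content>source, sourcetype, host</content></entry>'
    % ATOM_NS)
print(extract_entry_data(entry))       # {'data': 'source, sourcetype, host'}
```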
dddba8f08ca71d8a8885338d859c78251b78e347 | diff --git a/smack-im/src/main/java/org/jivesoftware/smack/roster/Roster.java b/smack-im/src/main/java/org/jivesoftware/smack/roster/Roster.java
index <HASH>..<HASH> 100644
--- a/smack-im/src/main/java/org/jivesoftware/smack/roster/Roster.java
+++ b/smack-im/src/main/java/org/jivesoftware/smack/roster/Roster.java
@@ -101,13 +101,13 @@ public final class Roster extends Manager {
* Returns the roster for the user.
* <p>
* This method will never return <code>null</code>, instead if the user has not yet logged into
- * the server or is logged in anonymously all modifying methods of the returned roster object
+ * the server all modifying methods of the returned roster object
* like {@link Roster#createEntry(Jid, String, String[])},
* {@link Roster#removeEntry(RosterEntry)} , etc. except adding or removing
* {@link RosterListener}s will throw an IllegalStateException.
+ * </p>
*
* @return the user's roster.
- * @throws IllegalStateException if the connection is anonymous
*/
public static synchronized Roster getInstanceFor(XMPPConnection connection) {
Roster roster = INSTANCES.get(connection);
@@ -213,11 +213,6 @@ public final class Roster extends Manager {
@Override
public void authenticated(XMPPConnection connection, boolean resumed) {
- // Anonymous users can't have a roster, but it is possible that a Roster instance is
- // retrieved if getRoster() is called *before* connect(). So we have to check here
- // again if it's an anonymous connection.
- if (connection.isAnonymous())
- return;
if (!isRosterLoadedAtLogin())
return;
// We are done here if the connection was resumed
@@ -296,9 +291,6 @@ public final class Roster extends Manager {
if (!connection.isAuthenticated()) {
throw new NotLoggedInException();
}
- if (connection.isAnonymous()) {
- throw new IllegalStateException("Anonymous users can't have a roster.");
- }
RosterPacket packet = new RosterPacket();
if (rosterStore != null && isRosterVersioningSupported()) {
@@ -438,13 +430,9 @@ public final class Roster extends Manager {
*
* @param name the name of the group.
* @return a new group, or null if the group already exists
- * @throws IllegalStateException if logged in anonymously
*/
public RosterGroup createGroup(String name) {
final XMPPConnection connection = connection();
- if (connection.isAnonymous()) {
- throw new IllegalStateException("Anonymous users can't have a roster.");
- }
if (groups.containsKey(name)) {
return groups.get(name);
}
@@ -473,9 +461,6 @@ public final class Roster extends Manager {
if (!connection.isAuthenticated()) {
throw new NotLoggedInException();
}
- if (connection.isAnonymous()) {
- throw new IllegalStateException("Anonymous users can't have a roster.");
- }
// Create and send roster entry creation packet.
RosterPacket rosterPacket = new RosterPacket();
@@ -551,9 +536,6 @@ public final class Roster extends Manager {
if (!connection.isAuthenticated()) {
throw new NotLoggedInException();
}
- if (connection.isAnonymous()) {
- throw new IllegalStateException("Anonymous users can't have a roster.");
- }
return connection.hasFeature(SubscriptionPreApproval.ELEMENT, SubscriptionPreApproval.NAMESPACE);
}
@@ -570,16 +552,12 @@ public final class Roster extends Manager {
* @throws NoResponseException SmackException if there was no response from the server.
* @throws NotConnectedException
* @throws InterruptedException
- * @throws IllegalStateException if connection is not logged in or logged in anonymously
*/
public void removeEntry(RosterEntry entry) throws NotLoggedInException, NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
final XMPPConnection connection = connection();
if (!connection.isAuthenticated()) {
throw new NotLoggedInException();
}
- if (connection.isAnonymous()) {
- throw new IllegalStateException("Anonymous users can't have a roster.");
- }
// Only remove the entry if it's in the entry list.
// The actual removal logic takes place in RosterPacketListenerprocess>>Packet(Packet) | Allow Roster for anonymous connections
Fixes SMACK-<I>. | igniterealtime_Smack | train |