hash (stringlengths, 40..40) | diff (stringlengths, 131..114k) | message (stringlengths, 7..980) | project (stringlengths, 5..67) | split (stringclasses, 1 value) |
---|---|---|---|---|
4191ef3ebfb1f8a5e4697c05f82aa0da1e9648c1 | diff --git a/api-v2.go b/api-v2.go
index <HASH>..<HASH> 100644
--- a/api-v2.go
+++ b/api-v2.go
@@ -25,6 +25,7 @@ import (
"path/filepath"
"runtime"
"sort"
+ "strconv"
"strings"
"time"
)
@@ -289,7 +290,7 @@ var maxParts = int64(10000)
// maxPartSize - unexported right now
var maxPartSize int64 = 1024 * 1024 * 1024 * 5
-// GetPartSize - calculate the optimal part size for the given objectSize
+// calculatePartSize - calculate the optimal part size for the given objectSize
//
// NOTE: Assumption here is that for any given object upload to a S3 compatible object
// storage it will have the following parameters as constants
@@ -303,17 +304,16 @@ var maxPartSize int64 = 1024 * 1024 * 1024 * 5
//
// special case where it happens to be that partSize is indeed bigger than the
// maximum part size just return maxPartSize back
-func getPartSize(objectSize int64) int64 {
- partSize := (objectSize / (maxParts - 1)) // make sure last part has enough buffer and handle this poperly
- {
- if partSize > minimumPartSize {
- if partSize > maxPartSize {
- return maxPartSize
- }
- return partSize
+func calculatePartSize(objectSize int64) int64 {
+ // make sure last part has enough buffer and handle this poperly
+ partSize := (objectSize / (maxParts - 1))
+ if partSize > minimumPartSize {
+ if partSize > maxPartSize {
+ return maxPartSize
}
- return minimumPartSize
+ return partSize
}
+ return minimumPartSize
}
func (a apiV2) newObjectUpload(bucket, object, contentType string, size int64, data io.Reader) error {
@@ -324,17 +324,20 @@ func (a apiV2) newObjectUpload(bucket, object, contentType string, size int64, d
uploadID := initiateMultipartUploadResult.UploadID
completeMultipartUpload := completeMultipartUpload{}
var totalLength int64
- for part := range chopper(data, getPartSize(size), nil) {
+
+ partSize := calculatePartSize(size)
+ for part := range chopper(data, partSize, nil) {
if part.Err != nil {
return part.Err
}
// This check is primarily for last part
// This verifies if the part.Len was an unexpected read i.e if we lost few bytes
- if part.Len < getPartSize(size) {
- if (size - totalLength) != part.Len {
+ if part.Len < partSize {
+ expectedPartLen := size - totalLength
+ if expectedPartLen != part.Len {
return ErrorResponse{
- Code: "MethodUnexpectedEOF",
- Message: "Data read is less than the requested size",
+ Code: "UnexpectedShortRead",
+ Message: "Data read ‘" + strconv.FormatInt(expectedPartLen, 10) + "’ is less than the expected size ‘" + strconv.FormatInt(part.Len, 10) + "’",
Resource: separator + bucket + separator + object,
}
}
@@ -424,17 +427,19 @@ func (a apiV2) continueObjectUpload(bucket, object, uploadID string, size int64,
partNumber: part.Metadata.PartNumber,
})
}
- for part := range chopper(data, getPartSize(size), skipParts) {
+ partSize := calculatePartSize(size)
+ for part := range chopper(data, partSize, skipParts) {
if part.Err != nil {
return part.Err
}
// This check is primarily for last part
// This verifies if the part.Len was an unexpected read i.e if we lost few bytes
- if part.Len < getPartSize(size) {
- if (size - totalLength) != part.Len {
+ if part.Len < partSize {
+ expectedPartLen := size - totalLength
+ if expectedPartLen != part.Len {
return ErrorResponse{
- Code: "MethodUnexpectedEOF",
- Message: "Data read is less than the requested size",
+ Code: "UnexpectedShortRead",
+ Message: "Data read ‘" + strconv.FormatInt(expectedPartLen, 10) + "’ is less than the expected size ‘" + strconv.FormatInt(part.Len, 10) + "’",
Resource: separator + bucket + separator + object,
}
}
diff --git a/api_private_test.go b/api_private_test.go
index <HASH>..<HASH> 100644
--- a/api_private_test.go
+++ b/api_private_test.go
@@ -63,13 +63,13 @@ func TestGetRegion(t *testing.T) {
func TestPartSize(t *testing.T) {
var maxPartSize int64 = 1024 * 1024 * 1024 * 5
- partSize := getPartSize(5000000000000000000)
+ partSize := calculatePartSize(5000000000000000000)
if partSize > minimumPartSize {
if partSize > maxPartSize {
t.Fatal("invalid result, cannot be bigger than maxPartSize 5GB")
}
}
- partSize = getPartSize(50000000000)
+ partSize = calculatePartSize(50000000000)
if partSize > minimumPartSize {
t.Fatal("invalid result, cannot be bigger than minimumPartSize 5MB")
} | Rename MethodUnexpectedEOF to UnexpectedShortRead | minio_minio-go | train |
d7d0569ac364922e22e2c263fdda47706c26971b | diff --git a/lib/trello/action.rb b/lib/trello/action.rb
index <HASH>..<HASH> 100644
--- a/lib/trello/action.rb
+++ b/lib/trello/action.rb
@@ -26,7 +26,7 @@ module Trello
def search(query, opts={})
response = client.get("/search/", { query: query }.merge(opts))
- JSON.parse(response).except("options").each_with_object({}) do |(key, data), result|
+ parse_json(response).except("options").each_with_object({}) do |(key, data), result|
klass = "Trello::#{key.singularize.capitalize}".constantize
result[key] = klass.from_json(data)
end | Use parse_json util method in search method | jeremytregunna_ruby-trello | train |
c9d7c0f1c7a34a81bd00444867a5b02a22440dd5 | diff --git a/client/test/order.js b/client/test/order.js
index <HASH>..<HASH> 100644
--- a/client/test/order.js
+++ b/client/test/order.js
@@ -29,7 +29,7 @@ const orderSuite = window.orderSuite = (getData, getTestData) => () => {
// Let's try ordering by a different field.
it('can order results by a field other than id', assertCompletes(() =>
data.order('b').fetch().toArray()
- .do(res => assert.deepEqual(res.slice(3), [
+ .do(res => assert.deepEqual(res.slice(0, 3), [
{ id: 2, a: 20, b: 1 },
{ id: 3, a: 20, b: 2 },
{ id: 4, a: 20, b: 3 },
@@ -47,9 +47,9 @@ const orderSuite = window.orderSuite = (getData, getTestData) => () => {
))
// Let's try to order by a missing field
- it('returns in arbitrary order if a bad field is given', assertCompletes(() =>
+ it('returns no documents if a bad field is given', assertCompletes(() =>
data.order('abracadabra').fetch().toArray()
- .do(res => assert.sameDeepMembers(res, testData))
+ .do(res => assert.sameDeepMembers(res, []))
))
// We can pass multiple fields to `order` to disambiguate. | Fix tests to match <I> server | rethinkdb_horizon | train |
d3fb962f60d3c6f8ecbe336c307036f9ebf32031 | diff --git a/src/Resources/Response.php b/src/Resources/Response.php
index <HASH>..<HASH> 100644
--- a/src/Resources/Response.php
+++ b/src/Resources/Response.php
@@ -4,7 +4,7 @@ use Closure;
use Illuminate\Http\RedirectResponse;
use Illuminate\Http\JsonResponse;
use Illuminate\Http\Response as IlluminateResponse;
-use Orchestra\Facile\Response as FacileResponse;
+use Orchestra\Facile\Container as FacileContainer;
use Symfony\Component\HttpKernel\Exception\HttpException;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
@@ -25,7 +25,7 @@ class Response
return new IlluminateResponse($content, 200);
} elseif ($content instanceof RedirectResponse || $content instanceof JsonResponse) {
return $content;
- } elseif ($content instanceof FacileResponse) {
+ } elseif ($content instanceof FacileContainer) {
return $content->render();
} elseif ($content instanceof IlluminateResponse) {
return $this->handleIlluminateResponse($content, $callback);
@@ -101,7 +101,7 @@ class Response
*/
protected function isRenderableResponse($response)
{
- return $response instanceof FacileResponse && $response->getFormat() !== 'html';
+ return $response instanceof FacileContainer && $response->getFormat() !== 'html';
}
/**
diff --git a/tests/ResponseTest.php b/tests/ResponseTest.php
index <HASH>..<HASH> 100644
--- a/tests/ResponseTest.php
+++ b/tests/ResponseTest.php
@@ -82,7 +82,7 @@ class ResponseTest extends \PHPUnit_Framework_TestCase
/**
* Test Orchestra\Resources\Response::call() method when given
- * Orchestra\Facile\Response.
+ * Orchestra\Facile\Container.
*
* @test
*/
@@ -90,7 +90,7 @@ class ResponseTest extends \PHPUnit_Framework_TestCase
{
$stub = new Response;
- $content = m::mock('\Orchestra\Facile\Response');
+ $content = m::mock('\Orchestra\Facile\Container');
$content->shouldReceive('render')->once()->andReturn('foo');
$this->assertEquals('foo', $stub->call($content));
}
@@ -119,7 +119,7 @@ class ResponseTest extends \PHPUnit_Framework_TestCase
$content = m::mock('\Illuminate\Http\Response');
$content->headers = $headers = m::mock('HeaderBag');
- $facile = m::mock('\Orchestra\Facile\Response');
+ $facile = m::mock('\Orchestra\Facile\Container');
$facile->shouldReceive('getFormat')->andReturn('json')
->shouldReceive('render')->once()->andReturn('foo');
$content->shouldReceive('getStatusCode')->once()->andReturn(200) | Update Facile response class name. | orchestral_resources | train |
0c5c55d519f7321a056c5359c8636fd4c934444d | diff --git a/metrique/server/drivers/basesql.py b/metrique/server/drivers/basesql.py
index <HASH>..<HASH> 100644
--- a/metrique/server/drivers/basesql.py
+++ b/metrique/server/drivers/basesql.py
@@ -153,6 +153,7 @@ def _extract_func(cube, **kwargs):
raise ValueError("row_limit must be a number")
sql_where = []
+ sql_groupby = ''
_sql = c.get_field_property('sql', field)
if not _sql:
sql = 'SELECT %s, %s.%s FROM %s' % (
@@ -160,16 +161,26 @@ def _extract_func(cube, **kwargs):
else:
sql = 'SELECT %s, %s FROM ' % (table_column, _sql[0])
_from = [db_table]
+
+ # FIXME: THIS IS UGLY! use a dict... or sqlalchemy
if _sql[1]:
_from.extend(_sql[1])
sql += ', '.join(_from)
sql += ' '
+
if _sql[2]:
sql += ' '.join(_sql[2])
sql += ' '
+
if _sql[3]:
sql_where.append('(%s)' % ' OR '.join(_sql[3]))
+ try:
+ if _sql[4]:
+ sql_groupby = _sql[4]
+ except IndexError:
+ pass
+
delta_filter = []
delta_filter_sql = None
@@ -222,6 +233,9 @@ def _extract_func(cube, **kwargs):
if sql_where:
sql += ' WHERE %s ' % ' AND '.join(sql_where)
+ if sql_groupby:
+ sql += ' GROUP BY %s ' % sql_groupby
+
if c.get_field_property('sort', field, True):
sql += " ORDER BY %s ASC" % table_column
diff --git a/metrique/server/etl.py b/metrique/server/etl.py
index <HASH>..<HASH> 100644
--- a/metrique/server/etl.py
+++ b/metrique/server/etl.py
@@ -477,9 +477,10 @@ def last_known_warehouse_mtime(cube, field=None, value=None):
# we need to check the etl_activity collection
if value:
spec = {'cube': cube, field: value}
+ doc = c.c_etl_activity.find_one(spec, ['%s.mtime' % field])
else:
spec = {'cube': cube, field: {'$exists': True}}
- doc = c.c_etl_activity.find_one(spec, ['%s.mtime' % field])
+ doc = _cube.find_one(spec, ['%s._mtime' % field])
if doc:
start = doc[field]['mtime']
else: | add basesql groupby; fix last_known_warehouse_mtime | kejbaly2_metrique | train |
a3b1e3ec141483a17e140c274a6e1bc82d054129 | diff --git a/src/Table/Column/ForeignUser.php b/src/Table/Column/ForeignUser.php
index <HASH>..<HASH> 100644
--- a/src/Table/Column/ForeignUser.php
+++ b/src/Table/Column/ForeignUser.php
@@ -102,7 +102,7 @@ class ForeignUser extends BaseColumn implements Foreign, DeleteConstrainable {
* @inheritDoc
*/
public function convert_raw_to_value( $raw ) {
- return get_user_by( $this->key, $raw );
+ return get_user_by( $this->key, $raw ) ?: null;
}
/** | ForeignUser should return , not false, if user does not exist | iron-bound-designs_IronBound-DB | train |
7030b5802e67a8442df9a2bd866c48c871fccd29 | diff --git a/gremlin-python/src/main/jython/gremlin_python/structure/io/graphbinaryV1.py b/gremlin-python/src/main/jython/gremlin_python/structure/io/graphbinaryV1.py
index <HASH>..<HASH> 100644
--- a/gremlin-python/src/main/jython/gremlin_python/structure/io/graphbinaryV1.py
+++ b/gremlin-python/src/main/jython/gremlin_python/structure/io/graphbinaryV1.py
@@ -49,6 +49,7 @@ _deserializers = {}
class DataType(Enum):
+ null = 0xfe
int = 0x01
long = 0x02
string = 0x03
@@ -62,6 +63,8 @@ class DataType(Enum):
set = 0x0b
uuid = 0x0c
edge = 0x0d
+ path = 0x0e
+ property = 0x0f
class GraphBinaryTypeType(type):
@@ -392,8 +395,8 @@ class EdgeIO(_GraphBinaryTypeIO):
ba.extend(cls.string_as_bytes(obj.inV.label))
ba.extend(writer.writeObject(obj.outV.id))
ba.extend(cls.string_as_bytes(obj.outV.label))
- ba.extend([0xfe])
- ba.extend([0xfe])
+ ba.extend([DataType.null.value])
+ ba.extend([DataType.null.value])
return ba
@classmethod
@@ -404,3 +407,40 @@ class EdgeIO(_GraphBinaryTypeIO):
edgelbl, Vertex(reader.readObject(b), cls.read_string(b)))
b.read(2)
return edge
+
+
+class PathIO(_GraphBinaryTypeIO):
+
+ python_type = Path
+ graphbinary_type = DataType.path
+
+ @classmethod
+ def dictify(cls, obj, writer):
+ ba = bytearray([cls.graphbinary_type.value])
+ ba.extend(writer.writeObject(obj.labels))
+ ba.extend(writer.writeObject(obj.objects))
+ return ba
+
+ @classmethod
+ def objectify(cls, b, reader):
+ return Path(reader.readObject(b), reader.readObject(b))
+
+
+class PropertyIO(_GraphBinaryTypeIO):
+
+ python_type = Property
+ graphbinary_type = DataType.property
+
+ @classmethod
+ def dictify(cls, obj, writer):
+ ba = bytearray([cls.graphbinary_type.value])
+ ba.extend(cls.string_as_bytes(obj.key))
+ ba.extend(writer.writeObject(obj.value))
+ ba.extend([DataType.null.value])
+ return ba
+
+ @classmethod
+ def objectify(cls, b, reader):
+ p = Property(cls.read_string(b), reader.readObject(b), None)
+ b.read(1)
+ return p
diff --git a/gremlin-python/src/main/jython/tests/structure/io/test_graphbinaryV1.py b/gremlin-python/src/main/jython/tests/structure/io/test_graphbinaryV1.py
index <HASH>..<HASH> 100644
--- a/gremlin-python/src/main/jython/tests/structure/io/test_graphbinaryV1.py
+++ b/gremlin-python/src/main/jython/tests/structure/io/test_graphbinaryV1.py
@@ -129,3 +129,13 @@ class TestGraphSONWriter(object):
x = Edge(123, Vertex(1, 'person'), "developed", Vertex(10, "software"))
output = self.graphbinary_reader.readObject(self.graphbinary_writer.writeObject(x))
assert x == output
+
+ def test_path(self):
+ x = Path(["x", "y", "z"], [1, 2, 3])
+ output = self.graphbinary_reader.readObject(self.graphbinary_writer.writeObject(x))
+ assert x == output
+
+ def test_property(self):
+ x = Property("name", "stephen", None)
+ output = self.graphbinary_reader.readObject(self.graphbinary_writer.writeObject(x))
+ assert x == output | Added property/path to python graphbinary | apache_tinkerpop | train |
fc53ea7de8100a5385cb091e854b40686da97180 | diff --git a/richtextfx/src/main/java/org/fxmisc/richtext/CodeArea.java b/richtextfx/src/main/java/org/fxmisc/richtext/CodeArea.java
index <HASH>..<HASH> 100644
--- a/richtextfx/src/main/java/org/fxmisc/richtext/CodeArea.java
+++ b/richtextfx/src/main/java/org/fxmisc/richtext/CodeArea.java
@@ -29,6 +29,8 @@ public class CodeArea extends StyleClassedTextArea {
this();
appendText(text);
+ getUndoManager().forgetHistory();
+ getUndoManager().mark();
// position the caret at the beginning
selectRange(0, 0);
diff --git a/richtextfx/src/main/java/org/fxmisc/richtext/InlineCssTextArea.java b/richtextfx/src/main/java/org/fxmisc/richtext/InlineCssTextArea.java
index <HASH>..<HASH> 100644
--- a/richtextfx/src/main/java/org/fxmisc/richtext/InlineCssTextArea.java
+++ b/richtextfx/src/main/java/org/fxmisc/richtext/InlineCssTextArea.java
@@ -19,6 +19,8 @@ public class InlineCssTextArea extends InlineStyleTextArea<String> {
this();
replaceText(0, 0, text);
+ getUndoManager().forgetHistory();
+ getUndoManager().mark();
// position the caret at the beginning
selectRange(0, 0); | Forget undo history in constructors taking content | FXMisc_RichTextFX | train |
4413dd7362f73ce4c9e7216fd3c9f6cd92a64a47 | diff --git a/drools-core/src/test/java/org/drools/examples/manners/ReteooMannersTest.java b/drools-core/src/test/java/org/drools/examples/manners/ReteooMannersTest.java
index <HASH>..<HASH> 100644
--- a/drools-core/src/test/java/org/drools/examples/manners/ReteooMannersTest.java
+++ b/drools-core/src/test/java/org/drools/examples/manners/ReteooMannersTest.java
@@ -53,30 +53,30 @@ public class ReteooMannersTest extends BaseMannersTest {
ruleBase.addPackage( this.pkg );
WorkingMemory workingMemory = ruleBase.newWorkingMemory();
- workingMemory.addEventListener( new DefaultAgendaEventListener() {
- public void activationCreated(ActivationCreatedEvent event) {
- super.activationCreated( event );
- System.out.println( event );
- }
-
- public void activationCancelled(ActivationCancelledEvent event) {
- super.activationCancelled( event );
- System.out.println( event );
- }
-
- public void beforeActivationFired(BeforeActivationFiredEvent event) {
- super.beforeActivationFired( event );
- System.out.println( event );
- }
-
- public void afterActivationFired(AfterActivationFiredEvent event) {
- super.afterActivationFired( event );
- System.out.println( event );
- }
-
- });
+// workingMemory.addEventListener( new DefaultAgendaEventListener() {
+// public void activationCreated(ActivationCreatedEvent event) {
+// super.activationCreated( event );
+// System.out.println( event );
+// }
+//
+// public void activationCancelled(ActivationCancelledEvent event) {
+// super.activationCancelled( event );
+// System.out.println( event );
+// }
+//
+// public void beforeActivationFired(BeforeActivationFiredEvent event) {
+// super.beforeActivationFired( event );
+// System.out.println( event );
+// }
+//
+// public void afterActivationFired(AfterActivationFiredEvent event) {
+// super.afterActivationFired( event );
+// System.out.println( event );
+// }
+//
+// });
- InputStream is = getClass().getResourceAsStream( "/manners5.dat" );
+ InputStream is = getClass().getResourceAsStream( "/manners64.dat" );
List list = getInputObjects( is );
for ( Iterator it = list.iterator(); it.hasNext(); ) {
Object object = it.next(); | -Left in the comments that help with debugging.
git-svn-id: <URL> | kiegroup_drools | train |
06c06493d58bdadefe03e6538eefc1d010bf488f | diff --git a/ags_publishing_tools/api.py b/ags_publishing_tools/api.py
index <HASH>..<HASH> 100644
--- a/ags_publishing_tools/api.py
+++ b/ags_publishing_tools/api.py
@@ -47,7 +47,7 @@ class Api:
else:
request = urllib2.Request(url)
request.get_method = lambda: method
- response = urllib2.urlopen(request, json.dumps(params))
+ response = urllib2.urlopen(request, json.dumps(encoded_params))
# request.add_header('Content-Type', 'application/json')
response_text = response.read() | Encodes post params
BPFG-<I> | lobsteropteryx_slap | train |
d967ec74d3527ba23f08a752d725865c6139cb98 | diff --git a/django_uwsgi/views.py b/django_uwsgi/views.py
index <HASH>..<HASH> 100644
--- a/django_uwsgi/views.py
+++ b/django_uwsgi/views.py
@@ -17,7 +17,7 @@ class UwsgiStatus(TemplateView):
'''
uWSGI Status View
'''
- if apps.is_installed('wagtail.wagtailadmin'):
+ if apps.is_installed('wagtail.admin'):
template_name = 'uwsgi/wagtail_uwsgi.html'
else:
template_name = 'uwsgi/uwsgi.html' | wagtail.wagtailadmin > wagtail.admin | unbit_django-uwsgi | train |
e995989765b3388bdecd1c9b0cbeea717f0023ec | diff --git a/lib/tabula/writers.rb b/lib/tabula/writers.rb
index <HASH>..<HASH> 100644
--- a/lib/tabula/writers.rb
+++ b/lib/tabula/writers.rb
@@ -16,7 +16,7 @@ module Tabula
def Writers.TSV(lines, output=$stdout)
tsv_string = lines.each { |l|
- output.write(l.map(&:text).join("\t") + '\n')
+ output.write(l.map(&:text).join("\t") + "\n")
}
end | minor bugfix: use doublequotes on \n in TSV writer
so it comes out as a line return, not as a literal slash n | tabulapdf_tabula-extractor | train |
3531f361b68aa61b53754449327a739b1f035cd6 | diff --git a/niworkflows/viz/utils.py b/niworkflows/viz/utils.py
index <HASH>..<HASH> 100644
--- a/niworkflows/viz/utils.py
+++ b/niworkflows/viz/utils.py
@@ -548,11 +548,10 @@ def plot_melodic_components(melodic_dir, in_file, tr=None,
gs = GridSpec(n_rows * 2, 9,
width_ratios=[1, 1, 1, 4, 0.001, 1, 1, 1, 4, ],
height_ratios=[1.1, 1] * n_rows)
-
- if noise_components.size == n_components:
- fig.suptitle("WARNING: ALL COMPONENTS CLASSIFIED AS NOISE", color='r')
- elif noise_components is None or noise_components.size == 0:
+ if noise_components is None or noise_components.size == 0:
fig.suptitle("WARNING: NO COMPONENTS CLASSIFIED AS NOISE", color='r')
+ elif noise_components.size == n_components:
+ fig.suptitle("WARNING: ALL COMPONENTS CLASSIFIED AS NOISE", color='r')
for i, img in enumerate(
iter_img(os.path.join(melodic_dir, "melodic_IC.nii.gz"))): | re-order if conditional | poldracklab_niworkflows | train |
ec71830c7aab6f5fdc3f461db1e906adeb7f3c0b | diff --git a/lib/hmac/strategies/query.rb b/lib/hmac/strategies/query.rb
index <HASH>..<HASH> 100644
--- a/lib/hmac/strategies/query.rb
+++ b/lib/hmac/strategies/query.rb
@@ -15,12 +15,19 @@ module Warden
# @return [Bool] true if all required authentication information is available in the request
# @see https://github.com/hassox/warden/wiki/Strategies
def valid?
- valid = auth_info.include? "signature"
+ valid = has_signature?
valid = valid && has_timestamp? if check_ttl?
valid = valid && has_nonce? if nonce_required?
valid
end
-
+
+ # Checks that the request contains a signature
+ #
+ # @return [Bool] true if the request contains a signature
+ def has_signature?
+ auth_info.include? "signature"
+ end
+
# Check that the signature given in the request is valid.
#
# @return [Bool] true if the request is valid | refactored valid?, extracted a check for the signature in the params so that the check can be changed withouth overwriting all of valid? | Asquera_warden-hmac-authentication | train |
39dc7741766563069bb42a79015e3843331a91d2 | diff --git a/ceph_deploy/hosts/debian/install.py b/ceph_deploy/hosts/debian/install.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/hosts/debian/install.py
+++ b/ceph_deploy/hosts/debian/install.py
@@ -3,11 +3,17 @@ from urlparse import urlparse
from ceph_deploy.lib import remoto
from ceph_deploy.util import pkg_managers
from ceph_deploy.util.paths import gpg
+from ceph_deploy.hosts.common import map_components
+
+
+NON_SPLIT_COMPONENTS = ['ceph-osd', 'ceph-mon']
def install(distro, version_kind, version, adjust_repos, **kw):
- # note: when split packages for ceph land for Debian/Ubuntu,
- # `kw['components']` will have those. Unused for now.
+ packages = map_components(
+ NON_SPLIT_COMPONENTS,
+ kw.pop('components', [])
+ )
codename = distro.codename
machine = distro.machine_type
@@ -81,9 +87,8 @@ def install(distro, version_kind, version, adjust_repos, **kw):
)
# TODO this does not downgrade -- should it?
- remoto.process.run(
- distro.conn,
- [
+ if len(packages):
+ cmd = [
'env',
'DEBIAN_FRONTEND=noninteractive',
'DEBIAN_PRIORITY=critical',
@@ -94,18 +99,19 @@ def install(distro, version_kind, version, adjust_repos, **kw):
'--assume-yes',
'install',
'--',
- 'ceph',
- 'ceph-mds',
- 'ceph-common',
- 'ceph-fs-common',
- 'radosgw',
- ],
+ ]
+ cmd.extend(packages)
+ remoto.process.run(
+ distro.conn,
+ cmd
)
def mirror_install(distro, repo_url, gpg_url, adjust_repos, **kw):
- # note: when split packages for ceph land for Debian/Ubuntu,
- # `kw['components']` will have those. Unused for now.
+ packages = map_components(
+ NON_SPLIT_COMPONENTS,
+ kw.pop('components', [])
+ )
repo_url = repo_url.strip('/') # Remove trailing slashes
gpg_path = gpg_url.split('file://')[-1]
@@ -139,23 +145,14 @@ def mirror_install(distro, repo_url, gpg_url, adjust_repos, **kw):
distro.conn.remote_module.write_sources_list(repo_url, distro.codename)
pkg_managers.apt_update(distro.conn)
- packages = (
- 'ceph',
- 'ceph-mds',
- 'ceph-common',
- 'ceph-fs-common',
- )
-
pkg_managers.apt(distro.conn, packages)
- pkg_managers.apt(distro.conn, 'ceph')
def repo_install(distro, repo_name, baseurl, gpgkey, **kw):
- # do we have specific components to install?
- # removed them from `kw` so that we don't mess with other defaults
- # note: when split packages for ceph land for Debian/Ubuntu, `packages`
- # can be used. Unused for now.
- packages = kw.pop('components', [])
+ packages = map_components(
+ NON_SPLIT_COMPONENTS,
+ kw.pop('components', [])
+ )
# Get some defaults
safe_filename = '%s.list' % repo_name.replace(' ', '-')
install_ceph = kw.pop('install_ceph', False)
@@ -196,13 +193,4 @@ def repo_install(distro, repo_name, baseurl, gpgkey, **kw):
pkg_managers.apt_update(distro.conn)
if install_ceph:
- # Before any install, make sure we have `wget`
- packages = (
- 'ceph',
- 'ceph-mds',
- 'ceph-common',
- 'ceph-fs-common',
- )
-
pkg_managers.apt(distro.conn, packages)
- pkg_managers.apt(distro.conn, 'ceph') | [RM-<I>] debian: only install requested packages
Refs: #<I> | ceph_ceph-deploy | train |
bddcb143b162644957a8ab0f62aa4864a1732ea0 | diff --git a/cas-server-3.4.2/cas-server-core/src/main/java/org/jasig/cas/web/view/Saml10FailureResponseView.java b/cas-server-3.4.2/cas-server-core/src/main/java/org/jasig/cas/web/view/Saml10FailureResponseView.java
index <HASH>..<HASH> 100644
--- a/cas-server-3.4.2/cas-server-core/src/main/java/org/jasig/cas/web/view/Saml10FailureResponseView.java
+++ b/cas-server-3.4.2/cas-server-core/src/main/java/org/jasig/cas/web/view/Saml10FailureResponseView.java
@@ -39,9 +39,10 @@ public class Saml10FailureResponseView extends AbstractCasView {
final HttpServletRequest request, final HttpServletResponse response)
throws Exception {
final WebApplicationService service = this.samlArgumentExtractor.extractService(request);
+ final String artifactId = service != null ? service.getArtifactId() : null;
+ final String serviceId = service != null ? service.getId() : "UNKNOWN";
final String errorMessage = (String) model.get("description");
-
- final SAMLResponse samlResponse = new SAMLResponse(service.getArtifactId(), service.getId(), new ArrayList<Object>(), new SAMLException(errorMessage));
+ final SAMLResponse samlResponse = new SAMLResponse(artifactId, serviceId, new ArrayList<Object>(), new SAMLException(errorMessage));
samlResponse.setIssueInstant(new Date());
response.setContentType("text/xml; charset=" + this.encoding); | CAS-<I>
Ensure service parameter is not null, which would be the case when TARGET
parameter is not supplied. | apereo_cas | train |
d71ff3b6c2de280600934f9c234a9f631ee4a790 | diff --git a/src/faker/date.js b/src/faker/date.js
index <HASH>..<HASH> 100644
--- a/src/faker/date.js
+++ b/src/faker/date.js
@@ -1,4 +1,7 @@
export function between(from, to) {
+ from = getDateObject(from);
+ to = getDateObject(to);
+
const fromMilli = Date.parse(from);
const toMilli = Date.parse(to);
const offset = Math.floor(Math.random() * (toMilli - fromMilli));
@@ -8,6 +11,10 @@ export function between(from, to) {
}
export function betweenExcept(from, to, except) {
+ from = getDateObject(from);
+ to = getDateObject(to);
+ except = getDateObject(except);
+
let date = between(from, to);
while (datesAreEqual(date, except)) {
date = between(from, to);
@@ -33,6 +40,14 @@ export function birthday(minAge=18, maxAge=65) {
return new Date();
}
+function getDateObject(date) {
+ if (typeof(date) == 'string') {
+ return new Date(Date.parse(date));
+ }
+ date.setHours(0, 0, 0, 0);
+ return date;
+}
+
function datesAreEqual(date1, date2) {
return date1.getYear() == date2.getYear() &&
date1.getMonth() == date2.getMonth() &&
diff --git a/test/faker/date.spec.js b/test/faker/date.spec.js
index <HASH>..<HASH> 100644
--- a/test/faker/date.spec.js
+++ b/test/faker/date.spec.js
@@ -6,11 +6,26 @@ describe('#Date', () => {
it('should return a Date', () => {
expect(DateFaker.between(new Date(), new Date())).to.be.a('Date');
});
+
+ it('should handle string dates', () => {
+ const from = new Date(2017, 0, 1);
+ const to = new Date(2017, 0, 10);
+ expect(DateFaker.between('2017-01-01', '2017-01-10')).to.be.within(from, to);
+ });
});
describe('#betweenExcept', () => {
it('should return a Date', () => {
- expect(DateFaker.between(new Date(), new Date(), new Date())).to.be.a('Date');
+ const from = new Date(2017, 0, 1);
+ const to = new Date(2017, 0, 10);
+ const except = new Date(2017, 0, 5);
+ expect(DateFaker.betweenExcept(from, to, except)).to.be.a('Date');
+ });
+
+ it('should handle string dates', () => {
+ const from = new Date(2017, 0, 1);
+ const to = new Date(2017, 0, 10);
+ expect(DateFaker.betweenExcept('2017-01-01', '2017-01-10', '2017-01-04')).to.be.within(from, to);
});
}); | Wrapping dates with getDateObject so it can use strings and date objects. Added tests to check that it is working. | mrstebo_fakergem | train |
69995658431836e13d2f9a1a63d99816c2af3b66 | diff --git a/api/pool.go b/api/pool.go
index <HASH>..<HASH> 100644
--- a/api/pool.go
+++ b/api/pool.go
@@ -111,6 +111,13 @@ func addPoolHandler(w http.ResponseWriter, r *http.Request, t auth.Token) error
return err
}
+// title: remove pool
+// path: /pools/{name}
+// method: DELETE
+// responses:
+// 200: Pool removed
+// 401: Unauthorized
+// 404: Pool not found
func removePoolHandler(w http.ResponseWriter, r *http.Request, t auth.Token) error {
allowed := permission.Check(t, permission.PermPoolDelete)
if !allowed { | api/pools: add comments to describe pool remove | tsuru_tsuru | train |
867b536ed4558cf6eb61aee0dcba70283bafa1e2 | diff --git a/lib/constants/formats.js b/lib/constants/formats.js
index <HASH>..<HASH> 100644
--- a/lib/constants/formats.js
+++ b/lib/constants/formats.js
@@ -4,9 +4,35 @@
* @description Exports different types of formatting for {@link Date#format}.
*/
+import Super from '../Super';
import Str from '../String';
const zero = new Str('0');
+const daysOfTheWeekNames = [
+ 'Sunday',
+ 'Monday',
+ 'Tuesday',
+ 'Wednesday',
+ 'Thursday',
+ 'Friday',
+ 'Saturday'
+];
+const daysOfTheWeekAliases = new Super(daysOfTheWeekNames).map((value) => value.slice(0, 3)).$;
+const monthsNames = [
+ 'January',
+ 'February',
+ 'March',
+ 'April',
+ 'May',
+ 'June',
+ 'Jule',
+ 'August',
+ 'September',
+ 'October',
+ 'November',
+ 'December'
+];
+const monthsAliases = new Super(monthsNames).map((value) => value.slice(0, 3)).$;
/**
* @callback module:constants/formats~matchCallback
@@ -31,7 +57,7 @@ export default [
},
{
format: 'c',
- match: (date, utc) => cut(date[utc]('c'), 3, 1).replace(/^0\./, '')
+ match: (date, utc) => date[utc]('c')
},
{
format: 'ss',
@@ -39,7 +65,7 @@ export default [
},
{
format: 's',
- match: (date, utc) => String(date[utc]('s'))
+ match: (date, utc) => date[utc]('s')
},
{
format: 'mm',
@@ -47,7 +73,7 @@ export default [
},
{
format: 'm',
- match: (date, utc) => String(date[utc]('m'))
+ match: (date, utc) => date[utc]('m')
},
{
format: 'hh',
@@ -55,15 +81,15 @@ export default [
},
{
format: 'h',
- match: (date, utc) => String(date[utc]('h'))
+ match: (date, utc) => date[utc]('h')
},
{
format: 'dddd',
- match: (date, utc) => date[utc]('dwn')
+ match: (date, utc) => daysOfTheWeekAliases[date[utc]('dw')]
},
{
format: 'ddd',
- match: (date, utc) => date[utc]('dwa')
+ match: (date, utc) => daysOfTheWeekNames[date[utc]('dw')]
},
{
format: 'dd',
@@ -71,15 +97,15 @@ export default [
},
{
format: 'd',
- match: (date, utc) => String(date[utc]('d'))
+ match: (date, utc) => date[utc]('d')
},
{
format: 'MMMM',
- match: (date, utc) => date[utc]('Mn')
+ match: (date, utc) => monthsAliases[date[utc]('M')]
},
{
format: 'MMM',
- match: (date, utc) => date[utc]('Ma')
+ match: (date, utc) => monthsNames[date[utc]('M')]
},
{
format: 'MM',
@@ -87,7 +113,7 @@ export default [
},
{
format: 'M',
- match: (date, utc) => String(date[utc]('M'))
+ match: (date, utc) => date[utc]('M')
},
{
format: 'yyyy',
@@ -95,7 +121,7 @@ export default [
},
{
format: 'yy',
- match: (date, utc) => String(date[utc]('y')).slice(2)
+ match: (date, utc) => String(date[utc]('y')).slice(-2)
},
{
format: 'y',
@@ -117,15 +143,3 @@ function round(number, digits) {
return zero.repeat(zeroes).$ + string;
}
-
-/**
- * @function cut
- * @private
- * @param {Number} number - Number to cut.
- * @param {Number} max - Number of digits in number.
- * @param {Number} digits - Number of the digits of the output.
- * @returns {string} Cut input.
- */
-function cut(number, max, digits) {
- return (number / Math.pow(10, max)).toFixed(digits);
-} | constants.formats: helper constants moved to the module. | dwaynejs_dwayne | train |
896dedfd374200b7db49596766be0e2f4384c7bd | diff --git a/lib/jazzy/assets/css/jazzy.css.scss b/lib/jazzy/assets/css/jazzy.css.scss
index <HASH>..<HASH> 100644
--- a/lib/jazzy/assets/css/jazzy.css.scss
+++ b/lib/jazzy/assets/css/jazzy.css.scss
@@ -311,7 +311,7 @@ header {
padding-left: 3px;
margin-left: 15px;
}
- .has_default_implementation {
+ .declaration-note {
font-size: .85em;
color: rgba(128,128,128,1);
font-style: italic;
diff --git a/lib/jazzy/doc_builder.rb b/lib/jazzy/doc_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/jazzy/doc_builder.rb
+++ b/lib/jazzy/doc_builder.rb
@@ -243,6 +243,7 @@ module Jazzy
gh_token_url = gh_token_url(item, source_module)
item_render[:github_token_url] = gh_token_url
item_render[:default_impl_abstract] = Jazzy.markdown.render(item.default_impl_abstract) if item.default_impl_abstract
+ item_render[:merged_from_protocol_extension] = item.merged_from_protocol_extension
item_render[:return] = Jazzy.markdown.render(item.return) if item.return
item_render[:parameters] = item.parameters if item.parameters.any?
item_render[:url] = item.url if item.children.any?
diff --git a/lib/jazzy/source_declaration.rb b/lib/jazzy/source_declaration.rb
index <HASH>..<HASH> 100644
--- a/lib/jazzy/source_declaration.rb
+++ b/lib/jazzy/source_declaration.rb
@@ -15,6 +15,7 @@ module Jazzy
attr_accessor :declaration
attr_accessor :abstract
attr_accessor :default_impl_abstract
+ attr_accessor :merged_from_protocol_extension
attr_accessor :discussion
attr_accessor :return
attr_accessor :children
diff --git a/lib/jazzy/sourcekitten.rb b/lib/jazzy/sourcekitten.rb
index <HASH>..<HASH> 100644
--- a/lib/jazzy/sourcekitten.rb
+++ b/lib/jazzy/sourcekitten.rb
@@ -341,10 +341,9 @@ module Jazzy
merge_default_implementations_into_protocol(typedecl, extensions)
extensions.reject! { |ext| ext.children.empty? }
- ext_mark = SourceMark.new('- Extension Members')
extensions.each do |ext|
ext.children.each do |ext_member|
- ext_member.mark = ext_mark
+ ext_member.merged_from_protocol_extension = true
end
end
end
diff --git a/lib/jazzy/templates/task.mustache b/lib/jazzy/templates/task.mustache
index <HASH>..<HASH> 100755
--- a/lib/jazzy/templates/task.mustache
+++ b/lib/jazzy/templates/task.mustache
@@ -18,10 +18,15 @@
<a class="token" href="#/{{usr}}">{{name}}</a>
</code>
{{#default_impl_abstract}}
- <span class="has_default_implementation">
+ <span class="declaration-note">
Default implementation
</span>
{{/default_impl_abstract}}
+ {{#merged_from_protocol_extension}}
+ <span class="declaration-note">
+ Extension method
+ </span>
+ {{/merged_from_protocol_extension}}
</div>
<div class="height-container">
<div class="pointer-container"></div> | Declaration note instead of generated mark for protocol extension members | realm_jazzy | train |
47ce7ad302c42fb1c68feac94e596c4941435469 | diff --git a/storage/metric/leveldb.go b/storage/metric/leveldb.go
index <HASH>..<HASH> 100644
--- a/storage/metric/leveldb.go
+++ b/storage/metric/leveldb.go
@@ -540,7 +540,6 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
var (
fingerprintToSamples = groupByFingerprint(samples)
indexErrChan = make(chan error)
- doneCommitting sync.WaitGroup
)
go func(groups map[model.Fingerprint]model.Samples) {
@@ -555,55 +554,50 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples model.Samples) (err err
indexErrChan <- l.indexMetrics(metrics)
}(fingerprintToSamples)
- go func() {
- doneCommitting.Add(1)
- samplesBatch := leveldb.NewBatch()
- defer samplesBatch.Close()
- defer doneCommitting.Done()
-
- for fingerprint, group := range fingerprintToSamples {
- for {
- lengthOfGroup := len(group)
+ samplesBatch := leveldb.NewBatch()
+ defer samplesBatch.Close()
- if lengthOfGroup == 0 {
- break
- }
+ for fingerprint, group := range fingerprintToSamples {
+ for {
+ lengthOfGroup := len(group)
- take := *leveldbChunkSize
- if lengthOfGroup < take {
- take = lengthOfGroup
- }
+ if lengthOfGroup == 0 {
+ break
+ }
- chunk := group[0:take]
- group = group[take:lengthOfGroup]
+ take := *leveldbChunkSize
+ if lengthOfGroup < take {
+ take = lengthOfGroup
+ }
- key := &dto.SampleKey{
- Fingerprint: fingerprint.ToDTO(),
- Timestamp: indexable.EncodeTime(chunk[0].Timestamp),
- LastTimestamp: proto.Int64(chunk[take-1].Timestamp.Unix()),
- SampleCount: proto.Uint32(uint32(take)),
- }
+ chunk := group[0:take]
+ group = group[take:lengthOfGroup]
- value := &dto.SampleValueSeries{}
- for _, sample := range chunk {
- value.Value = append(value.Value, &dto.SampleValueSeries_Value{
- Timestamp: proto.Int64(sample.Timestamp.Unix()),
- Value: proto.Float32(float32(sample.Value)),
- })
- }
+ key := &dto.SampleKey{
+ Fingerprint: fingerprint.ToDTO(),
+ Timestamp: indexable.EncodeTime(chunk[0].Timestamp),
+ LastTimestamp: proto.Int64(chunk[take-1].Timestamp.Unix()),
+ SampleCount: proto.Uint32(uint32(take)),
+ }
- samplesBatch.Put(coding.NewProtocolBufferEncoder(key), coding.NewProtocolBufferEncoder(value))
+ value := &dto.SampleValueSeries{}
+ for _, sample := range chunk {
+ value.Value = append(value.Value, &dto.SampleValueSeries_Value{
+ Timestamp: proto.Int64(sample.Timestamp.Unix()),
+ Value: proto.Float32(float32(sample.Value)),
+ })
}
+
+ samplesBatch.Put(coding.NewProtocolBufferEncoder(key), coding.NewProtocolBufferEncoder(value))
}
+ }
- err = l.metricSamples.Commit(samplesBatch)
+ err = l.metricSamples.Commit(samplesBatch)
- if err != nil {
- panic(err)
- }
- }()
+ if err != nil {
+ panic(err)
+ }
- doneCommitting.Wait()
err = <-indexErrChan
if err != nil {
panic(err) | Extract appending from goroutine. | prometheus_prometheus | train |
1e5163d2411c744c4738b3ef3aa774e95b7238e3 | diff --git a/command/agent/command.go b/command/agent/command.go
index <HASH>..<HASH> 100644
--- a/command/agent/command.go
+++ b/command/agent/command.go
@@ -1167,13 +1167,14 @@ WAIT:
// Agent is already shutdown!
return 0
}
- c.Ui.Output(fmt.Sprintf("Caught signal: %v", sig))
- // Skip SIGPIPE signals
+ // Skip SIGPIPE signals and skip logging whenever such signal is received as well
if sig == syscall.SIGPIPE {
goto WAIT
}
+ c.Ui.Output(fmt.Sprintf("Caught signal: %v", sig))
+
// Check if this is a SIGHUP
if sig == syscall.SIGHUP {
conf, err := c.handleReload(config) | Supress signal logging on SIGPIPE. Should address #<I>
When consul-template is communicating with consul and the job is done, consul thread receives SIGPIPE.
This cause the logs to be filled "Caught signal: broken pipe" and they does not bring any usefull info with them.
Skipping those. | hashicorp_consul | train |
ffb7dd7bdd4ea48335099acdf8fdc39e4afc2eaa | diff --git a/spec/models/model_spec.rb b/spec/models/model_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/models/model_spec.rb
+++ b/spec/models/model_spec.rb
@@ -540,5 +540,26 @@ describe Volt::Model do
expect(count).to eq(1)
end
+
+ it 'should query twice and return twice' do
+ store._items << {name: 'One'}
+ store._items << {name: 'Two'}
+
+ puts "I: #{store._items.inspect}"
+
+ puts "----------"
+ puts "ITEMS: " + store._items.fetch.sync.inspect
+ puts "---------"
+
+ a = store._items.fetch.sync
+ puts "---------"
+ b = store._items.fetch.sync
+
+ puts "A: #{a.inspect}"
+ expect(a.size).to eq(2)
+
+ puts "B"
+ expect(b.size).to eq(2)
+ end
end
end | Add failing spec for issue with model loading. | voltrb_volt | train |
6872ab8c3cf075a25cff60d668052042041b4bcb | diff --git a/bugwarrior/db.py b/bugwarrior/db.py
index <HASH>..<HASH> 100644
--- a/bugwarrior/db.py
+++ b/bugwarrior/db.py
@@ -359,7 +359,7 @@ def synchronize(issue_generator, conf, main_section, dry_run=False):
log.info("Adding %i tasks", len(issue_updates['new']))
for issue in issue_updates['new']:
log.info("Adding task %s%s",
- issue['description'].encode("utf-8"), notreally)
+ issue['description'], notreally)
if dry_run:
continue
if notify:
@@ -382,8 +382,8 @@ def synchronize(issue_generator, conf, main_section, dry_run=False):
])
log.info(
"Updating task %s, %s; %s%s",
- six.text_type(issue['uuid']).encode("utf-8"),
- issue['description'].encode("utf-8"),
+ six.text_type(issue['uuid']),
+ issue['description'],
changes,
notreally
)
@@ -401,7 +401,7 @@ def synchronize(issue_generator, conf, main_section, dry_run=False):
log.info(
"Completing task %s %s%s",
issue,
- task_info.get('description', '').encode('utf-8'),
+ task_info.get('description', ''),
notreally
)
if dry_run: | Fix some problems with unicode
Because of the import
from __future__ import unicode_literals
We can pass all the arguments as unicode and let the logging library
take care of the encoding. | ralphbean_bugwarrior | train |
aa443831256b0fb018082a54c7fe89314c346812 | diff --git a/src/ClientManager.php b/src/ClientManager.php
index <HASH>..<HASH> 100644
--- a/src/ClientManager.php
+++ b/src/ClientManager.php
@@ -3,6 +3,7 @@
namespace ElfSundae\BearyChat\Laravel;
use Closure;
+use Illuminate\Support\Arr;
use ElfSundae\BearyChat\Client;
class ClientManager
@@ -15,6 +16,27 @@ class ClientManager
protected $app;
/**
+ * The default client name.
+ *
+ * @var string
+ */
+ protected $defaultName;
+
+ /**
+ * The defaults for all clients.
+ *
+ * @var array
+ */
+ protected $clientsDefaults = [];
+
+ /**
+ * The clients config.
+ *
+ * @var array
+ */
+ protected $clientsConfig = [];
+
+ /**
* The array of resolved BearyChat clients.
*
* @var array
@@ -29,13 +51,6 @@ class ClientManager
protected $httpClientCreator;
/**
- * Indicate whether the application version is Laravel 4.
- *
- * @var bool
- */
- protected $isLaravel4 = false;
-
- /**
* Create a new client manager instance.
*
* @param mixed $app
@@ -43,22 +58,81 @@ class ClientManager
public function __construct($app)
{
$this->app = $app;
+ }
+
+ /**
+ * Get the default client name.
+ *
+ * @return string
+ */
+ public function getDefaultName()
+ {
+ return $this->defaultName ?: Arr::first(array_keys($this->clientsConfig));
+ }
- $appVersion = method_exists($app, 'version') ? $app->version() : $app::VERSION;
+ /**
+ * Set the default client name.
+ *
+ * @param string $name
+ * @return $this
+ */
+ public function setDefaultName($name)
+ {
+ $this->defaultName = $name;
- $this->isLaravel4 = (int) $appVersion == 4;
+ return $this;
+ }
+
+ /**
+ * Get the clients defaults.
+ *
+ * @return array
+ */
+ public function getClientsDefaults()
+ {
+ return $this->clientsDefaults;
+ }
+
+ /**
+ * Set the clients defaults.
+ *
+ * @param array $defaults
+ * @return $this
+ */
+ public function setClientsDefaults($defaults)
+ {
+ if (is_array($defaults)) {
+ $this->clientsDefaults = $defaults;
+ }
+
+ return $this;
+ }
+
+ /**
+ * Set the clients config.
+ *
+ * @param array $config
+ * @return $this
+ */
+ public function setClientsConfig($config)
+ {
+ if (is_array($config)) {
+ $this->clientsConfig = $config;
+ }
+
+ return $this;
}
/**
* Get a client instance.
*
- * @param string $name
+ * @param string|null $name
* @return \ElfSundae\BearyChat\Client
*/
public function client($name = null)
{
if (is_null($name)) {
- $name = $this->getConfig('default');
+ $name = $this->getDefaultName();
}
return $this->clients[$name] = $this->get($name);
@@ -83,28 +157,35 @@ class ClientManager
*/
protected function resolve($name)
{
- $config = $this->getConfig('clients.'.$name);
+ $config = $this->getConfigForClient($name);
return new Client(
$config['webhook'],
- isset($config['message_defaults']) ? $config['message_defaults'] : [],
+ $config['message_defaults'],
$this->getHttpClient($name)
);
}
/**
- * Get the BearyChat configuration.
+ * Get client config for the given client name.
*
* @param string $name
- * @return mixed
+ * @return array
*/
- protected function getConfig($name)
+ protected function getConfigForClient($name)
{
- if ($this->isLaravel4) {
- return $this->app['config']->get("bearychat::{$name}");
+ $config = $this->clientsConfig[$name];
+
+ if (empty($config['webhook'])) {
+ $config['webhook'] = Arr::get($this->clientsDefaults, 'webhook');
}
- return $this->app['config']["bearychat.{$name}"];
+ $config['message_defaults'] = array_merge(
+ Arr::get($this->clientsDefaults, 'message_defaults', []),
+ Arr::get($config, 'message_defaults', [])
+ );
+
+ return $config;
}
/**
diff --git a/src/ServiceProvider.php b/src/ServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/ServiceProvider.php
+++ b/src/ServiceProvider.php
@@ -79,7 +79,10 @@ class ServiceProvider extends LaravelServiceProvider
}
$this->app->singleton('bearychat', function ($app) {
- return new ClientManager($app);
+ return (new ClientManager($app))
+ ->setDefaultName($this->getConfig('default'))
+ ->setClientsDefaults($this->getConfig('clients_defaults'))
+ ->setClientsConfig($this->getConfig('clients'));
});
$this->app->alias('bearychat', ClientManager::class);
@@ -122,6 +125,20 @@ class ServiceProvider extends LaravelServiceProvider
}
/**
+ * Get the bearychat configuration.
+ *
+ * @param string $key
+ * @param mixed $default
+ * @return mixed
+ */
+ protected function getConfig($key, $default = null)
+ {
+ $prefix = 'bearychat'.($this->isLaravel4 ? '::' : '.');
+
+ return $this->app['config']->get($prefix.$key, $default);
+ }
+
+ /**
* Get the services provided by the provider.
*
* @return string[] | Move getting config to ServiceProvider | ElfSundae_laravel-bearychat | train |
c27ef26fcef3cf64e6f6533a54ed5a4c025ce556 | diff --git a/usbiss/__init__.py b/usbiss/__init__.py
index <HASH>..<HASH> 100644
--- a/usbiss/__init__.py
+++ b/usbiss/__init__.py
@@ -25,9 +25,8 @@ def iss_spi_divisor(sck):
class USBISS(object):
- def __init__(self, port, mode, **kwargs):
- self.mode = mode
- self.dummy_bytes = kwargs.get('dummy_bytes', 0)
+ def __init__(self, port, iss_mode, **kwargs):
+ self.iss_mode = iss_mode
# Open serial port
serial_opts = {"port": port,
@@ -42,14 +41,31 @@ class USBISS(object):
self.get_iss_info()
self.get_iss_serial_no()
- if self.mode == 'spi':
- clk_phase = kwargs.get('clk_phase', 0)
- if 0 <= clk_phase < 4:
- clk_phase = 0x90 + clk_phase
+ if self.iss_mode == 'spi':
+ # Select the SPI mode of USB-ISS's SPI operating mode
+ if 'spi_mode' in kwargs:
+ spi_mode = kwargs.get('spi_mode', 0)
+ if 0 <= spi_mode < 4:
+ # Expose the the SPI mode to external applications
+ # where self.mode is same as spidev.SpiDev.mode
+ if spi_mode == 0:
+ self.mode = 0
+ elif spi_mode == 1:
+ self.mode = 2
+ elif spi_mode == 2:
+ self.mode = 1
+ elif spi_mode == 3:
+ self.mode = 3
+ # Add signal for SPI switch
+ spi_mode = 0x90 + spi_mode
+ else:
+ error = ("The value of spi_mode, %s, is not "
+ "between 0 and 3" % (spi_mode))
+ raise ValueError(error)
else:
- error = ("The value of clk_phase, %s, is not "
- "between 0 and 3" % (clk_phase))
- raise ValueError(error)
+ raise TypeError("Missing argument for spi_mode for SPI"
+ "operating mode")
+ # Select frequency of USB-ISS's SPI operating mode
if 'freq' in kwargs:
freq = kwargs.get('freq')
sck_divisor = iss_spi_divisor(freq)
@@ -57,11 +73,11 @@ class USBISS(object):
error = "The value of sck_divisor, %s, is not between 0 and 255" % (sck_divisor)
raise ValueError(error)
else:
- raise TypeError("Missing argument for frequency for SPI mode")
- self.mode = 1
- set_bytes = [clk_phase, sck_divisor]
- msg = ("Initializing USB-ISS in SPI mode with %s clk_phase and %s "
- "sck_divisor" % (clk_phase, sck_divisor))
+ raise TypeError("Missing argument for frequency for SPI"
+ "operating mode")
+ set_bytes = [spi_mode, sck_divisor]
+ msg = ("Initializing USB-ISS in SPI mode with %s spi_mode and %s "
+ "sck_divisor" % (spi_mode, sck_divisor))
print(msg)
# Configure USB-ISS
@@ -121,6 +137,7 @@ class USBISS(object):
def xfer(self, data):
+ # spidev function for transferring bytes to port
self.serial.write(bytearray([0x61] + data))
response = self.serial.read(1 + len(data))
status = response[0] | Changed constructor arguments, expose mode for SPI applications and more
documentation | DancingQuanta_pyusbiss | train |
b4cc71e3a41afa38e237ff16294a0fe4389cc0b6 | diff --git a/Eloquent/Builder.php b/Eloquent/Builder.php
index <HASH>..<HASH> 100755
--- a/Eloquent/Builder.php
+++ b/Eloquent/Builder.php
@@ -1,6 +1,7 @@
<?php namespace Illuminate\Database\Eloquent;
use Closure;
+use Illuminate\Pagination\Paginator;
use Illuminate\Database\Query\Expression;
use Illuminate\Database\Eloquent\Relations\Relation;
use Illuminate\Database\Query\Builder as QueryBuilder;
@@ -226,6 +227,26 @@ class Builder {
}
/**
+ * Paginate the given query into a simple paginator.
+ *
+ * @param int $perPage
+ * @param array $columns
+ * @return \Illuminate\Contracts\Pagination\Paginator
+ */
+ public function paginate($perPage = null, $columns = ['*'])
+ {
+ $perPage = $perPage ?: $this->model->getPerPage();
+
+ $page = $page ?: Paginator::resolveCurrentPage();
+
+ $this->skip(($page - 1) * $perPage)->take($perPage + 1);
+
+ return new Paginator($this->get($columns)->all(), $page, $perPage, [
+ 'path' => Paginator::resolveCurrentPath()
+ ]);
+ }
+
+ /**
* Update a record in the database.
*
* @param array $values
diff --git a/Eloquent/Model.php b/Eloquent/Model.php
index <HASH>..<HASH> 100755
--- a/Eloquent/Model.php
+++ b/Eloquent/Model.php
@@ -48,6 +48,13 @@ abstract class Model implements ArrayAccess, Arrayable, Jsonable, JsonSerializab
protected $primaryKey = 'id';
/**
+ * The number of models to return for pagination.
+ *
+ * @var int
+ */
+ protected $perPage = 15;
+
+ /**
* Indicates if the IDs are auto-incrementing.
*
* @var bool
@@ -2011,6 +2018,27 @@ abstract class Model implements ArrayAccess, Arrayable, Jsonable, JsonSerializab
}
/**
+ * Get the number of models to return per page.
+ *
+ * @return int
+ */
+ public function getPerPage()
+ {
+ return $this->perPage;
+ }
+
+ /**
+ * Set the number of models to return per page.
+ *
+ * @param int $perPage
+ * @return void
+ */
+ public function setPerPage($perPage)
+ {
+ $this->perPage = $perPage;
+ }
+
+ /**
* Get the default foreign key name for the model.
*
* @return string
diff --git a/Query/Builder.php b/Query/Builder.php
index <HASH>..<HASH> 100755
--- a/Query/Builder.php
+++ b/Query/Builder.php
@@ -2,6 +2,7 @@
use Closure;
use Illuminate\Support\Collection;
+use Illuminate\Pagination\Paginator;
use Illuminate\Database\ConnectionInterface;
use Illuminate\Database\Query\Grammars\Grammar;
use Illuminate\Database\Query\Processors\Processor;
@@ -1304,6 +1305,24 @@ class Builder {
}
/**
+ * Paginate the given query into a simple paginator.
+ *
+ * @param int $perPage
+ * @param array $columns
+ * @return \Illuminate\Contracts\Pagination\Paginator
+ */
+ public function paginate($perPage = 15, $columns = ['*'])
+ {
+ $page = $page ?: Paginator::resolveCurrentPage();
+
+ $this->skip(($page - 1) * $perPage)->take($perPage + 1);
+
+ return new Paginator($this->get($columns), $page, $perPage, [
+ 'path' => Paginator::resolveCurrentPath()
+ ]);
+ }
+
+ /**
* Run the query as a "select" statement against the connection.
*
* @return array | First pass at simpler Eloquent pagination. | illuminate_database | train |
a6994d3f74805b10e0c0f51bd62e00cfb0ca905b | diff --git a/examples/gen_rt_integrity_check.py b/examples/gen_rt_integrity_check.py
index <HASH>..<HASH> 100755
--- a/examples/gen_rt_integrity_check.py
+++ b/examples/gen_rt_integrity_check.py
@@ -65,7 +65,7 @@ end for;
supertype_body_tmpl = Template('''
select one one_${To}_Instance related by ${From}_Instance->${To}[R${Numb}];
if not_empty one_${To}_Instance
- break;
+ continue;
end if;
''') | example: fixed minor typo in runtime consistency check example | xtuml_pyxtuml | train |
f0b95277926775d0052b981ff475bb229b9d1c0c | diff --git a/test/fixtures/multi-server.js b/test/fixtures/multi-server.js
index <HASH>..<HASH> 100644
--- a/test/fixtures/multi-server.js
+++ b/test/fixtures/multi-server.js
@@ -15,15 +15,15 @@ var server3 = net.createServer(function (socket) {
});
server1.addListener('error', function (err) {
- process.exit(1);
+ process.exit(101);
});
server2.addListener('error', function (err) {
- process.exit(2);
+ process.exit(102);
});
server3.addListener('error', function (err) {
- process.exit(3);
+ process.exit(103);
});
// | [test] Make exit codes more specific
When an uncaught exception is thrown, `node ` exits with `1`. Make it
clear that exits are cause by listening errors. | nodejitsu_haibu-carapace | train |
5de13f7083c88438e943f5020e37f899901ed95c | diff --git a/src/test/java/org/boon/tests/SortTest.java b/src/test/java/org/boon/tests/SortTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/boon/tests/SortTest.java
+++ b/src/test/java/org/boon/tests/SortTest.java
@@ -50,8 +50,8 @@ public class SortTest {
public void setUp() throws Exception {
list = Lists.list(
Employee.employee( "zzz", "LastA", "120", "5.29.1970:00:00:01", 100 ),
- Employee.employee( "zaaa", "bbb", "124", "5.29.1960:00:00:00", 200 ),
- Employee.employee( "zaaa", "aaa", "123", "5.29.1970:00:00:01", 100 ),
+ Employee.employee( "zbbb", "bbb", "124", "5.29.1960:00:00:00", 200 ),
+ Employee.employee( "zbbb", "aaa", "123", "5.29.1970:00:00:01", 100 ),
Employee.employee( "bababa", "LastB", "125", "5.29.1960:00:00:00", 200 ),
Employee.employee( "BAbaba", "LastB", "126", "5.29.1960:00:00:00", 200 )
@@ -67,8 +67,8 @@ public class SortTest {
List<String> firstNames = BeanUtils.idxList( Typ.string, list, "firstName" );
assertEquals( "bababa", firstNames.get( 0 ) );
assertEquals( "BAbaba", firstNames.get( 1 ) );
- assertEquals( "zaaa", firstNames.get( 2 ) );
- assertEquals( "zaaa", firstNames.get( 3 ) );
+ assertEquals( "zbbb", firstNames.get( 2 ) );
+ assertEquals( "zbbb", firstNames.get( 3 ) );
assertEquals( "zzz", firstNames.get( 4 ) );
}
@@ -83,8 +83,8 @@ public class SortTest {
assertEquals( "bababa", firstNames.get( 0 ) );
assertEquals( "BAbaba", firstNames.get( 1 ) );
- assertEquals( "zaaa", firstNames.get( 2 ) );
- assertEquals( "zaaa", firstNames.get( 3 ) );
+ assertEquals( "zbbb", firstNames.get( 2 ) );
+ assertEquals( "zbbb", firstNames.get( 3 ) );
assertEquals( "zzz", firstNames.get( 4 ) );
} | SortTest fails on Norwegian locale
This is because "aa" is interpreted as "å", which should come after "z".
Fixes #<I> | boonproject_boon | train |
d11dd20bf3537a2d8237b75c89fa20ae284fefd3 | diff --git a/builtin/providers/test/resource_test.go b/builtin/providers/test/resource_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/test/resource_test.go
+++ b/builtin/providers/test/resource_test.go
@@ -6,6 +6,7 @@ import (
"strings"
"testing"
+ "github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
@@ -464,7 +465,7 @@ output "value_from_map_from_list" {
`),
ExpectError: nil,
Check: func(s *terraform.State) error {
- root := s.ModuleByPath(terraform.RootModulePath)
+ root := s.ModuleByPath(addrs.RootModuleInstance)
mapOut := root.Outputs["map_from_list"].Value
expectedMapOut := map[string]interface{}{
"a": "1", | builtin/providers/test: use new API for root module address
terraform.RootModulePath is no longer present, but
addrs.RootModuleInstance is equivalent to it. | hashicorp_terraform | train |
d6c9dd5e82ace42a33711d1cbae4dcf1f1881b85 | diff --git a/Bundle/WidgetMapBundle/Entity/WidgetMap.php b/Bundle/WidgetMapBundle/Entity/WidgetMap.php
index <HASH>..<HASH> 100644
--- a/Bundle/WidgetMapBundle/Entity/WidgetMap.php
+++ b/Bundle/WidgetMapBundle/Entity/WidgetMap.php
@@ -249,7 +249,17 @@ class WidgetMap
foreach ($childs as $_child) {
// found child must belongs to the given view or one of it's templates
if ($view) {
- if ($_child->getView() && ($view == $_child->getView() || $_child->getView()->isTemplateOf($view))) {
+ // if child has a view
+ // and child view is same as given view or the child view is a template of given view
+ if ($_child->getView() && ($view == $_child->getView() || $_child->getView()->isTemplateOf($view))
+ ) {
+ // if child is a substitute in view
+ if ($substitute = $_child->getSubstituteForView($view)) {
+ // if i'm not the parent of the substitute or i does not have the same position, child is not valid
+ if ($substitute->getParent() != $this || $substitute->getPosition() != $position) {
+ $_child = null;
+ }
+ }
$children[$position] = $_child;
}
} else {
@@ -257,6 +267,7 @@ class WidgetMap
}
}
}
+
if (!$children[$position]
&& ($replaced = $this->getReplaced())
&& !empty($this->getReplaced()->getChilds($position))) {
@@ -264,6 +275,14 @@ class WidgetMap
foreach ($this->getReplaced()->getChilds($position) as $_child) {
if ($view) {
if ($_child->getView() && ($view == $_child->getView() || $_child->getView()->isTemplateOf($view))) {
+
+ // if child is a substitute in view
+ if ($substitute = $_child->getSubstituteForView($view)) {
+ // if i'm not the parent of the substitute or i does not have the same position, child is not valid
+ if ($substitute->getParent() != $this || $substitute->getPosition() != $position) {
+ $_child = null;
+ }
+ }
$children[$position] = $_child;
}
} else {
@@ -276,9 +295,17 @@ class WidgetMap
return $children;
}
- public function hasChild($position)
+ /**
+ * @return mixed
+ */
+ public function getChildrenRaw()
+ {
+ return $this->children;
+ }
+
+ public function hasChild($position, View $view = null)
{
- foreach ($this->getChildren() as $child) {
+ foreach ($this->getChildren($view) as $child) {
if ($child && $child->getPosition() === $position) {
return true;
}
@@ -314,7 +341,6 @@ class WidgetMap
}
}
-
return $childs;
}
@@ -333,7 +359,6 @@ class WidgetMap
public function removeChildren()
{
foreach ($this->children as $child) {
- $child->setParent(null);
$this->removeChild($child);
}
} | when getting widgetmap children, check for substitutes | Victoire_victoire | train |
f107b981202adc515d80f584a887371399b2b927 | diff --git a/python/ccxt/base/exchange.py b/python/ccxt/base/exchange.py
index <HASH>..<HASH> 100644
--- a/python/ccxt/base/exchange.py
+++ b/python/ccxt/base/exchange.py
@@ -968,6 +968,15 @@ class Exchange(object):
return None
@staticmethod
+ def rfc2616(self, timestamp=None):
+ if timestamp is None:
+ ts = datetime.datetime.now()
+ else:
+ ts = timestamp
+ stamp = mktime(ts.timetuple())
+ return format_date_time(stamp)
+
+ @staticmethod
def dmy(timestamp, infix='-'):
utc_datetime = datetime.datetime.utcfromtimestamp(int(round(timestamp / 1000)))
return utc_datetime.strftime('%m' + infix + '%d' + infix + '%Y') | exchange.py restore rfc<I> | ccxt_ccxt | train |
aa7a25706641a45e639787129e4b617aee075994 | diff --git a/lib/basic_app/core/hash.rb b/lib/basic_app/core/hash.rb
index <HASH>..<HASH> 100644
--- a/lib/basic_app/core/hash.rb
+++ b/lib/basic_app/core/hash.rb
@@ -1,7 +1,13 @@
class Hash
- # sorted yaml suitable for configuration files
- def to_conf( opts = {} )
+ # YAML suitable for configuration files
+ #
+ # returns sorted YAML if Ruby 1.8
+ # returns insertion ordered YAML on Ruby 1.9+
+ def to_conf
+ return to_yaml unless RUBY_VERSION =~ /^1.8/
+
+ opts = {}
YAML::quick_emit( object_id, opts ) do |out|
out.map( taguri, to_yaml_style ) do |map|
sorted_keys = keys
diff --git a/spec/basic_app/core_spec.rb b/spec/basic_app/core_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/basic_app/core_spec.rb
+++ b/spec/basic_app/core_spec.rb
@@ -39,12 +39,23 @@ describe "Core" do
end
describe 'to_conf' do
- it "should convert a hash of symbolized keys to sorted YAML" do
- @hash_symbols.to_conf.should == "--- \n:options: \n :verbose: false\n:repos: \n :repo1: \n :path: something\n:zebras: true\n"
- end
- it "should convert a hash of stringified keys to sorted YAML" do
- @hash_strings.to_conf.should == "--- \noptions: \n verbose: false\nrepos: \n repo1: \n path: something\nzebras: true\n"
+ if RUBY_VERSION =~ /^1.8/
+ it "should convert a hash of symbolized keys to sorted YAML" do
+ @hash_symbols.to_conf.should == "--- \n:options: \n :verbose: false\n:repos: \n :repo1: \n :path: something\n:zebras: true\n"
+ end
+
+ it "should convert a hash of stringified keys to sorted YAML" do
+ @hash_strings.to_conf.should == "--- \noptions: \n verbose: false\nrepos: \n repo1: \n path: something\nzebras: true\n"
+ end
+ else
+ it "should convert a hash of symbolized keys to insertion order YAML" do
+ @hash_symbols.to_conf.should == "---\n:zebras: true\n:options:\n :verbose: false\n:repos:\n :repo1:\n :path: something\n"
+ end
+
+ it "should convert a hash of stringified keys to insertion order YAML" do
+ @hash_strings.to_conf.should == "---\nzebras: true\noptions:\n verbose: false\nrepos:\n repo1:\n path: something\n"
+ end
end
end | Hash.to_conf now does insertion order YAML on <I>
On Ruby <I>, to_conf still outputs sorted YAML. Insertion order
is preferred and comes free with Ruby <I> | robertwahler_repo_manager | train
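A small illustrative snippet (not taken from the gem) of why no extra work is needed on newer Rubies: 1.9+ hashes preserve insertion order, so the default #to_yaml already emits keys in the order they were added:

```ruby
require 'yaml'

h = {}
h['zebras']  = true
h['options'] = { 'verbose' => false }

# On Ruby 1.9+ the Hash remembers insertion order, so to_yaml keeps it:
puts h.to_yaml
# ---
# zebras: true
# options:
#   verbose: false
```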
3224b4cd21433b06be767582504795f40f5c43b7 | diff --git a/lib/feathers-mongo-collections.js b/lib/feathers-mongo-collections.js
index <HASH>..<HASH> 100644
--- a/lib/feathers-mongo-collections.js
+++ b/lib/feathers-mongo-collections.js
@@ -40,6 +40,8 @@ module.exports = function(db) {
delete options.create;
// Remove the dbName from the collection name.
options.name = parseName(colls[n].name);
+ // Set the name as the _id.
+ options._id = options.name;
// Add it to the list.
collections.push(options);
@@ -82,7 +84,11 @@ module.exports = function(db) {
return callback({error:'name is required'});
}
this.db.createCollection(data.name, {}, function(err, collection){
- callback(null, {name:collection.collectionName});
+ var response = {
+ name:collection.collectionName,
+ _id:collection.collectionName
+ };
+ callback(null, data);
});
},
@@ -92,7 +98,11 @@ module.exports = function(db) {
// All other errors.
return callback({error:err.errmsg});
}
- callback(null, {name:data.name});
+ var response = {
+ name:data.name,
+ _id:data.name
+ };
+ callback(null, response);
});
},
@@ -101,7 +111,7 @@ module.exports = function(db) {
if (err) {
return callback(err.errmsg);
}
- callback(null, true);
+ callback(null, {_id:id});
});
}
}; | Return an _id and name. | marshallswain_feathers-mongo-collections | train |
30fd32f8eb0ef0ccbc53cd54d33e025477dbc15b | diff --git a/pyimagediet/diet.py b/pyimagediet/diet.py
index <HASH>..<HASH> 100644
--- a/pyimagediet/diet.py
+++ b/pyimagediet/diet.py
@@ -68,10 +68,10 @@ def check_configuration(config):
# Check all sections are there and contain dicts
for section in sections:
if section not in config:
- error_msg = 'Error: Section {} is missing.'.format(section)
+ error_msg = 'Error: Section {0} is missing.'.format(section)
raise ConfigurationErrorDietException(error_msg)
if not isinstance(config[section], dict):
- error_msg = 'Error: Section {} is malformed.'.format(section)
+ error_msg = 'Error: Section {0} is malformed.'.format(section)
raise ConfigurationErrorDietException(error_msg)
# Check every command has a corresponding parameters entry
@@ -87,12 +87,12 @@ def check_configuration(config):
for cmd in config['pipelines']:
pipeline = config['pipelines'][cmd]
if not isinstance(pipeline, list):
- error_msg = ('Error: Pipeline {} is malformed. Values have to '
+ error_msg = ('Error: Pipeline {0} is malformed. Values have to '
'be a list of command names.').format(cmd)
raise ConfigurationErrorDietException(error_msg)
for tool in pipeline:
if tool not in commands_cmds:
- error_msg = ('Error in pipeline {}. "{}" cannot be found '
+ error_msg = ('Error in pipeline {0}. "{1}" cannot be found '
'among commands listed in commands '
'section').format(cmd, tool)
raise ConfigurationErrorDietException(error_msg) | Python <I> .format needs numbered placeholders | samastur_pyimagediet | train |
57982167ea1e29ba6825eb5c315e3af0c6ed48cb | diff --git a/CHANGELOG b/CHANGELOG
index <HASH>..<HASH> 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -28,6 +28,7 @@ Version 1.1.8 work in progress
- Enh #2299: Added CAssetManager.newFileMode and newDirMode (Qiang)
- Enh #2325: Added $option parameter to CDbCommand::select() to support special SELECT syntax (Qiang)
- Enh #2357: Documented CWebApplication accessors with @property for better IDE autocomplete (Sam Dark)
+- Enh #2389: MessageCommand now accepts overwrite option determining if merge result will overwrite existing file (Sam Dark)
- Enh: XHR is now passed to CButtonColumn error JavaScript callback as a first argument (Sam Dark)
- Enh: Added CHttpSession::regenerateID() and improved CWebUser::changeIdentity() by regenerating session ID (Qiang)
- Enh: Added CActiveRecord::saveCounters() (Qiang)
diff --git a/framework/cli/commands/MessageCommand.php b/framework/cli/commands/MessageCommand.php
index <HASH>..<HASH> 100644
--- a/framework/cli/commands/MessageCommand.php
+++ b/framework/cli/commands/MessageCommand.php
@@ -38,7 +38,7 @@ PARAMETERS
returns an array of name-value pairs. Each name-value pair represents
a configuration option.
- The following options must be specified:
+ The following options are available:
- sourcePath: string, root directory of all source files.
- messagePath: string, root directory containing message translations.
@@ -56,6 +56,8 @@ PARAMETERS
- translator: the name of the function for translating messages.
Defaults to 'Yii::t'. This is used as a mark to find messages to be
translated.
+ - overwrite: if message file must be overwritten with the merged messages.
+
EOD;
}
@@ -84,6 +86,9 @@ EOD;
if(empty($languages))
$this->usageError("Languages cannot be empty.");
+ if(!isset($overwrite))
+ $overwrite = false;
+
$options=array();
if(isset($fileTypes))
$options['fileTypes']=$fileTypes;
@@ -103,7 +108,7 @@ EOD;
foreach($messages as $category=>$msgs)
{
$msgs=array_values(array_unique($msgs));
- $this->generateMessageFile($msgs,$dir.DIRECTORY_SEPARATOR.$category.'.php');
+ $this->generateMessageFile($msgs,$dir.DIRECTORY_SEPARATOR.$category.'.php',$overwrite);
}
}
}
@@ -126,7 +131,7 @@ EOD;
return $messages;
}
- protected function generateMessageFile($messages,$fileName)
+ protected function generateMessageFile($messages,$fileName,$overwrite)
{
echo "Saving messages to $fileName...";
if(is_file($fileName))
@@ -160,7 +165,8 @@ EOD;
$todo[$message]='@@'.$translation.'@@';
}
$merged=array_merge($todo,$merged);
- $fileName.='.merged';
+ if($overwrite === false)
+ $fileName.='.merged';
echo "translation merged.\n";
}
else
diff --git a/framework/messages/config.php b/framework/messages/config.php
index <HASH>..<HASH> 100644
--- a/framework/messages/config.php
+++ b/framework/messages/config.php
@@ -8,6 +8,7 @@ return array(
'messagePath'=>dirname(__FILE__).DIRECTORY_SEPARATOR.'..'.DIRECTORY_SEPARATOR.'messages',
'languages'=>array('zh_cn','zh_tw','de','el','es','sv','he','nl','pt','ru','it','fr','ja','pl','hu','ro','id','vi','bg','lv','sk'),
'fileTypes'=>array('php'),
+ 'overwrite'=>true,
'exclude'=>array(
'.svn',
'yiilite.php', | (Fixes issue <I>) MessageCommand now accepts overwrite option determining if merge result will overwrite existing file | yiisoft_yii | train |
36157bbecc0c9d42fb95f6ae1a3ea2a523917b82 | diff --git a/Tests/IntegrationTests/contentModule.js b/Tests/IntegrationTests/contentModule.js
index <HASH>..<HASH> 100644
--- a/Tests/IntegrationTests/contentModule.js
+++ b/Tests/IntegrationTests/contentModule.js
@@ -303,7 +303,7 @@ test('Can create a new page', async t => {
.switchToMainWindow();
});
-test.only('Can create content node from inside InlineUI', async t => {
+test('Can create content node from inside InlineUI', async t => {
const headlineTitle = 'Helloworld!';
subSection('Create a headline node');
await waitForIframeLoading(t);
@@ -371,7 +371,7 @@ test('Can edit the page title via inspector', async t => {
.expect(Selector('#neos-UnappliedChangesDialog').exists).notOk();
});
-test.only('Can crop an image', async t => {
+test('Can crop an image', async t => {
await waitForIframeLoading(t);
await t.switchToIframe('[name="neos-content-main"]'); | Remove .only (they have no effect) | neos_neos-ui | train |
9ce804713095b618f25a892902d050b856096bc2 | diff --git a/util/multi_dict.py b/util/multi_dict.py
index <HASH>..<HASH> 100644
--- a/util/multi_dict.py
+++ b/util/multi_dict.py
@@ -1,5 +1,4 @@
import numpy as np
-import scipy.stats
import util.math.sort
import util.io.object
@@ -9,8 +8,11 @@ logger = util.logging.logger
def _isdict(d):
- from blist import sorteddict
- return isinstance(d, dict) or isinstance(d, sorteddict)
+ if isinstance(d, dict):
+ return True
+ else:
+ from blist import sorteddict
+ return isinstance(d, sorteddict)
class MultiDict():
@@ -126,9 +128,12 @@ class MultiDict():
def append_value(self, key, value):
self._get_or_init_value_list(key).append(value)
-
def _add_value_lists(self, keys, value_lists, add_function):
assert callable(add_function)
+ if keys is None:
+ keys = []
+ if value_lists is None:
+ value_lists = []
if len(keys) != len(value_lists):
raise ValueError('Len of keys {} and len of values {} have to be the same!'.format(len(keys), len(value_lists)))
@@ -156,7 +161,7 @@ class MultiDict():
logger.debug('Removing value {} for key {}.'.format(value, key))
value_list = self.get_value_list(key)
n = len(value_list)
- value_list[:] = [v for x in value_list if not np.all(np.isclose(v, value))]
+ value_list[:] = [v for v in value_list if not np.all(np.isclose(v, value))]
if len(value_list) == n:
raise KeyError('Value {} was not deposited for key {}.'.format(value, key))
#TODO remove dict entries if list is empty
@@ -313,9 +318,9 @@ class MultiDict():
sorted = True
m = self.new_like(sorted=sorted)
if return_type == 'multi_dict_unsorted':
- m = Multi_Dict(sorted=False)
+ m = MultiDict(sorted=False)
if return_type == 'multi_dict_sorted':
- m = Multi_Dict(sorted=True)
+ m = MultiDict(sorted=True)
m.extend_value_lists(keys, value_lists)
# try:
@@ -364,7 +369,7 @@ class MultiDict():
for (key, value) in value_dict.items():
total_key = key_prefix + (key,)
if isinstance(value, value_dict_type):
- yield from self._iterate_generator_value_dict(value, value_dict_type, key_prefix=total_key)
+ yield from self._iterate_generator_value_dict(value, value_dict_type=value_dict_type, key_prefix=total_key)
else:
yield (total_key, value)
@@ -568,7 +573,7 @@ class MultiDict():
def variances(self, min_values=3, min_variance=0, return_type='array'):
- logger.debug('Calculate variances of values with at least {} values with mininmal variance {}.'.format(min_values, min_variance))
+ logger.debug('Calculate variances of values with at least {} values with minimal variance {}.'.format(min_values, min_variance))
def calculate_variance(values):
mean = np.average(values)
@@ -581,7 +586,7 @@ class MultiDict():
def standard_deviations(self, min_values=3, min_deviation=0, return_type='array'):
- logger.debug('Calculate deviations of values with at least {} values with mininmal deviation {}.'.format(min_values, min_deviation))
+ logger.debug('Calculate standard deviations of values with at least {} values with minimal deviation {}.'.format(min_values, min_deviation))
def calculate_deviation(values):
mean = np.average(values)
@@ -599,6 +604,7 @@ class MultiDict():
def dagostino_pearson_test(self, min_values=50, alpha=0.05, return_type='array'):
logger.debug('Calculate D´Agostino-Person-test for normality of values with minimal {} values with alpha {}.'.format(min_values, alpha))
+ import scipy.stats
test_values = self.iterate_values(lambda x: scipy.stats.normaltest(x)[1], min_values, return_type=return_type)
@@ -614,6 +620,7 @@ class MultiDict():
def shapiro_wilk_test(self, min_values=50, alpha=0.05, return_type='array'):
logger.debug('Calculate Shapiro-Wilk-test for normality of values with minimal {} values with alpha {}.'.format(min_values, alpha))
+ import scipy.stats
test_values = self.iterate_values(lambda x: scipy.stats.shapiro(x)[1], min_values, return_type=return_type)
@@ -629,6 +636,7 @@ class MultiDict():
def anderson_darling_test(self, min_values=50, alpha=0.05, return_type='array'):
logger.debug('Calculate Anderson-Darling-test for normality of values with minimal {} values with alpha {}.'.format(min_values, alpha))
+ import scipy.stats
def test(x, alpha):
## get test values | MAINT: util.multi_dict: imports of 'blist' and 'scipy' not global anymore -> moved where needed | jor-_util | train |
a9937a9485fb61af6f6e625346f3b506d4cb4154 | diff --git a/lib/podoff.rb b/lib/podoff.rb
index <HASH>..<HASH> 100644
--- a/lib/podoff.rb
+++ b/lib/podoff.rb
@@ -435,62 +435,6 @@ module Podoff
r ? r.to_i : nil
end
-# def parent
-#
-# r = @attributes[:parent]
-# r ? r[0..-2].strip : nil
-# end
-#
-# def kids
-#
-# r = @attributes[:kids]
-# (r || '').split(/[\[\]R]/).collect(&:strip).reject(&:empty?)
-# end
-#
-# def contents
-#
-# r = @attributes[:contents]
-# (r || '').split(/[\[\]R]/).collect(&:strip).reject(&:empty?)
-# end
-
-# def add_annotation(ref)
-#
-# if annots = @attributes[:annots]
-# fail "implement me!"
-# else
-# i = @source.index('/Type ')
-# @source.insert(i, "/Annots [#{ref} R]\n")
-# end
-# recompute_attributes
-# end
-
-# def add_free_text(x, y, text, font, size)
-#
-# fail ArgumentError.new('target is not a page') unless type == '/Page'
-#
-# nref = document.new_ref
-#
-# s = [
-# "#{nref} obj <<",
-# "/Type /Annot",
-# "/Subtype /FreeText",
-# "/Da (/F1 70 Tf 0 100 Td)",
-# "/Rect [0 0 500 600]",
-# "/Contents (#{text})",
-# ">>",
-# "endobj"
-# ].join("\n")
-# anno = Obj.create(document, nref, s)
-#
-# page = self.replicate
-# page.add_annotation(nref)
-#
-# document.add(anno)
-# document.add(page)
-#
-# anno
-# end
-
def insert_font(nick, obj_or_ref)
fail ArgumentError.new("target '#{ref}' not a replica") \
@@ -566,10 +510,6 @@ module Podoff
@content = StringIO.new
end
- #def document; obj.document; end
- #def ref; obj.ref; end
- #def source; self; end
-
def tf(font_name, font_size)
n = font_name[0] == '/' ? font_name[1..-1] : font_name | remove commented out, unused, code | jmettraux_podoff | train |
c2141a2105a085428e375ee20d7bc0bd85c70555 | diff --git a/ReText/editor.py b/ReText/editor.py
index <HASH>..<HASH> 100644
--- a/ReText/editor.py
+++ b/ReText/editor.py
@@ -139,8 +139,8 @@ class ReTextEdit(QTextEdit):
self.setFont(globalSettings.editorFont)
metrics = self.fontMetrics()
self.marginx = (int(self.document().documentMargin())
- + metrics.width(' ' * globalSettings.rightMargin))
- self.setTabStopWidth(globalSettings.tabWidth * self.fontMetrics().width(' '))
+ + metrics.horizontalAdvance(' ' * globalSettings.rightMargin))
+ self.setTabStopWidth(globalSettings.tabWidth * metrics.horizontalAdvance(' '))
self.updateLineNumberAreaWidth()
self.infoArea.updateTextAndGeometry()
self.updateTextStatistics()
@@ -353,7 +353,7 @@ class ReTextEdit(QTextEdit):
digits = len(str(cursor.blockNumber())) + 1
else:
digits = len(str(cursor.blockNumber() + 1))
- return 5 + self.fontMetrics().width('9') * digits
+ return 5 + self.fontMetrics().horizontalAdvance('9') * digits
def updateLineNumberAreaWidth(self, blockcount=0):
self.setViewportMargins(self.lineNumberAreaWidth(), 0, 0, 0)
@@ -548,7 +548,7 @@ class InfoArea(QLabel):
def getAreaSize(self, text):
metrics = self.fontMetrics()
- width = metrics.width(text)
+ width = metrics.horizontalAdvance(text)
height = metrics.height()
return width, height | Replace QFontMetrics::width with QFontMetrics::horizontalAdvance
Which is its new name since Qt <I>. | retext-project_retext | train |
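As an aside, a hypothetical compatibility sketch (not what this commit does; the commit switches over unconditionally, and the helper name here is invented) showing a fallback approach for code that must also run against older Qt builds:

```python
def text_width(metrics, text):
    # Prefer the newer QFontMetrics.horizontalAdvance() when it exists.
    if hasattr(metrics, 'horizontalAdvance'):
        return metrics.horizontalAdvance(text)
    # Older Qt releases only provide the since-deprecated width().
    return metrics.width(text)
```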
513f218a75b3b95e5cd8fde333bc6a1217972c2c | diff --git a/lib/helper/WebDriverIO.js b/lib/helper/WebDriverIO.js
index <HASH>..<HASH> 100644
--- a/lib/helper/WebDriverIO.js
+++ b/lib/helper/WebDriverIO.js
@@ -779,7 +779,8 @@ class WebDriverIO extends Helper {
*/
seeNumberOfVisibleElements(selector, num) {
return this.browser.isVisible(withStrictLocator(selector))
- .then(function (res) {
+ .then(function(res) {
+ if(!Array.isArray(res)) res = [res];
res = res.filter((val) => val == true);
return truth(`elements of ${locator}`, 'to be seen').assert.equal(res.length, num);
}); | Fix on seeNumberOfVisibleElements to avoid undefined array.length (#<I>)
Just remembered that isVisible returns true or false if can't find multiple elements.
With this fix it will indeed return 0 if no elements found and 1 if only 1 element found. | Codeception_CodeceptJS | train |
0d3ea9f1fa630ce985c01eff6dd700c1e0d7bc08 | diff --git a/examples/retry.js b/examples/retry.js
index <HASH>..<HASH> 100644
--- a/examples/retry.js
+++ b/examples/retry.js
@@ -24,16 +24,16 @@ var SplunkLogger = require("../index").Logger;
/**
* Only the token property is required.
*
- * Here we've set maxRetries to 5,
+ * Here we've set maxRetries to 10,
* If there are any connection errors the request to Splunk will
- * be retried up to 5 times.
+ * be retried up to 10 times.
* The default is 0.
*/
var config = {
token: "your-token-here",
url: "https://localhost:8088",
level: "info",
- maxRetries: 5
+ maxRetries: 10
};
// Create a new logger | Change retry example to retry <I> times | splunk_splunk-javascript-logging | train |
391b76af9ac2ccf7b06f223928f991695b3c2cd3 | diff --git a/harpoon/ship/runner.py b/harpoon/ship/runner.py
index <HASH>..<HASH> 100644
--- a/harpoon/ship/runner.py
+++ b/harpoon/ship/runner.py
@@ -93,6 +93,8 @@ class Runner(object):
def wait_for_deps(self, conf, images):
"""Wait for all our dependencies"""
from harpoon.option_spec.image_objs import WaitCondition
+ ctxt = conf.harpoon.docker_context_maker()
+
waited = set()
last_attempt = {}
dependencies = set(dep for dep, _ in conf.dependency_images())
@@ -118,7 +120,7 @@ class Runner(object):
image = images[dependency]
if dependency in wait_conditions:
- done = self.wait_for_dep(image, wait_conditions[dependency], start, last_attempt.get(dependency))
+ done = self.wait_for_dep(ctxt, image, wait_conditions[dependency], start, last_attempt.get(dependency))
this_round.append(done)
if done is True:
waited.add(dependency)
@@ -156,7 +158,7 @@ class Runner(object):
time.sleep(0.1)
- def wait_for_dep(self, conf, wait_condition, start, last_attempt):
+ def wait_for_dep(self, ctxt, conf, wait_condition, start, last_attempt):
"""Wait for this image"""
from harpoon.option_spec.image_objs import WaitCondition
conditions = list(wait_condition.conditions(start, last_attempt))
@@ -168,13 +170,13 @@ class Runner(object):
log.debug("Running condition\tcondition=%s", condition)
command = 'bash -c "{0}"'.format(condition)
try:
- exec_id = conf.harpoon.docker_context.exec_create(conf.container_id, command, tty=False)
+ exec_id = ctxt.exec_create(conf.container_id, command, tty=False)
except DockerAPIError as error:
log.error("Failed to run condition\tcondition=%s\tdependency=%s\terror=%s", condition, conf.name, error)
return False
- output = conf.harpoon.docker_context.exec_start(exec_id)
- inspection = conf.harpoon.docker_context.exec_inspect(exec_id)
+ output = ctxt.exec_start(exec_id)
+ inspection = ctxt.exec_inspect(exec_id)
exit_code = inspection["ExitCode"]
if exit_code != 0:
log.error("Condition says no\tcondition=%s\toutput:\n\t%s", condition, "\n\t".join(line for line in output.split('\n'))) | Making a new context for exec commands
So we can avoid Hijack errors | delfick_harpoon | train |
b123a132ef6f035187fae51c6b64fca0d5f8de1d | diff --git a/lib/squib/graphics/cairo_context_wrapper.rb b/lib/squib/graphics/cairo_context_wrapper.rb
index <HASH>..<HASH> 100644
--- a/lib/squib/graphics/cairo_context_wrapper.rb
+++ b/lib/squib/graphics/cairo_context_wrapper.rb
@@ -22,7 +22,7 @@ module Squib
:show_pango_layout, :rounded_rectangle, :set_line_width, :stroke, :fill,
:set_source, :scale, :render_rsvg_handle, :circle, :triangle, :line_to,
:operator=, :show_page, :clip, :transform, :mask, :create_pango_layout,
- :antialias=, :curve_to, :get_matrix
+ :antialias=, :curve_to, :matrix
# :nodoc:
# @api private
diff --git a/lib/squib/graphics/text.rb b/lib/squib/graphics/text.rb
index <HASH>..<HASH> 100644
--- a/lib/squib/graphics/text.rb
+++ b/lib/squib/graphics/text.rb
@@ -138,6 +138,7 @@ module Squib
cc.translate(x,y)
cc.rotate(angle)
cc.move_to(0, 0)
+ initial_matrix = cc.matrix.to_a
font_desc = Pango::FontDescription.new(font)
font_desc.size = font_size * Pango::SCALE unless font_size.nil?
@@ -161,12 +162,15 @@ module Squib
cc.move_to(0, vertical_start)
cc.update_pango_layout(layout)
+ before_show = cc.matrix.to_a
cc.show_pango_layout(layout)
begin
embed_draws.each { |ed| ed[:draw].call(self, ed[:x], ed[:y] + vertical_start) }
rescue Exception => e
puts "====EXCEPTION!===="
- puts "Cairo matrix: #{cc.get_matrix}"
+ puts "Initial matrix: #{initial_matrix}"
+ puts "Before show matrix: #{before_show}"
+ puts "Current matrix: #{cc.matrix.to_a}"
puts e
puts "=================="
end | Better debugging to fix this travis issue | andymeneely_squib | train |
5c979f415aa1b047398ee8820d5ba932d1d5a6ce | diff --git a/dirutility/__init__.py b/dirutility/__init__.py
index <HASH>..<HASH> 100644
--- a/dirutility/__init__.py
+++ b/dirutility/__init__.py
@@ -1,9 +1,10 @@
from dirutility.move import FlattenTree, CreateTree, move_files_to_folders
from dirutility.walk import DirPaths, DirTree
from dirutility.view import desktop, open_window
-from dirutility.ftp import FTP
from dirutility.backup import ZipBackup
+from dirutility.ftp import FTP
+from dirutility.permissions import Permissions
__all__ = ['FlattenTree', 'CreateTree', 'move_files_to_folders', 'DirTree', 'DirPaths', 'desktop', 'ZipBackup', 'FTP',
- 'open_window']
+ 'open_window', 'Permissions'] | ADD Permissions class to dirutility __all__ declaration | mrstephenneal_dirutility | train |
c1e0ea24561412b6bf9a706dbc725befeff1adfa | diff --git a/cyipopt/scipy_interface.py b/cyipopt/scipy_interface.py
index <HASH>..<HASH> 100644
--- a/cyipopt/scipy_interface.py
+++ b/cyipopt/scipy_interface.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cyipopt: Python wrapper for the Ipopt optimization package, written in Cython.
diff --git a/cyipopt/version.py b/cyipopt/version.py
index <HASH>..<HASH> 100644
--- a/cyipopt/version.py
+++ b/cyipopt/version.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cyipopt: Python wrapper for the Ipopt optimization package, written in Cython. | Remove shebang lines from “cyipopt” package modules
These files have no script-like content at all, so the shebangs were
unambiguously not useful. | matthias-k_cyipopt | train |
5f04aae415e803a51c5a9c625dd2c75c54167924 | diff --git a/dbt/adapters/redshift.py b/dbt/adapters/redshift.py
index <HASH>..<HASH> 100644
--- a/dbt/adapters/redshift.py
+++ b/dbt/adapters/redshift.py
@@ -60,7 +60,9 @@ class RedshiftAdapter(PostgresAdapter):
connection = cls.get_connection(profile, model_name)
- cls.commit(connection)
+ if connection.get('transaction_open'):
+ cls.commit(connection)
+
cls.begin(profile, connection.get('name'))
to_return = super(PostgresAdapter, cls).drop( | fix for commit/transaction error (#<I>)
Tried to commit transaction on connection "{model}", but it does not have one open! | fishtown-analytics_dbt | train |
604e4ce0cb14b8d96306f9e862203772c7619520 | diff --git a/lib/socket.js b/lib/socket.js
index <HASH>..<HASH> 100644
--- a/lib/socket.js
+++ b/lib/socket.js
@@ -560,6 +560,7 @@ Socket.prototype.onClose = function (reason, desc) {
setTimeout(function() {
self.writeBuffer = [];
self.callbackBuffer = [];
+ self.prevBufferLen = 0;
}, 0);
// ignore further transport communication | Don't lose packets written during upgrade after a re-open
After a close, reset the prevBufferLen along with the buffers,
so a drain event after upgrade won't remove any packets that have been
queued up during upgrading. | socketio_engine.io-client | train |
84bfe8b35ddd51d1b0efaf20a56aa118c288a86e | diff --git a/src/Ui/DataProvider/Customers.php b/src/Ui/DataProvider/Customers.php
index <HASH>..<HASH> 100644
--- a/src/Ui/DataProvider/Customers.php
+++ b/src/Ui/DataProvider/Customers.php
@@ -11,7 +11,7 @@ use Magento\Framework\App\RequestInterface;
use Magento\Framework\UrlInterface;
use Magento\Framework\View\Element\UiComponent\DataProvider\Reporting;
use Magento\Store\Model\StoreManagerInterface;
-use Praxigento\Core\Repo\Criteria\IAdapter as ICriteriaAdapter;
+use Praxigento\Core\Repo\Query\Criteria\IAdapter as ICriteriaAdapter;
use Praxigento\Core\Ui\DataProvider\Base as BaseDataProvider;
use Praxigento\Odoo\Repo\Agg\IWarehouse as IRepoAggWarehouse; | MOBI-<I> - Refactoring for Repo interfaces | praxigento_mobi_mod_downline | train |
f1b04d6ad535735855145aeb7459e10f671abc5c | diff --git a/handlers/jobs.js b/handlers/jobs.js
index <HASH>..<HASH> 100644
--- a/handlers/jobs.js
+++ b/handlers/jobs.js
@@ -378,7 +378,7 @@ function submitJob (job, callback) {
parameters: job.parameters ? job.parameters : {},
notifications: [
{
- url: config.url + ':' + config.port + '/api/v1/jobs/${JOB_ID}/results',
+ url: config.url + ':' + config.port + config.apiPrefix + 'jobs/${JOB_ID}/results',
event: '*',
persistent: true
} | Fix job callbacks to work with configurable apiPrefix. | OpenNeuroOrg_openneuro | train |
d9fa1d172d165367f1a233c70fe409af62f1444e | diff --git a/examples/events.js b/examples/events.js
index <HASH>..<HASH> 100644
--- a/examples/events.js
+++ b/examples/events.js
@@ -1,5 +1,6 @@
import $ from 'jquery'
-import React, { Component, PropTypes } from 'react'
+import React, { Component } from 'react'
+import { PropTypes } from 'prop-types'
import ReactDOM from 'react-dom'
import ReactCSSTransitionGroup from 'react-addons-css-transition-group' | chore: Update obsolete use of React PropTypes. | livingdocsIO_editable.js | train |
f9c0076cae98133406bfc04d87dcff5f3658b0f7 | diff --git a/src/main/kernel-specs.js b/src/main/kernel-specs.js
index <HASH>..<HASH> 100644
--- a/src/main/kernel-specs.js
+++ b/src/main/kernel-specs.js
@@ -1,3 +1,11 @@
+import { ipcMain as ipc } from 'electron';
+
+const KERNEL_SPECS = {};
+
export default function saveKernelspecs(kernelSpecs) {
- global.KERNEL_SPECS = kernelSpecs;
+ Object.assign(KERNEL_SPECS, kernelSpecs);
}
+
+ipc.on('kernel_specs_request', (event) => {
+ event.sender.send('kernel_specs_reply', KERNEL_SPECS);
+});
diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js
index <HASH>..<HASH> 100644
--- a/src/notebook/epics/kernel-launch.js
+++ b/src/notebook/epics/kernel-launch.js
@@ -5,7 +5,6 @@ import { launchSpec } from 'spawnteract';
import * as uuid from 'uuid';
import {
- remote,
ipcRenderer as ipc,
} from 'electron';
@@ -121,6 +120,20 @@ export const watchExecutionStateEpic = action$ =>
);
/**
+ * Get kernel specs from main process
+ *
+ * @returns {Observable} The reply from main process
+ */
+export const kernelSpecsObservable =
+ Rx.Observable.create((observer) => {
+ ipc.send('kernel_specs_request');
+ ipc.on('kernel_specs_reply', (event, specs) => {
+ observer.next(specs);
+ observer.complete();
+ });
+ });
+
+/**
* Gets information about newly launched kernel.
*
* @param {ActionObservable} The action type
@@ -142,10 +155,11 @@ export const newKernelByNameEpic = action$ =>
throw new Error('newKernelByNameEpic requires a kernel name');
}
})
- .mergeMap((action) => {
- const spec = remote.getGlobal('KERNEL_SPECS')[action.kernelSpecName];
- return Rx.Observable.of(newKernel(spec, action.cwd));
- }
+ .mergeMap(action =>
+ kernelSpecsObservable
+ .mergeMap(specs =>
+ Rx.Observable.of(newKernel(specs[action.kernelSpecName], action.cwd))
+ )
);
/**
diff --git a/test/setup.js b/test/setup.js
index <HASH>..<HASH> 100644
--- a/test/setup.js
+++ b/test/setup.js
@@ -117,14 +117,6 @@ mock('electron', {
'setDocumentEdited': function(){},
'setRepresentedFilename': function() {},
};
- },
- 'getGlobal': function() {
- return {
- python3: {
- name: 'python3',
- spec: { argv: {}, display_name: 'Python 3', language: 'python' }
- },
- }
}
},
'webFrame': {
@@ -132,7 +124,17 @@ mock('electron', {
'getZoomLevel': function() { return 1; },
},
'ipcRenderer': {
- 'on': function() {},
+ 'on': function(message, callback) {
+ if (message === 'kernel_specs_reply') {
+ const specs = {
+ python3: {
+ name: 'python3',
+ spec: { argv: {}, display_name: 'Python 3', language: 'python' }
+ },
+ };
+ callback(null, specs);
+ }
+ },
'send': function(message, action) {}
},
}); | chore(kernels): Use async ipc calls | nteract_nteract | train |
447798ab0853fd164599feab154505ffc1f37e2a | diff --git a/py/pysparkling/context.py b/py/pysparkling/context.py
index <HASH>..<HASH> 100644
--- a/py/pysparkling/context.py
+++ b/py/pysparkling/context.py
@@ -81,7 +81,7 @@ class H2OContext(object):
# explicitly check if we run on databricks cloud since there we must add the jar to the parent of context class loader
if cl.getClass().getName()=='com.databricks.backend.daemon.driver.DriverLocal$DriverLocalClassLoader':
- cl.getParent().addURL(url)
+ cl.getParent().getParent().addURL(url)
else:
cl.addURL(url) | [SW-<I>][PySparkling] Fix classloading of sparkling water JAR in Databricks | h2oai_sparkling-water | train
7b51e0ad562f9514f7327effa93d00e48fdb4e6f | diff --git a/ph-schematron/src/main/java/com/helger/schematron/pure/validation/xpath/PSXPathValidationHandlerSVRL.java b/ph-schematron/src/main/java/com/helger/schematron/pure/validation/xpath/PSXPathValidationHandlerSVRL.java
index <HASH>..<HASH> 100644
--- a/ph-schematron/src/main/java/com/helger/schematron/pure/validation/xpath/PSXPathValidationHandlerSVRL.java
+++ b/ph-schematron/src/main/java/com/helger/schematron/pure/validation/xpath/PSXPathValidationHandlerSVRL.java
@@ -168,7 +168,8 @@ public class PSXPathValidationHandlerSVRL implements IPSValidationHandler
aRetRule.setContext (sContext);
aRetRule.setFlag (aRule.getFlag ());
aRetRule.setId (aRule.getID ());
- // TODO role
+ if (aRule.hasLinkable ())
+ aRetRule.setRole (aRule.getLinkable ().getRole ());
m_aSchematronOutput.getActivePatternAndFiredRuleAndFailedAssert ().add (aRetRule);
} | Fixing missing role; #<I> | phax_ph-schematron | train |
570b67e7028720ef9a605db84118858592906068 | diff --git a/lib/request_log_analyzer/file_format/oink.rb b/lib/request_log_analyzer/file_format/oink.rb
index <HASH>..<HASH> 100644
--- a/lib/request_log_analyzer/file_format/oink.rb
+++ b/lib/request_log_analyzer/file_format/oink.rb
@@ -85,7 +85,7 @@ class RequestLogAnalyzer::FileFormat::Oink < RequestLogAnalyzer::FileFormat::Rai
count_strings = value.split(' | ')
count_arrays = count_strings.map do |count_string|
if count_string =~ /^(\w+): (\d+)/
- [$1.downcase, $2.to_i]
+ [$1, $2.to_i]
end
end | Fixed oink instance counter spec. Strings really weren't supposed to be downcased. | wvanbergen_request-log-analyzer | train |
f788d493aa0eadfaa46eec891919221a2275ac97 | diff --git a/test/enumerated_field_test.rb b/test/enumerated_field_test.rb
index <HASH>..<HASH> 100644
--- a/test/enumerated_field_test.rb
+++ b/test/enumerated_field_test.rb
@@ -1,43 +1,5 @@
require File.dirname(__FILE__) + '/test_helper'
-class Apple
- include EnumeratedField
-
- attr_accessor :color, :kind
-
- enum_field :color, [['Red', :red], ['Green', :green]], :validate => false
- enum_field :kind, [['Fuji Apple', :fuji], ['Delicious Red Apple', :delicious]], :validate => false
-
- def initialize(color, kind)
- self.color = color
- self.kind = kind
- end
-
-end
-
-class Banana
- include EnumeratedField
- include ActiveModel::Validations
-
- attr_accessor :brand
- attr_accessor :color
- attr_accessor :tastiness
-
- enum_field :brand, [["Chiquita", :chiquita], ["Del Monte", :delmonte]]
- enum_field :color, [["Awesome Yellow", :yellow], ["Icky Green", :green]], :allow_nil => true
- # stressing the constantizing of the keys
- enum_field :tastiness, [
- ["Great", "great!"],
- ["Good", "it's good"],
- ["Bad", "hate-hate"],
- ], :validate => false
-
- def initialize(brand, color)
- self.brand = brand
- self.color = color
- end
-end
-
class EnumeratedFieldTest < Test::Unit::TestCase
context 'EnumeratedField class' do
@@ -64,13 +26,26 @@ class EnumeratedFieldTest < Test::Unit::TestCase
assert_equal "it's good", Banana::TASTINESS_IT_S_GOOD
assert_equal "hate-hate", Banana::TASTINESS_HATE_HATE
end
+
+ context 'that subclasses ActiveRecord::Base' do
+ subject { Apple }
+
+ should 'have scopes for each enumerated value' do
+ assert_equal 4, Apple.count
+ Apple.color_values.each do |a|
+ assert Apple.respond_to? "color_#{a[1]}"
+ assert Apple.send("color_#{a[1]}").any?
+ end
+ end
+ end
+
end
context 'EnumeratedField instance' do
setup do
- @red_apple = Apple.new(:red, :fuji)
- @green_apple = Apple.new(:green, :delicious)
+ @red_apple = Apple.new(:color => :red, :kind => :fuji)
+ @green_apple = Apple.new(:color => :green, :kind => :delicious)
end
should 'have color_display method' do
@@ -125,7 +100,7 @@ class EnumeratedFieldTest < Test::Unit::TestCase
should 'not occur if passed :validate => false' do
# no validations, accepts any choice
- apple = Apple.new(:orange, :macintosh)
+ apple = Apple.new(:color => :orange, :kind => :macintosh)
assert !apple.respond_to?(:valid)
end | Changed Apple to be an ActiveRecord model. Moved models to test_helper. Loading fixtures. | sportngin_enumerated_field | train |
b58ab3703276286dd64f56c6020f19e57093828f | diff --git a/star.py b/star.py
index <HASH>..<HASH> 100644
--- a/star.py
+++ b/star.py
@@ -40,7 +40,7 @@ def lightcurve(filename, min_obs=25, min_period=0.2, max_period=32.,
data = numpy.ma.masked_array(data=numpy.loadtxt(filename), mask=None)
while True: # Iteratively process and find models of the data
if get_signal(data).shape[0] < min_obs:
- if options.verbose:
+ if options["verbose"]:
print(name + " has too few observations - None")
return None
period = find_period(data, min_period, max_period, coarse_precision,
diff --git a/utils.py b/utils.py
index <HASH>..<HASH> 100644
--- a/utils.py
+++ b/utils.py
@@ -28,7 +28,6 @@ def get_files(directory, format):
total, progress = 0, 0
def map_reduce(func, args, verbose_init, callback, options):
- assert False, str(type(options))
if options.verbose:
verbose_init(len(args))
results = [] | Removing assertion debugging and possibly fixing bug | astroswego_plotypus | train |
5f0fa5388ea3b5d9111c7fe8273bad654a8fe088 | diff --git a/lib/poolparty/core/hash.rb b/lib/poolparty/core/hash.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/core/hash.rb
+++ b/lib/poolparty/core/hash.rb
@@ -13,8 +13,15 @@ class Hash
end
end
- def value_at(*indecies)
- indecies.collect {|i| self[i] }
+ # extracted from activesupport
+ # Returns an array of the values at the specified indices:
+ #
+ # hash = HashWithIndifferentAccess.new
+ # hash[:a] = "x"
+ # hash[:b] = "y"
+ # hash.values_at("a", "b") # => ["x", "y"]
+ def values_at(*indices)
+ indices.collect {|key| self[key]}
end
#TODO: deprecate
diff --git a/lib/poolparty/monitors/stats_monitor_adaptor.rb b/lib/poolparty/monitors/stats_monitor_adaptor.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/monitors/stats_monitor_adaptor.rb
+++ b/lib/poolparty/monitors/stats_monitor_adaptor.rb
@@ -20,13 +20,16 @@ module Butterfly
r = Aska::Rule.new(rul)
rule(name) << r
end
- first_put
+ fork_and_put
end
#TODO: first packet should be a post
def first_put(time_to_wait=60)
+ puts " waiting #{time_to_wait} seconds for a put, otherwise initiating. #{stats.inspect}"
sleep time_to_wait #lets see if we receive a stats update before puting a new one
if stats=={my_ip => {}}
+ puts "Initiating first put"
+ touch ''
fork_and_put
end
end
diff --git a/lib/poolparty/plugins/chef.rb b/lib/poolparty/plugins/chef.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/plugins/chef.rb
+++ b/lib/poolparty/plugins/chef.rb
@@ -129,7 +129,6 @@ file_cache_path "/etc/chef"
end
def after_create
- puts "Called after_create in chef"
before_configure
end
diff --git a/vendor/gems/butterfly b/vendor/gems/butterfly
index <HASH>..<HASH> 160000
--- a/vendor/gems/butterfly
+++ b/vendor/gems/butterfly
@@ -1 +1 @@
-Subproject commit 9ac9a38a59b43fed34d16b4d828aa6b52fe249c0
+Subproject commit 2e81918ae2943cd6ba5cebef11ca24f3be7dfd53 | call fork_and_put on initialization of stats_monitor to ensure that the chain of communication is begun.
Update butterfly submodule | auser_poolparty | train |
8dd9481fe5618bf95feb0831dce6dbc51506be8e | diff --git a/includes/class-bitbucket-api.php b/includes/class-bitbucket-api.php
index <HASH>..<HASH> 100644
--- a/includes/class-bitbucket-api.php
+++ b/includes/class-bitbucket-api.php
@@ -77,7 +77,7 @@ class GitHub_Updater_Bitbucket_API extends GitHub_Updater {
$allowed_codes = array( 200, 404 );
if ( is_wp_error( $response ) ) { return false; }
- if ( ! in_array( $code, $allowed_codes, true ) ) { return false; }
+ if ( ! in_array( $code, $allowed_codes, false ) ) { return false; }
return json_decode( wp_remote_retrieve_body( $response ) );
} | strict checking seems to fail on some environments even if the returned code is ok | afragen_github-updater | train |
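A minimal illustration of the failure mode hinted at here (assumed scenario, not code from the plugin): if an environment hands the HTTP status code back as a string, a strict in_array() check against an integer whitelist never matches.

```php
<?php
$allowed_codes = array( 200, 404 );
$code          = '200'; // some environments return the response code as a string

var_dump( in_array( $code, $allowed_codes, true ) );  // bool(false), strict check: type mismatch
var_dump( in_array( $code, $allowed_codes, false ) ); // bool(true), loose comparison matches
```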
32ce1f6c1bd8b4c4103b6bd101b9db516d4d807e | diff --git a/salt/utils/master.py b/salt/utils/master.py
index <HASH>..<HASH> 100644
--- a/salt/utils/master.py
+++ b/salt/utils/master.py
@@ -72,17 +72,7 @@ class MasterPillarUtil(object):
use_cached_pillar=True,
grains_fallback=True,
pillar_fallback=True,
- opts=None,
- env=None):
- if env is not None:
- salt.utils.warn_until(
- 'Carbon',
- 'Passing a salt environment should be done using \'saltenv\' '
- 'not \'env\'. This functionality will be removed in Salt '
- 'Carbon.'
- )
- # Backwards compatibility
- saltenv = env
+ opts=None):
log.debug('New instance of {0} created.'.format(
self.__class__.__name__)) | salt/utils/master.py: remove env support | saltstack_salt | train |
8662a0630ab0fba7e3232b620c324ec1961fa341 | diff --git a/src/feat/agents/integrity/api.py b/src/feat/agents/integrity/api.py
index <HASH>..<HASH> 100644
--- a/src/feat/agents/integrity/api.py
+++ b/src/feat/agents/integrity/api.py
@@ -2,7 +2,7 @@ import operator
from feat.agents.integrity import integrity_agent
from feat.common import defer
-from feat.database import conflicts
+from feat.database import conflicts, update
from feat.gateway.application import featmodels
from feat.gateway import models
from feat.models import model, value, call, getter, response, action, effect
@@ -86,6 +86,28 @@ class Replication(model.Model):
call.model_perform('pause'),
response.done('done')],
result_info=value.Response()))
+ model.delete('del',
+ effect.context_value('key'),
+ call.model_perform('delete'),
+ response.deleted("Replication deleted"),
+ label="Delete",
+ desc=("Delete all the replication documents"))
+
+ def delete(self, value):
+ state = self.source._get_state()
+ connection = state.replicator
+ d = connection.query_view(conflicts.Replications,
+ key=('target', value))
+
+ def delete_all(replications):
+ d = defer.succeed(None)
+ for key, value, doc_id in replications:
+ d.addCallback(defer.drop_param, connection.update_document,
+ doc_id, update.delete)
+ return d
+
+ d.addCallback(delete_all)
+ return d
def pause(self, value):
if not self.continuous:
diff --git a/src/feat/test/test_agents_integrity_agent.py b/src/feat/test/test_agents_integrity_agent.py
index <HASH>..<HASH> 100644
--- a/src/feat/test/test_agents_integrity_agent.py
+++ b/src/feat/test/test_agents_integrity_agent.py
@@ -4,6 +4,7 @@ from feat.common import defer
from feat.database import conflicts, emu
from feat.test import common, dummies
from feat.test.integration.common import ModelTestMixin
+from feat.models import response
from feat.models.interface import InvalidParameters
@@ -127,6 +128,15 @@ class ApiTest(_Base):
self.assertEqual('test', repl.get('source'))
self.assertEqual('featjs/replication', repl.get('filter'))
+ # now delete the replication
+ r = yield submodel.perform_action('del')
+ self.assertIsInstance(r, response.Deleted)
+
+ view = yield self.connection.query_view(conflicts.Replications,
+ key=('source', 'test'),
+ include_docs=True)
+ self.assertEqual(0, len(view))
+
@defer.inlineCallbacks
def testCreateReplicationAlreadyExist(self):
get_replication_status = Method() | Expose a gateway action of integrity agent to delete a configured
replication. | f3at_feat | train |
63c660b29ce3fc9af7ac9683658c6e6e60cc8168 | diff --git a/core/src/main/java/io/grpc/ManagedChannelBuilder.java b/core/src/main/java/io/grpc/ManagedChannelBuilder.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/io/grpc/ManagedChannelBuilder.java
+++ b/core/src/main/java/io/grpc/ManagedChannelBuilder.java
@@ -200,10 +200,13 @@ public abstract class ManagedChannelBuilder<T extends ManagedChannelBuilder<T>>
* <p>EXPERIMENTAL: This method is here to enable an experimental feature, and may be changed or
* removed once the feature is stable.
*
+ * @throws UnsupportedOperationException if unsupported
* @since 1.7.0
*/
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/3399")
- public abstract T enableFullStreamDecompression();
+ public T enableFullStreamDecompression() {
+ throw new UnsupportedOperationException();
+ }
/**
* Set the decompression registry for use in the channel. This is an advanced API call and | core: make enableFullStreamDecompression non-abstract | grpc_grpc-java | train |
18aa053fc931786c03abdac2fe18978cc7ee8f03 | diff --git a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java
+++ b/src/main/java/com/google/cloud/genomics/dataflow/readers/ReadReader.java
@@ -22,9 +22,11 @@ import com.google.api.services.genomics.model.SearchReadsResponse;
import com.google.cloud.genomics.dataflow.utils.GenomicsApi;
import java.io.IOException;
+import java.util.logging.Logger;
public class ReadReader extends GenomicsApiReader<SearchReadsRequest, Read> {
-private String readFields;
+ private static final Logger LOG = Logger.getLogger(ReadReader.class.getName());
+ private String readFields;
public ReadReader(String accessToken, String apiKey, String readFields) {
super(accessToken, apiKey);
@@ -34,7 +36,7 @@ private String readFields;
@Override
protected void processApiCall(GenomicsApi api, ProcessContext c, SearchReadsRequest request)
throws IOException {
-
+ long total = 0;
do {
SearchReadsResponse response = api.executeRequest(
api.getService().reads().search(request), readFields);
@@ -42,10 +44,13 @@ private String readFields;
if (response.getReads() == null) {
break;
}
-
+
for (Read read : response.getReads()) {
c.output(read);
}
+
+ total += response.getReads().size();
+ LOG.info("Read " + total + " reads");
request.setPageToken(response.getNextPageToken());
} while (request.getPageToken() != null);
} | Added logging to readreader | googlegenomics_dataflow-java | train |
ac184c8f52414239c0775793ca2222e2bd1942be | diff --git a/client_test.go b/client_test.go
index <HASH>..<HASH> 100644
--- a/client_test.go
+++ b/client_test.go
@@ -234,18 +234,15 @@ func TestTwoClientsArbitraryPorts(t *testing.T) {
}
func TestAddDropManyTorrents(t *testing.T) {
- cl, _ := NewClient(&TestingConfig)
+ cl, err := NewClient(&TestingConfig)
+ require.NoError(t, err)
defer cl.Close()
for i := range iter.N(1000) {
var spec TorrentSpec
binary.PutVarint(spec.InfoHash[:], int64(i))
tt, new, err := cl.AddTorrentSpec(&spec)
- if err != nil {
- t.Error(err)
- }
- if !new {
- t.FailNow()
- }
+ assert.NoError(t, err)
+ assert.True(t, new)
defer tt.Drop()
}
} | Tidy up a test that fails with the wrong reasons | anacrolix_torrent | train |
314ccebbbb9f72063e1fe77a29f891de618b21f7 | diff --git a/php/class-wp-cli.php b/php/class-wp-cli.php
index <HASH>..<HASH> 100644
--- a/php/class-wp-cli.php
+++ b/php/class-wp-cli.php
@@ -249,16 +249,12 @@ class WP_CLI {
*/
public static function add_command( $name, $callable, $args = array() ) {
$valid = false;
- if ( is_object( $callable ) && ( $callable instanceof \Closure ) ) {
- $valid = true;
- } else if ( is_string( $callable ) && function_exists( $callable ) ) {
+ if ( is_callable( $callable ) ) {
$valid = true;
} else if ( is_string( $callable ) && class_exists( (string) $callable ) ) {
$valid = true;
} else if ( is_object( $callable ) ) {
$valid = true;
- } else if ( is_array( $callable ) && is_callable( $callable ) ) {
- $valid = true;
}
if ( ! $valid ) {
if ( is_array( $callable ) ) { | Use `is_callable()` in `WP_CLI:add_command()`
This is much better than rolling our own equivalent logic | wp-cli_extension-command | train |
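A short sketch (illustration only) of why is_callable() subsumes the removed branches: it already accepts closures, plain function-name strings, and [object, method] array callables.

```php
<?php
var_dump( is_callable( function () {} ) );                      // closure          -> bool(true)
var_dump( is_callable( 'strlen' ) );                            // function name    -> bool(true)
var_dump( is_callable( array( new ArrayObject(), 'count' ) ) ); // [object, method] -> bool(true)
```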
813639722d5d583d6921047a41309641d261aeae | diff --git a/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/entities/ConferenceDetailRecord.java b/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/entities/ConferenceDetailRecord.java
index <HASH>..<HASH> 100644
--- a/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/entities/ConferenceDetailRecord.java
+++ b/restcomm/restcomm.dao/src/main/java/org/restcomm/connect/dao/entities/ConferenceDetailRecord.java
@@ -266,4 +266,18 @@ public final class ConferenceDetailRecord {
this.masterIVREndpointId = masterIVREndpointId;
}
}
+
+ @Override
+ public String toString() {
+ return "ConferenceDetailRecord [sid=" + sid + ", dateCreated=" + dateCreated + ", dateUpdated=" + dateUpdated
+ + ", accountSid=" + accountSid + ", status=" + status + ", friendlyName=" + friendlyName
+ + ", apiVersion=" + apiVersion + ", uri=" + uri + ", masterMsId=" + masterMsId + ", masterPresent="
+ + masterPresent + ", masterConfernceEndpointId=" + masterConfernceEndpointId + ", masterIVREndpointId="
+ + masterIVREndpointId + ", masterIVREndpointSessionId=" + masterIVREndpointSessionId
+ + ", masterBridgeEndpointId=" + masterBridgeEndpointId + ", masterBridgeEndpointSessionId="
+ + masterBridgeEndpointSessionId + ", masterBridgeConnectionIdentifier="
+ + masterBridgeConnectionIdentifier + ", masterIVRConnectionIdentifier=" + masterIVRConnectionIdentifier
+ + ", moderatorPresent=" + moderatorPresent + "]";
+ }
+
}
diff --git a/restcomm/restcomm.mrb/src/main/java/org/restcomm/connect/mrb/ConferenceMediaResourceControllerGeneric.java b/restcomm/restcomm.mrb/src/main/java/org/restcomm/connect/mrb/ConferenceMediaResourceControllerGeneric.java
index <HASH>..<HASH> 100644
--- a/restcomm/restcomm.mrb/src/main/java/org/restcomm/connect/mrb/ConferenceMediaResourceControllerGeneric.java
+++ b/restcomm/restcomm.mrb/src/main/java/org/restcomm/connect/mrb/ConferenceMediaResourceControllerGeneric.java
@@ -72,23 +72,23 @@ public class ConferenceMediaResourceControllerGeneric extends UntypedActor{
// Finite State Machine
private final FiniteStateMachine fsm;
- protected final State uninitialized;
- protected final State acquiringConferenceInfo;
- protected final State creatingMediaGroup;
+ protected State uninitialized;
+ protected State acquiringConferenceInfo;
+ protected State creatingMediaGroup;
- protected final State preActive;
- protected final State active;
- protected final State stopping;
- protected final State inactive;
- protected final State failed;
+ protected State preActive;
+ protected State active;
+ protected State stopping;
+ protected State inactive;
+ protected State failed;
- protected final ActorRef localMediaGateway;
+ protected ActorRef localMediaGateway;
protected ActorRef mediaGroup;
protected MediaSession localMediaSession;
protected ActorRef localConfernceEndpoint;
- protected final DaoManager storage;
- protected final Configuration configuration;
+ protected DaoManager storage;
+ protected Configuration configuration;
protected ConferenceDetailRecord cdr;
protected Sid conferenceSid;
@@ -100,7 +100,7 @@ public class ConferenceMediaResourceControllerGeneric extends UntypedActor{
// Observer pattern
protected final List<ActorRef> observers;
- protected final ActorRef mrb;
+ protected ActorRef mrb;
public ConferenceMediaResourceControllerGeneric(ActorRef localMediaGateway, final Configuration configuration, final DaoManager storage, final ActorRef mrb){
super();
@@ -489,10 +489,12 @@ public class ConferenceMediaResourceControllerGeneric extends UntypedActor{
protected void updateConferenceStatus(String status){
if(cdr != null){
+ logger.info("updateConferenceStatus before in");
final ConferenceDetailRecordsDao dao = storage.getConferenceDetailRecordsDao();
cdr = dao.getConferenceDetailRecord(conferenceSid);
cdr = cdr.setStatus(status);
dao.updateConferenceDetailRecordStatus(cdr);
+ logger.info("updateConferenceStatus after in");
}
}
} | removed final and override toString for conference cdr | RestComm_Restcomm-Connect | train |
30e9f8dce97c6b1afdff72b5a32b175e1f237048 | diff --git a/provider/lxd/testing_test.go b/provider/lxd/testing_test.go
index <HASH>..<HASH> 100644
--- a/provider/lxd/testing_test.go
+++ b/provider/lxd/testing_test.go
@@ -204,7 +204,6 @@ func (s *BaseSuiteUnpatched) setConfig(c *gc.C, cfg *config.Config) {
}
func (s *BaseSuiteUnpatched) NewConfig(c *gc.C, updates testing.Attrs) *config.Config {
- //return NewCustomBaseConfig(c, updates)
if updates == nil {
updates = make(testing.Attrs)
} | Drop a superfluous line. | juju_juju | train |
2060811d888e9277f2483a2b8d77ad8654515ec6 | diff --git a/lib/Providers/Qr/EndroidQrCodeProvider.php b/lib/Providers/Qr/EndroidQrCodeProvider.php
index <HASH>..<HASH> 100755
--- a/lib/Providers/Qr/EndroidQrCodeProvider.php
+++ b/lib/Providers/Qr/EndroidQrCodeProvider.php
@@ -10,6 +10,8 @@ class EndroidQrCodeProvider implements IQRCodeProvider
public $color;
public $margin;
public $errorcorrectionlevel;
+ protected $logoPath;
+ protected $logoSize;
public function __construct($bgcolor = 'ffffff', $color = '000000', $margin = 0, $errorcorrectionlevel = 'H')
{
@@ -19,6 +21,17 @@ class EndroidQrCodeProvider implements IQRCodeProvider
$this->errorcorrectionlevel = $this->handleErrorCorrectionLevel($errorcorrectionlevel);
}
+ /**
+ * Adds an image to the middle of the QR Code.
+ * @param string $path Path to an image file
+ * @param array|int $size Just the width, or [width, height]
+ */
+ public function setLogo($path, $size = null)
+ {
+ $this->logoPath = $path;
+ $this->logoSize = (array)$size;
+ }
+
public function getMimeType()
{
return 'image/png';
@@ -34,6 +47,13 @@ class EndroidQrCodeProvider implements IQRCodeProvider
$qrCode->setBackgroundColor($this->bgcolor);
$qrCode->setForegroundColor($this->color);
+ if ($this->logoPath) {
+ $qrCode->setLogoPath($this->logoPath);
+ if ($this->logoSize) {
+ $qrCode->setLogoSize($this->logoSize[0], $this->logoSize[1]);
+ }
+ }
+
return $qrCode->writeString();
} | Add logo option to Endroid's provider
closes #<I> | RobThree_TwoFactorAuth | train |
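A possible usage sketch for the new option (the class namespace is an assumption made for illustration; only setLogo() itself comes from the diff):

```php
<?php
$provider = new RobThree\Auth\Providers\Qr\EndroidQrCodeProvider();

// Per the docblock: "Just the width, or [width, height]".
$provider->setLogo('/path/to/logo.png', 64);
$provider->setLogo('/path/to/logo.png', array(64, 48));
```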
ffbf84d2e2375ff722277df7e0ea9db049d2abda | diff --git a/src/edeposit/amqp/storage/storage_handler.py b/src/edeposit/amqp/storage/storage_handler.py
index <HASH>..<HASH> 100755
--- a/src/edeposit/amqp/storage/storage_handler.py
+++ b/src/edeposit/amqp/storage/storage_handler.py
@@ -72,18 +72,21 @@ def _put_into_indexes(pub):
"""
no_of_used_indexes = 0
for field_name, db_connector in list(_get_db_connectors()):
- attr = getattr(pub, field_name)
+ attr_value = getattr(pub, field_name)
- if attr is None: # index only by set attributes
+ if attr_value is None: # index only by set attributes
continue
- handler = db_connector.get(attr, OOTreeSet())
- handler.insert(pub)
- db_connector[attr] = handler
+ container = db_connector.get(attr_value, None)
+ if container is None:
+ container = OOTreeSet()
+ db_connector[attr_value] = container
+
+ container.insert(pub)
no_of_used_indexes += 1
- # make sure that atleast one attr was used
+ # make sure that atleast one `attr_value` was used
if no_of_used_indexes <= 0:
raise UnindexablePublication(
"You have to use atleast one of the identificators!"
@@ -124,7 +127,7 @@ def _get_subset_matches(query):
if attr is None: # don't use unset attributes
continue
- results = db_connector.get(attr, [])
+ results = db_connector.get(attr, OOTreeSet())
if results:
yield results | Small improvements lookup is now in one query to DB. Fixed bugs. | edeposit_edeposit.amqp.storage | train |
97083ab3200da353e6770f54ec15da8f8c8a9095 | diff --git a/src/test/java/integration/CollectionMethodsTest.java b/src/test/java/integration/CollectionMethodsTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/integration/CollectionMethodsTest.java
+++ b/src/test/java/integration/CollectionMethodsTest.java
@@ -163,4 +163,11 @@ public class CollectionMethodsTest extends IntegrationTest {
public void canGetCollectionLastElement() {
$$("#radioButtons input").last().shouldHave(value("woland"));
}
+
+ @Test
+ public void canFindElementsByMultipleSelectors() {
+ $$(".first_row").shouldHave(size(1));
+ $$(".second_row").shouldHave(size(1));
+ $$(".first_row,.second_row").shouldHave(size(2));
+ }
} | add test: can find elements by multiple selectors | selenide_selenide | train |
72603243f8bda0b51799bf41ce7fcedd3b0d5e2f | diff --git a/www/gatsby-config.js b/www/gatsby-config.js
index <HASH>..<HASH> 100644
--- a/www/gatsby-config.js
+++ b/www/gatsby-config.js
@@ -4,6 +4,8 @@ module.exports = {
description: `Technical documentation for the Paragon Design System.`,
author: `@edx`,
},
+ // Match the location of the site on github pages if no path prefix is specified
+ pathPrefix: 'PATH_PREFIX' in process.env ? process.env.PATH_PREFIX : '/paragon',
plugins: [
"gatsby-plugin-sass",
`gatsby-plugin-react-helmet`, | docs: fix path prefixing for github deployment (#<I>) | edx_paragon | train |
34471a33efb26ea4de822913c3c6b4be6918f507 | diff --git a/undertow/src/main/java/org/wildfly/extension/undertow/security/jaspi/JASPICSecureResponseHandler.java b/undertow/src/main/java/org/wildfly/extension/undertow/security/jaspi/JASPICSecureResponseHandler.java
index <HASH>..<HASH> 100644
--- a/undertow/src/main/java/org/wildfly/extension/undertow/security/jaspi/JASPICSecureResponseHandler.java
+++ b/undertow/src/main/java/org/wildfly/extension/undertow/security/jaspi/JASPICSecureResponseHandler.java
@@ -27,10 +27,10 @@ public class JASPICSecureResponseHandler implements HttpHandler {
} finally {
try {
JASPICContext context = exchange.getAttachment(JASPICContext.ATTACHMENT_KEY);
- ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
- String applicationIdentifier = JASPICAuthenticationMechanism.buildApplicationIdentifier(requestContext);
if (!JASPICAuthenticationMechanism.wasAuthExceptionThrown(exchange) && context != null) {
+ ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
+ String applicationIdentifier = JASPICAuthenticationMechanism.buildApplicationIdentifier(requestContext);
UndertowLogger.ROOT_LOGGER.debugf("secureResponse for layer [%s] and applicationContextIdentifier [%s].", JASPICAuthenticationMechanism.JASPI_HTTP_SERVLET_LAYER, applicationIdentifier);
context.getSam().secureResponse(context.getMessageInfo(), new Subject(), JASPICAuthenticationMechanism.JASPI_HTTP_SERVLET_LAYER, applicationIdentifier, context.getCbh()); | Move code into the if block where its needed.
Minor cleanup/perf thing I noticed when reviewing <URL> | wildfly_wildfly | train |
a4069e7897f18e3e00128121e5fe085ad780ba03 | diff --git a/lib/cli/common.rb b/lib/cli/common.rb
index <HASH>..<HASH> 100644
--- a/lib/cli/common.rb
+++ b/lib/cli/common.rb
@@ -11,7 +11,6 @@
# subcomponent's license, as noted in the LICENSE file.
#++
-require 'open-uri'
require 'cli/base'
require 'cli/config'
diff --git a/lib/uaa/token_issuer.rb b/lib/uaa/token_issuer.rb
index <HASH>..<HASH> 100644
--- a/lib/uaa/token_issuer.rb
+++ b/lib/uaa/token_issuer.rb
@@ -70,9 +70,23 @@ class TokenIssuer
status, body, headers = request(:post, uri, body, headers)
raise BadResponse, "status #{status}" unless status == 302
- loc = headers[:location].split('#')
- raise BadResponse, "bad location header" unless loc.length == 2 && URI.parse(loc[0]) == URI.parse(redir_uri)
- parse_implicit_params loc[1], state
+ req_uri, reply_uri = URI.parse(redir_uri), URI.parse(headers[:location])
+ fragment, reply_uri.fragment = reply_uri.fragment, nil
+ return parse_implicit_params(fragment, state) if req_uri == reply_uri
+
+ # work around bug when uaa is behind proxy that rewrites location header
+ if reply_uri.scheme == "https"
+ reply_uri.scheme = "http"
+ if req_uri == URI.parse(reply_uri.to_s)
+ logger.warn("Scheme of location URL in reply is different than requested")
+ return parse_implicit_params(fragment, state)
+ end
+ puts req_uri.inspect, reply_uri.inspect
+ end
+
+ raise BadResponse, "bad location header"
+ rescue URI::Error => e
+ raise BadResponse, "bad location header in reply: #{e.message}"
end
# constructs a uri that the client is to return to the browser to direct
diff --git a/lib/uaa/util.rb b/lib/uaa/util.rb
index <HASH>..<HASH> 100644
--- a/lib/uaa/util.rb
+++ b/lib/uaa/util.rb
@@ -13,7 +13,7 @@
require 'yajl/json_gem'
require 'logger'
-require 'open-uri'
+require 'uri'
module CF; module UAA end end | work around CFID-<I>, location header changed by CF
Change-Id: I<I>bd<I>a6dc<I>b3e9a<I>a<I>f6c<I>d | cloudfoundry_cf-uaa-lib | train |
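
A rough Python sketch of the redirect check the Ruby change implements: strip the fragment, accept an exact match, and otherwise tolerate an https-to-http difference introduced by a rewriting proxy. The function name and return shape are illustrative, not part of cf-uaa-lib:

```python
from urllib.parse import urlsplit

def implicit_redirect_matches(requested_uri, reply_location):
    """Return (matches, fragment) for an implicit-grant redirect reply."""
    req = urlsplit(requested_uri)
    reply = urlsplit(reply_location)
    fragment, reply = reply.fragment, reply._replace(fragment="")
    if reply == req:
        return True, fragment
    # Work around a proxy that rewrites the Location header to https.
    if reply.scheme == "https" and reply._replace(scheme="http") == req:
        return True, fragment
    return False, None
```
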
787d6c1b54851bd9a82025c7b9a270a94d112bc4 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index <HASH>..<HASH> 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -19,6 +19,9 @@ Changelog
* Star coordinates can now be offered for any epoch, not just J2000.
`#166 <https://github.com/skyfielders/python-skyfield/issues/166>`_
+* You can now create a time object given the UT1 date.
+ `#91 <https://github.com/skyfielders/python-skyfield/issues/91>`_
+
1.3 — 2018 April 15
-------------------
diff --git a/skyfield/tests/test_timelib.py b/skyfield/tests/test_timelib.py
index <HASH>..<HASH> 100644
--- a/skyfield/tests/test_timelib.py
+++ b/skyfield/tests/test_timelib.py
@@ -9,7 +9,7 @@ from datetime import datetime
one_second = 1.0 / DAY_S
epsilon = one_second * 42.0e-6 # 20.1e-6 is theoretical best precision
-time_parameter = ['tai', 'tt', 'tdb']
+time_parameter = ['tai', 'tt', 'tdb', 'ut1']
time_value = [(1973, 1, 18, 1, 35, 37.5), 2441700.56640625]
def ts():
@@ -66,6 +66,21 @@ def test_building_time_from_list_of_utc_datetimes(ts):
2442046.5, 2442047.5, 2442048.5, 2442049.5, 2442050.5, 2442051.5,
]).all()
+def test_converting_ut1_to_tt(ts):
+ ten_thousand_years = 365 * 10000
+
+ jd = api.T0 - ten_thousand_years
+ t = ts.ut1(jd=jd)
+ del t.ut1 # force re-computation of UT1
+ print(jd - t.ut1)
+ assert abs(jd - t.ut1) < 1e-10
+
+ jd = api.T0 + ten_thousand_years
+ t = ts.ut1(jd=jd)
+ del t.ut1 # force re-computation of UT1
+ print(jd - t.ut1)
+ assert abs(jd - t.ut1) < 1e-10
+
def test_indexing_time(ts):
t = ts.utc(1974, 10, range(1, 6))
assert t.shape == (5,)
diff --git a/skyfield/timelib.py b/skyfield/timelib.py
index <HASH>..<HASH> 100644
--- a/skyfield/timelib.py
+++ b/skyfield/timelib.py
@@ -197,6 +197,45 @@ class Timescale(object):
t.tdb = tdb
return t
+ def ut1(self, year=None, month=1, day=1, hour=0, minute=0, second=0.0,
+ jd=None):
+ """Return the Time corresponding to a specific moment in UT1.
+
+ You can supply the Universal Time (UT1) by providing either a
+ proleptic Gregorian calendar date or a raw Julian Date float.
+ The following two method calls are equivalent::
+
+ timescale.ut1(2014, 1, 18, 1, 35, 37.5)
+ timescale.ut1(jd=2456675.56640625)
+
+ """
+ if jd is not None:
+ ut1 = jd
+ else:
+ ut1 = julian_date(
+ _to_array(year), _to_array(month), _to_array(day),
+ _to_array(hour), _to_array(minute), _to_array(second),
+ )
+ ut1 = _to_array(ut1)
+
+ # Estimate TT = UT1, to get a rough Delta T estimate.
+ tt_approx = ut1
+ delta_t_approx = interpolate_delta_t(self.delta_t_table, tt_approx)
+
+ # Use the rough Delta T to make a much better estimate of TT,
+ # then generate an even better Delta T.
+ tt_approx = ut1 + delta_t_approx / DAY_S
+ delta_t_approx = interpolate_delta_t(self.delta_t_table, tt_approx)
+
+ # We can now estimate TT with an error of < 1e-9 seconds within
+ # 10 centuries of either side of the present; for details, see:
+ # https://github.com/skyfielders/astronomy-notebooks
+ # and look for the notebook "error-in-timescale-ut1.ipynb".
+ tt = ut1 + delta_t_approx / DAY_S
+ t = Time(self, tt)
+ t.ut1 = ut1
+ return t
+
def from_astropy(self, t):
"""Return a Skyfield time corresponding to the AstroPy time `t`."""
return self.tt(jd=t.tt.jd) | Allow time objects to be constructed from UT1
Fixes #<I>. | skyfielders_python-skyfield | train |
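
The heart of the new ts.ut1() is a two-pass fixed-point estimate of TT from UT1 through the Delta T table. A stripped-down Python sketch of just that iteration, with a stand-in delta_t() in place of Skyfield's interpolate_delta_t over self.delta_t_table:

```python
DAY_S = 86400.0

def delta_t(tt):
    """Stand-in for interpolate_delta_t(table, tt); returns seconds."""
    return 69.0  # roughly a present-day value, for illustration only

def tt_from_ut1(ut1):
    # First pass: pretend TT == UT1 to get a rough Delta T.
    delta = delta_t(ut1)
    # Second pass: re-evaluate Delta T at the improved TT estimate.
    delta = delta_t(ut1 + delta / DAY_S)
    return ut1 + delta / DAY_S
```
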
eddc691fc9e8ced74a33d3c4cbd568c2728fe6f1 | diff --git a/bridge/gitter/gitter.go b/bridge/gitter/gitter.go
index <HASH>..<HASH> 100644
--- a/bridge/gitter/gitter.go
+++ b/bridge/gitter/gitter.go
@@ -52,11 +52,14 @@ func (b *Bgitter) Disconnect() error {
}
func (b *Bgitter) JoinChannel(channel string) error {
- room := channel
- roomID := b.getRoomID(room)
- if roomID == "" {
+ roomID, err := b.c.GetRoomId(channel)
+ if err != nil {
return fmt.Errorf("Could not find roomID for %v. Please create the room on gitter.im", channel)
}
+ room, err := b.c.GetRoom(roomID)
+ if err != nil {
+ b.Rooms = append(b.Rooms, *room)
+ }
user, err := b.c.GetUser()
if err != nil {
return err
@@ -84,7 +87,7 @@ func (b *Bgitter) JoinChannel(channel string) error {
flog.Errorf("connection with gitter closed for room %s", room)
}
}
- }(stream, room)
+ }(stream, room.Name)
return nil
} | Join rooms not already joined by the bot (gitter). See #<I> | 42wim_matterbridge | train |
61c36dbfb10a347c4f739ac5fe9a973a84047130 | diff --git a/band/band_eu863_870.go b/band/band_eu863_870.go
index <HASH>..<HASH> 100644
--- a/band/band_eu863_870.go
+++ b/band/band_eu863_870.go
@@ -73,9 +73,9 @@ func newEU863Band(repeatedCompatible bool) (Band, error) {
6: {Modulation: LoRaModulation, SpreadFactor: 7, Bandwidth: 250, uplink: true, downlink: true},
7: {Modulation: FSKModulation, BitRate: 50000, uplink: true, downlink: true},
8: {Modulation: LRFHSSModulation, CodingRate: "1/3", OccupiedChannelWidth: 137000, uplink: true, downlink: false},
- 9: {Modulation: LRFHSSModulation, CodingRate: "2/3", OccupiedChannelWidth: 137000, uplink: true, downlink: false},
+ 9: {Modulation: LRFHSSModulation, CodingRate: "4/6", OccupiedChannelWidth: 137000, uplink: true, downlink: false},
10: {Modulation: LRFHSSModulation, CodingRate: "1/3", OccupiedChannelWidth: 336000, uplink: true, downlink: false},
- 11: {Modulation: LRFHSSModulation, CodingRate: "2/3", OccupiedChannelWidth: 336000, uplink: true, downlink: false},
+ 11: {Modulation: LRFHSSModulation, CodingRate: "4/6", OccupiedChannelWidth: 336000, uplink: true, downlink: false},
},
rx1DataRateTable: map[int][]int{
0: {0, 0, 0, 0, 0, 0},
diff --git a/band/band_eu863_870_test.go b/band/band_eu863_870_test.go
index <HASH>..<HASH> 100644
--- a/band/band_eu863_870_test.go
+++ b/band/band_eu863_870_test.go
@@ -113,7 +113,7 @@ func TestEU863Band(t *testing.T) {
ExpectedDR: 8,
},
{
- DataRate: DataRate{Modulation: LRFHSSModulation, CodingRate: "2/3", OccupiedChannelWidth: 336000},
+ DataRate: DataRate{Modulation: LRFHSSModulation, CodingRate: "4/6", OccupiedChannelWidth: 336000},
Uplink: true,
ExpectedDR: 11,
},
diff --git a/band/band_us902_928.go b/band/band_us902_928.go
index <HASH>..<HASH> 100644
--- a/band/band_us902_928.go
+++ b/band/band_us902_928.go
@@ -167,7 +167,7 @@ func newUS902Band(repeaterCompatible bool) (Band, error) {
3: {Modulation: LoRaModulation, SpreadFactor: 7, Bandwidth: 125, uplink: true},
4: {Modulation: LoRaModulation, SpreadFactor: 8, Bandwidth: 500, uplink: true},
5: {Modulation: LRFHSSModulation, CodingRate: "1/3", OccupiedChannelWidth: 1523000, uplink: true, downlink: false},
- 6: {Modulation: LRFHSSModulation, CodingRate: "2/3", OccupiedChannelWidth: 1523000, uplink: true, downlink: false},
+ 6: {Modulation: LRFHSSModulation, CodingRate: "4/6", OccupiedChannelWidth: 1523000, uplink: true, downlink: false},
// 7
8: {Modulation: LoRaModulation, SpreadFactor: 12, Bandwidth: 500, downlink: true},
9: {Modulation: LoRaModulation, SpreadFactor: 11, Bandwidth: 500, downlink: true}, | Update CodingRate strings to match lgw2_cr_enum2str.
While the Regional Parameters specify CR 2/3, the HAL enum to string
function returns CR 2/3 as 4/6, causing a mismatch error.
In the future this should be improved by changing the type from string
to enum so that these can be aliased. | brocaar_lorawan | train
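
The commit message suggests replacing the coding-rate strings with an enum so that "2/3" and "4/6" can alias one value. A hypothetical Python sketch of that idea — the package itself is Go, and every name below is made up:

```python
from enum import Enum

class CodingRate(Enum):
    CR_1_3 = "1/3"
    CR_2_3 = "2/3"

# The HAL reports CR 2/3 as the string "4/6"; both parse to the same member.
_ALIASES = {"4/6": CodingRate.CR_2_3}

def parse_coding_rate(text):
    try:
        return CodingRate(text)
    except ValueError:
        return _ALIASES[text]

assert parse_coding_rate("4/6") is parse_coding_rate("2/3")
```
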
4752ac2aad0379b781037b985ba4904f2f36a1c1 | diff --git a/lib/gatling.rb b/lib/gatling.rb
index <HASH>..<HASH> 100644
--- a/lib/gatling.rb
+++ b/lib/gatling.rb
@@ -49,7 +49,7 @@ module Gatling
comparison = nil
while !match && try < max_no_tries
actual_image = Gatling::ImageFromElement.new(actual_element, expected_reference_filename)
- comparison = Gatling::Comparison.new(expected_image, actual_image)
+ comparison = Gatling::Comparison.new(actual_image, expected_image)
match = comparison.matches?
if !match
sleep sleep_time
@@ -63,7 +63,6 @@ module Gatling
def save_image_as_diff(image)
image.save(:diff)
- image.save(:candidate)
raise "element did not match #{image.file_name}. A diff image: #{image.file_name} was created in " +
"#{image.path(:diff)} " +
"A new reference #{image.path(:candidate)} can be used to fix the test"
diff --git a/spec/gatling_spec.rb b/spec/gatling_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/gatling_spec.rb
+++ b/spec/gatling_spec.rb
@@ -37,7 +37,6 @@ describe Gatling do
it "#save_image_as_diff" do
@image_class_mock.should_receive(:save).with(:diff).and_return(@ref_path)
- @image_class_mock.should_receive(:save).with(:candidate).and_return(@ref_path)
@image_class_mock.should_receive(:file_name).at_least(:once).and_return("some_name")
expect {subject.save_image_as_diff(@image_class_mock)}.should raise_error
end
@@ -74,12 +73,12 @@ describe Gatling do
describe "#compare_until_match" do
before do
- @apple = mock("Gatling::Image")
- @orange = mock("Gatling::Image")
+ @ref_image = mock("Gatling::Image")
+ @actual_image = mock("Gatling::Image")
@element = mock(Gatling::CaptureElement)
@comparison = mock("Gatling::Comparison")
- Gatling::ImageFromFile.stub!(:new).and_return(@orange)
- Gatling::ImageFromElement.stub!(:new).and_return(@orange)
+ Gatling::ImageFromFile.stub!(:new).and_return(@ref_image)
+ Gatling::ImageFromElement.stub!(:new).and_return(@actual_image)
Gatling::Comparison.stub!(:new).and_return(@comparison)
end
@@ -92,5 +91,11 @@ describe "#compare_until_match" do
@comparison.should_receive(:matches?).exactly(1).times.and_return(true)
Gatling.compare_until_match(@element, "orange.png", 3)
end
+
+ it 'should compare image from the element with image from the file' do
+ @comparison.stub!(:matches?).and_return(true)
+ Gatling::Comparison.should_receive(:new).with(@actual_image, @ref_image).and_return(@comparison)
+ Gatling.compare_until_match(@element, "orange.png", 3)
+ end
end
end
\ No newline at end of file | Don't overwrite candidate image w/ diff image + fix the incorrect argument for Gatling::Comparison.new inside compare_until_match method | gabrielrotbart_gatling | train |
b5669a8ab6177ca2acc6511b6ce9dfdf22062373 | diff --git a/src/Store/Store.php b/src/Store/Store.php
index <HASH>..<HASH> 100644
--- a/src/Store/Store.php
+++ b/src/Store/Store.php
@@ -91,6 +91,16 @@ class Store
}
/**
+ * Returns the available type keys from the MetadataFactory
+ *
+ * @return array
+ */
+ public function getTypes()
+ {
+ return $this->mf->getAllTypeNames();
+ }
+
+ /**
* Finds all records (or filtered by specific identifiers) for a type.
*
* @todo Add sorting and pagination (limit/skip). | Add method to Store to get all available types | as3io_modlr | train |
05a5da620ee41adb7d8c83b412d804fecff20a5f | diff --git a/levels.go b/levels.go
index <HASH>..<HASH> 100644
--- a/levels.go
+++ b/levels.go
@@ -32,7 +32,6 @@ import (
"github.com/dgraph-io/badger/v2/pb"
"github.com/dgraph-io/badger/v2/table"
"github.com/dgraph-io/badger/v2/y"
- "github.com/dgraph-io/ristretto/z"
"github.com/pkg/errors"
)
@@ -701,10 +700,11 @@ nextTable:
mu.Lock()
newTables = append(newTables, tbl)
- num := atomic.LoadInt32(&table.NumBlocks)
+ // num := atomic.LoadInt32(&table.NumBlocks)
mu.Unlock()
- s.kv.opt.Debugf("Num Blocks: %d. Num Allocs (MB): %.2f\n", num, z.NumAllocsMB())
+ // TODO(ibrahim): When ristretto PR #186 merges, bring this back.
+ // s.kv.opt.Debugf("Num Blocks: %d. Num Allocs (MB): %.2f\n", num, (z.NumAllocBytes() / 1 << 20))
}(builder)
} | Don't call z.NumAllocBytes directly for now. | dgraph-io_badger | train |
0eeeb764391490585f690c8b424d9ce2d43e6a9b | diff --git a/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectEncoder.java b/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectEncoder.java
index <HASH>..<HASH> 100755
--- a/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectEncoder.java
+++ b/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectEncoder.java
@@ -97,12 +97,6 @@ public abstract class HttpObjectEncoder<H extends HttpMessage> extends MessageTo
switch (state) {
case ST_INIT:
throw new IllegalStateException("unexpected message type: " + StringUtil.simpleClassName(msg));
- case ST_CONTENT_ALWAYS_EMPTY:
- out.add(EMPTY_BUFFER);
- if (msg instanceof LastHttpContent) {
- state = ST_INIT;
- }
- return;
case ST_CONTENT_NON_CHUNK:
final long contentLength = contentLength(msg);
if (contentLength > 0) {
@@ -116,33 +110,43 @@ public abstract class HttpObjectEncoder<H extends HttpMessage> extends MessageTo
}
out.add(encodeAndRetain(msg));
}
- } else {
- if (buf != null) {
- out.add(buf);
- } else {
- // Need to produce some output otherwise an
- // IllegalStateException will be thrown
- out.add(EMPTY_BUFFER);
+
+ if (msg instanceof LastHttpContent) {
+ state = ST_INIT;
}
+
+ break;
}
+ // fall-through!
+ case ST_CONTENT_ALWAYS_EMPTY:
- if (msg instanceof LastHttpContent) {
- state = ST_INIT;
+ if (buf != null) {
+ // We allocated a buffer so add it now.
+ out.add(buf);
+ } else {
+ // Need to produce some output otherwise an
+ // IllegalStateException will be thrown
+ out.add(EMPTY_BUFFER);
}
- return;
+
+ break;
case ST_CONTENT_CHUNK:
if (buf != null) {
+ // We allocated a buffer so add it now.
out.add(buf);
}
encodeChunkedContent(ctx, msg, contentLength(msg), out);
- return;
+
+ break;
default:
throw new Error();
}
- } else {
- if (buf != null) {
- out.add(buf);
+
+ if (msg instanceof LastHttpContent) {
+ state = ST_INIT;
}
+ } else if (buf != null) {
+ out.add(buf);
}
}
@@ -184,14 +188,10 @@ public abstract class HttpObjectEncoder<H extends HttpMessage> extends MessageTo
buf.writeBytes(CRLF);
out.add(buf);
}
-
- state = ST_INIT;
- } else {
- if (contentLength == 0) {
- // Need to produce some output otherwise an
- // IllegalstateException will be thrown
- out.add(EMPTY_BUFFER);
- }
+ } else if (contentLength == 0) {
+ // Need to produce some output otherwise an
+ // IllegalstateException will be thrown
+ out.add(EMPTY_BUFFER);
}
}
diff --git a/codec-http/src/test/java/io/netty/handler/codec/http/HttpServerCodecTest.java b/codec-http/src/test/java/io/netty/handler/codec/http/HttpServerCodecTest.java
index <HASH>..<HASH> 100644
--- a/codec-http/src/test/java/io/netty/handler/codec/http/HttpServerCodecTest.java
+++ b/codec-http/src/test/java/io/netty/handler/codec/http/HttpServerCodecTest.java
@@ -145,6 +145,32 @@ public class HttpServerCodecTest {
assertFalse(ch.finishAndReleaseAll());
}
+ @Test
+ public void testChunkedHeadFullHttpResponse() {
+ EmbeddedChannel ch = new EmbeddedChannel(new HttpServerCodec());
+
+ // Send the request headers.
+ assertTrue(ch.writeInbound(Unpooled.copiedBuffer(
+ "HEAD / HTTP/1.1\r\n\r\n", CharsetUtil.UTF_8)));
+
+ HttpRequest request = ch.readInbound();
+ assertEquals(HttpMethod.HEAD, request.method());
+ LastHttpContent content = ch.readInbound();
+ assertFalse(content.content().isReadable());
+ content.release();
+
+ FullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
+ HttpUtil.setTransferEncodingChunked(response, true);
+ assertTrue(ch.writeOutbound(response));
+ assertTrue(ch.finish());
+
+ ByteBuf buf = ch.readOutbound();
+ assertEquals("HTTP/1.1 200 OK\r\ntransfer-encoding: chunked\r\n\r\n", buf.toString(CharsetUtil.US_ASCII));
+ buf.release();
+
+ assertFalse(ch.finishAndReleaseAll());
+ }
+
private static ByteBuf prepareDataChunk(int size) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < size; ++i) { | Fix handling of FullHttpResponse when respond to HEAD in HttpServerCodec
Motivation:
cb<I>f<I>e7ada2d<I>ca<I>ed<I> introduced special handling of response to HEAD requests. Due a bug we failed to handle FullHttpResponse correctly.
Modifications:
Correctly handle FullHttpResponse for HEAD requests.
Result:
Works as expected. | netty_netty | train |
a9c1e799f26508974e78699431985569284e9604 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ setup(
description='2gis library for Python',
long_description=__doc__,
packages=find_packages(),
- requires=('requests',),
+ requires=('requests'),
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers', | Awesome bug that breaks `python setup.py sdist upload` command | svartalf_python-2gis | train |
16580b58e57fb9bd2b6659a9f4b7bf7d7c20409d | diff --git a/changelog.html b/changelog.html
index <HASH>..<HASH> 100644
--- a/changelog.html
+++ b/changelog.html
@@ -60,7 +60,9 @@ Upcoming changes</a>
<!-- Record your changes in the trunk here. -->
<div id="trunk" style="display:none"><!--=TRUNK-BEGIN=-->
<ul class=image>
- <li class=>
+ <li class=bug>
+ Fixed a persistence problem in <tt>View$PropertyList</tt>
+ (<a href="http://issues.jenkins-ci.org/browse/JENKINS-9367">issue 9367</a>)
</ul>
</div><!--=TRUNK-END=-->
diff --git a/core/src/main/java/hudson/model/ListView.java b/core/src/main/java/hudson/model/ListView.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/ListView.java
+++ b/core/src/main/java/hudson/model/ListView.java
@@ -91,7 +91,8 @@ public class ListView extends View implements Saveable {
this.owner = owner;
}
- private Object readResolve() {
+ protected Object readResolve() {
+ super.readResolve();
if(includeRegex!=null)
includePattern = Pattern.compile(includeRegex);
initColumns();
diff --git a/core/src/main/java/hudson/model/View.java b/core/src/main/java/hudson/model/View.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/View.java
+++ b/core/src/main/java/hudson/model/View.java
@@ -118,7 +118,7 @@ public abstract class View extends AbstractModelObject implements AccessControll
* List of {@link ViewProperty}s configured for this view.
* @since 1.406
*/
- private volatile DescribableList<ViewProperty,ViewPropertyDescriptor> properties = new PropertyList();
+ private volatile DescribableList<ViewProperty,ViewPropertyDescriptor> properties = new PropertyList(this);
protected View(String name) {
this.name = name;
@@ -129,9 +129,11 @@ public abstract class View extends AbstractModelObject implements AccessControll
this.owner = owner;
}
- private Object readResolve() {
+ protected Object readResolve() {
if (properties == null) {
- properties = new PropertyList();
+ properties = new PropertyList(this);
+ } else {
+ properties.setOwner(this);
}
return this;
}
@@ -762,15 +764,22 @@ public abstract class View extends AbstractModelObject implements AccessControll
return v;
}
- private class PropertyList extends DescribableList<ViewProperty,ViewPropertyDescriptor> {
- private PropertyList() {
- super(View.this);
+ public static class PropertyList extends DescribableList<ViewProperty,ViewPropertyDescriptor> {
+ private PropertyList(View owner) {
+ super(owner);
+ }
+
+ public PropertyList() {// needed for XStream deserialization
+ }
+
+ public View getOwner() {
+ return (View)owner;
}
@Override
protected void onModified() throws IOException {
for (ViewProperty p : this)
- p.setView(View.this);
+ p.setView(getOwner());
}
}
}
diff --git a/test/src/test/java/hudson/model/ViewTest.java b/test/src/test/java/hudson/model/ViewTest.java
index <HASH>..<HASH> 100644
--- a/test/src/test/java/hudson/model/ViewTest.java
+++ b/test/src/test/java/hudson/model/ViewTest.java
@@ -118,4 +118,14 @@ public class ViewTest extends HudsonTestCase {
assertNull(p.getView("list"));
}
+
+ @Bug(9367)
+ public void testPersistence() throws Exception {
+ ListView view = new ListView("foo", hudson);
+ hudson.addView(view);
+
+ ListView v = (ListView)Hudson.XSTREAM.fromXML(Hudson.XSTREAM.toXML(view));
+ System.out.println(v.getProperties());
+ assertNotNull(v.getProperties());
+ }
} | [FIXED JENKINS-<I>] persistence problem in View$PropertyList | jenkinsci_jenkins | train |
7a40a32ecbb3b22ca35587ab8919da0960351a2d | diff --git a/Access/Response.php b/Access/Response.php
index <HASH>..<HASH> 100644
--- a/Access/Response.php
+++ b/Access/Response.php
@@ -15,6 +15,7 @@ class Response
* Create a new response.
*
* @param string|null $message
+ * @return void
*/
public function __construct($message = null)
{
diff --git a/Events/Attempting.php b/Events/Attempting.php
index <HASH>..<HASH> 100644
--- a/Events/Attempting.php
+++ b/Events/Attempting.php
@@ -23,6 +23,7 @@ class Attempting
*
* @param array $credentials
* @param bool $remember
+ * @return void
*/
public function __construct($credentials, $remember)
{
diff --git a/Events/Failed.php b/Events/Failed.php
index <HASH>..<HASH> 100644
--- a/Events/Failed.php
+++ b/Events/Failed.php
@@ -23,6 +23,7 @@ class Failed
*
* @param \Illuminate\Contracts\Auth\Authenticatable|null $user
* @param array $credentials
+ * @return void
*/
public function __construct($user, $credentials)
{ | Add missing return docblocks (#<I>) | illuminate_auth | train |
3ff71a70a58600a845fed0a1ba8184d32a5aaae1 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-from distutils.core import setup
+from setuptools import setup, find_packages
setup(name='django-locksmith',
version='0.6.0', | Converted setup.py from distutils to setuptools. | sunlightlabs_django-locksmith | train |
66000d879ee338b718238196541ec227798b7354 | diff --git a/src/com/google/javascript/jscomp/LinkedFlowScope.java b/src/com/google/javascript/jscomp/LinkedFlowScope.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/LinkedFlowScope.java
+++ b/src/com/google/javascript/jscomp/LinkedFlowScope.java
@@ -555,9 +555,14 @@ class LinkedFlowScope implements FlowScope {
// 5) The type is declared in joinedScopeA and joinedScopeB. Join
// the two types.
+ // Stores names that are not in a common ancestor of slotsA and slotsB for later removal
+ Set<ScopedName> obsoleteNames = new HashSet<>();
for (ScopedName var : Sets.union(slotsA.keySet(), slotsB.keySet())) {
if (!commonAncestorScopeRootNodes.contains(var.getScopeRoot())) {
// Variables not defined in a common ancestor no longer exist after the join.
+ // Since this.symbols is initialized to slotsA, this.symbols may already contain var.
+ // Remove obsolete names after this for loop (to avoid a ConcurrentModificationException)
+ obsoleteNames.add(var);
continue;
}
LinkedFlowSlot slotA = slotsA.get(var);
@@ -591,6 +596,9 @@ class LinkedFlowScope implements FlowScope {
symbols.put(var, new LinkedFlowSlot(var, joinedType, null));
}
}
+ for (ScopedName var : obsoleteNames) {
+ this.symbols.remove(var);
+ }
}
}
}
diff --git a/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java b/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java
+++ b/test/com/google/javascript/jscomp/TypeCheckNoTranspileTest.java
@@ -160,6 +160,23 @@ public final class TypeCheckNoTranspileTest extends CompilerTypeTestCase {
"}"));
}
+ public void testBlockScopedVarInLoop3() {
+ // Tests that the qualified name alias.num is reset between loop iterations
+ testTypes(
+ lines(
+ "function takesNumber(/** number */ n) {}",
+ "",
+ "function f(/** {num: ?number} */ obj) {",
+ " for (const _ in {}) {",
+ " const alias = obj;",
+ " if (alias.num === null) {",
+ " continue;",
+ " }",
+ " takesNumber(alias.num);",
+ " }",
+ "}"));
+ }
+
public void testGlobalEnumWithLet() {
testTypes(
lines( | Make sure to remove block-scoped names in LinkedFlowScope.JoinOp
-------------
Created by MOE: <URL> | google_closure-compiler | train |
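
The Java fix collects the obsolete keys during iteration and deletes them only afterwards, to avoid a ConcurrentModificationException. The same collect-then-remove pattern in Python, where mutating a dict while iterating over it likewise raises an error; attribute names are illustrative:

```python
def drop_out_of_scope(symbols, live_scope_roots):
    """Remove entries whose scope root is no longer a common-ancestor scope."""
    obsolete = [name for name, slot in symbols.items()
                if slot.scope_root not in live_scope_roots]
    for name in obsolete:      # deleting after the scan keeps iteration safe
        del symbols[name]
    return symbols
```
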
b321a19cd6f8d2f80ca5b469b95d89e41c27be2a | diff --git a/src/main/java/org/eobjects/analyzer/beans/valuedist/ValueCountListImpl.java b/src/main/java/org/eobjects/analyzer/beans/valuedist/ValueCountListImpl.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/eobjects/analyzer/beans/valuedist/ValueCountListImpl.java
+++ b/src/main/java/org/eobjects/analyzer/beans/valuedist/ValueCountListImpl.java
@@ -37,6 +37,10 @@ public class ValueCountListImpl implements Serializable, ValueCountList {
return new ValueCountListImpl(-1, true);
}
+ public static ValueCountList emptyList() {
+ return new ValueCountListImpl(0, true);
+ }
+
public static ValueCountListImpl createTopList(int topFrequentValues) {
return new ValueCountListImpl(topFrequentValues, true);
}
diff --git a/src/main/java/org/eobjects/analyzer/connection/JdbcDatastore.java b/src/main/java/org/eobjects/analyzer/connection/JdbcDatastore.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/eobjects/analyzer/connection/JdbcDatastore.java
+++ b/src/main/java/org/eobjects/analyzer/connection/JdbcDatastore.java
@@ -118,7 +118,7 @@ public final class JdbcDatastore extends UsageAwareDatastore {
installDriver = false;
break;
}
- } catch (SQLException e) {
+ } catch (Exception e) {
logger.warn("Driver threw exception when acceptURL(...) was invoked", e);
}
}
diff --git a/src/main/java/org/eobjects/analyzer/result/ValueDistributionResult.java b/src/main/java/org/eobjects/analyzer/result/ValueDistributionResult.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/eobjects/analyzer/result/ValueDistributionResult.java
+++ b/src/main/java/org/eobjects/analyzer/result/ValueDistributionResult.java
@@ -46,23 +46,29 @@ public class ValueDistributionResult implements AnalyzerResult {
_nullCount = nullCount;
}
- public ValueDistributionResult(InputColumn<?> column, ValueCountList topValues, ValueCountListImpl bottomValues,
+ public ValueDistributionResult(InputColumn<?> column, ValueCountList topValues, ValueCountList bottomValues,
int nullCount, Collection<String> uniqueValues) {
this(column, topValues, bottomValues, nullCount);
_uniqueValues = uniqueValues;
}
- public ValueDistributionResult(InputColumn<?> column, ValueCountList topValues, ValueCountListImpl bottomValues,
+ public ValueDistributionResult(InputColumn<?> column, ValueCountList topValues, ValueCountList bottomValues,
int nullCount, int uniqueValueCount) {
this(column, topValues, bottomValues, nullCount);
_uniqueValueCount = uniqueValueCount;
}
public ValueCountList getTopValues() {
+ if (_topValues == null) {
+ return ValueCountListImpl.emptyList();
+ }
return _topValues;
}
public ValueCountList getBottomValues() {
+ if (_bottomValues == null) {
+ return ValueCountListImpl.emptyList();
+ }
return _bottomValues;
} | Minor improvement to JdbcDatastore, catching any exception when attempting to detect if driver is installed | datacleaner_AnalyzerBeans | train |
a6d9bff834e171f935917140c5c951aaeb2eb860 | diff --git a/stagemonitor-core/src/main/java/org/stagemonitor/core/configuration/Configuration.java b/stagemonitor-core/src/main/java/org/stagemonitor/core/configuration/Configuration.java
index <HASH>..<HASH> 100644
--- a/stagemonitor-core/src/main/java/org/stagemonitor/core/configuration/Configuration.java
+++ b/stagemonitor-core/src/main/java/org/stagemonitor/core/configuration/Configuration.java
@@ -308,10 +308,8 @@ public class Configuration {
* @return <code>true</code>, if the password is correct, <code>false</code> otherwise
*/
public boolean isPasswordCorrect(String password) {
- if (password == null) {
- password = "";
- }
- return isPasswordSet() && getString(updateConfigPasswordKey).equals(password);
+ final String actualPassword = getString(updateConfigPasswordKey);
+ return "".equals(actualPassword) || isPasswordSet() && actualPassword.equals(password);
}
/** | Consider each password correct if set to empty string
This makes the password brute force warning only appear
if the password is actually set | stagemonitor_stagemonitor | train |
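
A compact Python rendering of the rule this change encodes — an empty configured password accepts any attempt, otherwise the supplied password must match — so the brute-force warning only fires when a password is actually set. Names are illustrative:

```python
def is_password_correct(actual_password, supplied_password):
    # Empty string means "no password configured": treat every attempt as
    # correct, so brute-force warnings only appear for a real password.
    if actual_password == "":
        return True
    return supplied_password is not None and actual_password == supplied_password
```
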
dd8ae347c760217ee4caf32f33385e9853b854b8 | diff --git a/text.go b/text.go
index <HASH>..<HASH> 100644
--- a/text.go
+++ b/text.go
@@ -17,6 +17,16 @@ const (
AlignRight
)
+// CharacterSide shows which side of a character is
+// clicked
+type CharacterSide int
+
+const (
+ CSLeft CharacterSide = iota
+ CSRight
+ CSUnknown
+)
+
type Text struct {
font *Font
@@ -64,7 +74,8 @@ type Text struct {
Width float32
Height float32
- String string
+ String string
+ CharSpacing []float32
}
func (t *Text) GetLength() int {
@@ -360,6 +371,37 @@ func (t *Text) setDataPosition(lowerLeft Point) (err error) {
return
}
+// PrintCharSpacing is used for debugging
+func (t *Text) PrintCharSpacing() {
+ fmt.Printf("\n%s:\n", t.String)
+ at := t.X1.X
+ for i, cs := range t.CharSpacing {
+ at = cs + at
+ fmt.Printf("%c %.2f ", t.String[i], at)
+ }
+}
+
+// ClickedCharacter should only be called after a bounding box hit is confirmed because
+// it does not check y-axis values at all. Returns the index and side of the char clicked.
+func (t *Text) ClickedCharacter(xPos float64) (index int, side CharacterSide) {
+ // transform from screen coordinates to... window coordinates?
+ xPos = xPos - float64(t.font.WindowWidth/2)
+
+ // could do a binary search...
+ at := float64(t.X1.X)
+ for i, cs := range t.CharSpacing {
+ at = float64(cs) + at
+ if xPos <= at && xPos > at-float64(cs) {
+ if xPos-(at-float64(cs)) > float64(cs)/2 {
+ return i, CSRight
+ } else {
+ return i, CSLeft
+ }
+ }
+ }
+ return -1, CSUnknown
+}
+
func (t *Text) HasRune(r rune) bool {
for _, runes := range t.font.Config.RuneRanges {
if r >= runes.Low && r <= runes.High {
@@ -377,6 +419,8 @@ func (t *Text) makeBufferData(indices []rune) {
eboIndex := 0
lineX := float32(0)
eboOffset := int32(0)
+
+ t.CharSpacing = make([]float32, 0)
for i, r := range indices {
glyphIndex := t.font.Config.RuneRanges.GetGlyphIndex(r)
if glyphIndex >= 0 {
@@ -388,6 +432,9 @@ func (t *Text) makeBufferData(indices []rune) {
vw := float32(glyphs[glyphIndex].Width)
vh := float32(glyphs[glyphIndex].Height)
+ // used to determine which character inside of the text was clicked
+ t.CharSpacing = append(t.CharSpacing, vw)
+
// variable width characters will produce a bounding box that is just
// a bit too long on the right-hand side unless we trim off the excess
// when processing the right-most character
diff --git a/text_test.go b/text_test.go
index <HASH>..<HASH> 100644
--- a/text_test.go
+++ b/text_test.go
@@ -27,3 +27,31 @@ func TestHasRune(t *testing.T) {
t.Error("Should not have 41.")
}
}
+
+// TestClickedCharacter tests a hypothetical string of length 3 with variable width chars
+func TestClickedCharacter(t *testing.T) {
+ text := &Text{}
+ text.font = &Font{}
+ text.font.WindowWidth = 100
+ text.X1.X = -20
+ text.String = "ABC"
+
+ // click was just around the middle of the screen
+ xPos := float64(51)
+
+ // -20 to -10 is A
+ // -10 to +10 is B
+ // +10 to +20 is C
+ text.CharSpacing = make([]float32, 0)
+ text.CharSpacing = append(text.CharSpacing, 10)
+ text.CharSpacing = append(text.CharSpacing, 20)
+ text.CharSpacing = append(text.CharSpacing, 10)
+
+ index, side := text.ClickedCharacter(xPos)
+ if index != 1 {
+ t.Error("Expecting index 1")
+ }
+ if side != CSRight {
+ t.Error("Expecting right side click")
+ }
+} | Adding methods for detecting where in the string of a text object it is
that the user clicked. | 4ydx_gltext | train |
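
A Python sketch of the hit-testing idea added above: walk the per-character advance widths, find the interval containing the click, and report whether it landed on the character's left or right half. The Go code first converts from screen to window coordinates; that step is elided here:

```python
def clicked_character(x, start_x, char_widths):
    """Return (index, side) for the character containing x, or (-1, None)."""
    at = start_x
    for i, width in enumerate(char_widths):
        at += width
        if at - width < x <= at:
            side = "right" if x - (at - width) > width / 2 else "left"
            return i, side
    return -1, None

# Mirrors the Go unit test: "ABC" starting at -20 with widths 10, 20, 10.
assert clicked_character(1, -20, [10, 20, 10]) == (1, "right")
```
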
91548f76bef02801ab3c4ad86a8c6363155e1a00 | diff --git a/code/MSSQLDatabase.php b/code/MSSQLDatabase.php
index <HASH>..<HASH> 100644
--- a/code/MSSQLDatabase.php
+++ b/code/MSSQLDatabase.php
@@ -1236,7 +1236,7 @@ class MSSQLDatabase extends SS_Database {
// just use the function directly in the order by instead of the alias
$selects = $query->getSelect();
foreach($orderby as $field => $dir) {
- if(preg_match('/SortColumn/', $field)) {
+ if(preg_match('/_SortColumn/', $field)) {
unset($orderby[$field]);
$orderby[$selects[str_replace('"', '', $field)]] = $dir;
} | Checking for underscore in SortColumn just to be sure | silverstripe_silverstripe-mssql | train |
4cbbae8aae07e8fd529f826ed238a0687eb7d8c8 | diff --git a/PhpUnit/AbstractContainerBuilderTestCase.php b/PhpUnit/AbstractContainerBuilderTestCase.php
index <HASH>..<HASH> 100644
--- a/PhpUnit/AbstractContainerBuilderTestCase.php
+++ b/PhpUnit/AbstractContainerBuilderTestCase.php
@@ -87,6 +87,20 @@ abstract class AbstractContainerBuilderTestCase extends \PHPUnit_Framework_TestC
}
/**
+ * Assert that the ContainerBuilder for this test does not have a service definition with the given id.
+ *
+ * @param $serviceId
+ */
+ protected function assertContainerBuilderNotHasService($serviceId)
+ {
+ self::assertThat(
+ $this->container,
+ new \PHPUnit_Framework_Constraint_Not(new ContainerBuilderHasServiceDefinitionConstraint($serviceId, null, false))
+ );
+ }
+
+
+ /**
* Assert that the ContainerBuilder for this test has a synthetic service with the given id.
*
* @param $serviceId
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -293,6 +293,8 @@ These are the available semantic assertions for each of the test cases shown abo
<dd>Assert that the <code>ContainerBuilder</code> for this test has a service definition with the given id.</dd>
<dt><code>assertContainerBuilderHasService($serviceId, $expectedClass)</code></dt>
<dd>Assert that the <code>ContainerBuilder</code> for this test has a service definition with the given id and class.</dd>
+<dt><code>assertContainerBuilderNotHasService($serviceId)</code></dt>
+<dd>Assert that the <code>ContainerBuilder</code> for this test does not have a service definition with the given id.</dd>
<dt><code>assertContainerBuilderHasSyntheticService($serviceId)</code></dt>
<dd>Assert that the <code>ContainerBuilder</code> for this test has a synthetic service with the given id.</dd>
<dt><code>assertContainerBuilderHasAlias($aliasId)</code></dt>
diff --git a/Tests/PhpUnit/AbstractExtensionTestCaseTest.php b/Tests/PhpUnit/AbstractExtensionTestCaseTest.php
index <HASH>..<HASH> 100644
--- a/Tests/PhpUnit/AbstractExtensionTestCaseTest.php
+++ b/Tests/PhpUnit/AbstractExtensionTestCaseTest.php
@@ -237,4 +237,26 @@ class AbstractExtensionTestCaseTest extends AbstractExtensionTestCase
array('a wrong argument')
);
}
+
+ /**
+ * @test
+ */
+ public function if_service_is_defined_it_fails()
+ {
+ $this->load();
+
+ $this->setExpectedException('\PHPUnit_Framework_ExpectationFailedException');
+
+ $this->assertContainerBuilderNotHasService('loaded_service_id');
+ }
+
+ /**
+ * @test
+ */
+ public function if_service_is_not_defined_it_does_not_fail()
+ {
+ $this->load();
+
+ $this->assertContainerBuilderNotHasService('undefined');
+ }
} | add assertContainerBuilderNotHasService to test that a service is not defined | SymfonyTest_SymfonyDependencyInjectionTest | train |
bc81a0a9f2b2a0960948892e4d5d34008af5d74b | diff --git a/src/qtism/cli/Cli.php b/src/qtism/cli/Cli.php
index <HASH>..<HASH> 100644
--- a/src/qtism/cli/Cli.php
+++ b/src/qtism/cli/Cli.php
@@ -252,6 +252,6 @@ abstract class Cli
protected function isVerbose()
{
$arguments = $this->getArguments();
- return $this->arguments['verbose'];
+ return $this->arguments['verbose'] === true;
}
}
\ No newline at end of file
diff --git a/src/qtism/cli/Render.php b/src/qtism/cli/Render.php
index <HASH>..<HASH> 100644
--- a/src/qtism/cli/Render.php
+++ b/src/qtism/cli/Render.php
@@ -46,6 +46,7 @@ class Render extends Cli
{
$arguments = new Arguments(array('strict' => false));
+ // -- Options
// Flavour option.
$arguments->addOption(
array('flavour', 'f'),
@@ -63,6 +64,13 @@ class Render extends Cli
)
);
+ // -- Flags
+ // Document option.
+ $arguments->addFlag(
+ array('document', 'd'),
+ 'Embed the rendering into a document.'
+ );
+
return $arguments;
}
@@ -134,12 +142,10 @@ class Render extends Cli
/**
* Run the rendering behaviour related to the "aQTI" flavour.
*
- * @param AqtiRenderingEngine $engine
+ * @param \qtism\runtime\rendering\markup\aqti\AqtiRenderingEngine $renderer
*/
- private function runAqti(AqtiRenderingEngine $engine) {
+ private function runAqti(AqtiRenderingEngine $renderer) {
$arguments = $this->getArguments();
-
- $renderer = $this->instantiateEngine();
$source = $arguments['source'];
$profile = $arguments['flavour'];
@@ -149,11 +155,17 @@ class Render extends Cli
$xml = $renderer->render($doc->getDocumentComponent());
$xml->formatOutput = true;
- $header = "<!doctype html>\n";
+ $header = "";
+ $footer = "";
+
+ if ($arguments['document'] === true) {
+ $header .= "<!doctype html>\n";
+ }
+
$xpath = new DOMXPath($xml);
$assessmentItemElts = $xpath->query("//div[contains(@class, 'qti-assessmentItem')]");
- if ($assessmentItemElts->length > 0) {
+ if ($assessmentItemElts->length > 0 && $arguments['document'] === true) {
$htmlAttributes = array();
// Take the content of <assessmentItem> and put it into <html>.
@@ -181,7 +193,11 @@ class Render extends Cli
$body = $xml->saveXml($xml->documentElement) . "\n";
}
- $footer = "</html>\n";
+ if ($arguments['document'] === true) {
+ $footer = "</html>\n";
+ }
+ } else {
+ $body = $xml->saveXml($xml->documentElement) . "\n";
}
$this->out("{$header}{$body}{$footer}", false);
@@ -190,12 +206,10 @@ class Render extends Cli
/**
* Run the rendering behaviour related to the "XHTML" flavour.
*
- * @param XhtmlRenderingEngine $engine
+ * @param \qtism\runtime\rendering\markup\xhtml\XhtmlRenderingEngine $renderer
*/
- private function runXhtml(XhtmlRenderingEngine $engine) {
+ private function runXhtml(XhtmlRenderingEngine $renderer) {
$arguments = $this->getArguments();
-
- $renderer = $this->instantiateEngine();
$source = $arguments['source'];
$profile = $arguments['flavour'];
@@ -205,21 +219,33 @@ class Render extends Cli
$xml = $renderer->render($doc->getDocumentComponent());
$xml->formatOutput = true;
- $header = "<!doctype html>\n";
- $header .= "<html>\n";
- $header .= "<head>\n";
- $header .= "<meta charset=\"utf-8\">\n";
- $header .= "</head>\n";
- $header .= "<body>\n";
-
- $footer = "</body>\n";
- $footer .= "</html>\n";
+ $header = "";
+ $footer = "";
+
+ if ($arguments['document'] === true) {
+ $header .= "<!doctype html>\n";
+ $header .= "<html>\n";
+ $header .= "<head>\n";
+ $header .= "<meta charset=\"utf-8\">\n";
+ $header .= "</head>\n";
+ $header .= "<body>\n";
+
+ $footer = "</body>\n";
+ $footer .= "</html>\n";
+ }
$body = $xml->saveXml($xml->documentElement) . "\n";
-
$this->out("{$header}{$body}{$footer}", false);
}
+ /**
+ * Instantiate an appropriate Rendering Engine.
+ *
+ * The instantiated Rendering Engine implementation will depend on the "flavour"
+ * CLI argument.
+ *
+ * @return \qtism\runtime\rendering\markup\AbstractMarkupRenderingEngine
+ */
private function instantiateEngine() {
$arguments = $this->getArguments();
switch (strtolower($arguments['flavour'])) { | -d flag in qtisdk render binary to embed (or not) rendering in a doc. | oat-sa_qti-sdk | train |
f9e07770bd9b5142de517b05a85ffbb42d1895da | diff --git a/property.go b/property.go
index <HASH>..<HASH> 100644
--- a/property.go
+++ b/property.go
@@ -122,7 +122,6 @@ func toPropertyDescriptor(value Value) (descriptor _property) {
}
if objectDescriptor.hasProperty("writable") {
- descriptor.value = UndefinedValue() // FIXME Is this the right place for this?
if objectDescriptor.get("writable").toBoolean() {
descriptor.writeOn()
} else { | [[Value]] should NOT be set to Undefined if missing | robertkrimen_otto | train |
644e7098f63c1787d9b0af46b939541aed07356f | diff --git a/structr-ui/src/main/resources/structr/css/main.css b/structr-ui/src/main/resources/structr/css/main.css
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/css/main.css
+++ b/structr-ui/src/main/resources/structr/css/main.css
@@ -4484,12 +4484,11 @@ div.attr-mapping td.transform {
}
div.attr-mapping td.transform input {
- width: 99%;
+ width: calc(100% - 10px);
}
div.attr-mapping select.attr-mapping {
- width: calc(100% - 5px);
- margin-left: 5px;
+ width: 100%;
}
div#xml-import {
@@ -4541,7 +4540,7 @@ div#xml-config label {
font-weight: bold;
}
-div#xml-config select {
+div#xml-config select.xml-config-select {
width: 45%;
}
diff --git a/structr-ui/src/main/resources/structr/js/importer.js b/structr-ui/src/main/resources/structr/js/importer.js
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/js/importer.js
+++ b/structr-ui/src/main/resources/structr/js/importer.js
@@ -360,7 +360,8 @@ var Importer = {
'<tr><td data-name="' + key + '" data-level="' + level + '"' +
' class="xml-mapping" id="' + localPath + '"' +
' style="padding-left: ' + (level * 30) + 'px;">' +
- '⯈ ' + key + '</td></tr>'
+ _Icons.getHtmlForIcon(_Icons.collapsed_icon) +
+ ' ' + key + '</td></tr>'
);
$('#' + localPath).on('click', function() {
@@ -374,7 +375,7 @@ var Importer = {
var config = $('#config');
config.append('<label>Select action:</label>');
- config.append('<select id="action-select"></select>');
+ config.append('<select id="action-select" class="xml-config-select"></select>');
var action = $('#action-select');
action.append('<option value="">Skip</option>');
@@ -502,7 +503,7 @@ var Importer = {
}
el.append('<label>Select type:</label>');
- el.append('<select id="type-select"><option>-- select --</option></select>');
+ el.append('<select id="type-select" class="xml-config-select"><option>-- select --</option></select>');
if (!isRoot) {
el.append('<div id="non-root-options"></div>');
@@ -546,7 +547,7 @@ var Importer = {
nonRoot.empty();
nonRoot.append('<label>Select property name:</label>');
- nonRoot.append('<select id="name-select"></select>');
+ nonRoot.append('<select id="name-select" class="xml-config-select"></select>');
var nameSelect = $('#name-select');
@@ -639,7 +640,7 @@ var Importer = {
} else {
el.append('<label>Select property for text content:</label>');
- el.append('<select id="text-select"><option value="">--- ignore ---</option></select>');
+ el.append('<select id="text-select" class="xml-config-select"><option value="">--- ignore ---</option></select>');
var textSelect = $('#text-select');
var typeConfig = configuration[key];
@@ -723,9 +724,9 @@ var Importer = {
var elem = $('td.xml-mapping[data-name="' + key + '"]');
elem.empty();
if (value && value.length) {
- elem.append('<b>⯈ ' + value + '</b>');
+ elem.append('<b>' + _Icons.getHtmlForIcon(_Icons.collapsed_icon) + ' ' + value + '</b>');
} else {
- elem.append('⯈ ' + key);
+ elem.append(_Icons.getHtmlForIcon(_Icons.collapsed_icon) + ' ' + key);
}
}
};
\ No newline at end of file
diff --git a/structr-ui/src/main/resources/structr/js/init.js b/structr-ui/src/main/resources/structr/js/init.js
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/js/init.js
+++ b/structr-ui/src/main/resources/structr/js/init.js
@@ -490,6 +490,10 @@ var _Icons = {
default:
return _Icons.edition_community_icon;
}
+ },
+
+ getHtmlForIcon: function (icon) {
+ return '<i class="' + _Icons.getFullSpriteClass(icon) + '" />';
}
}; | UI Improvements in the XML import dialogue | structr_structr | train |
a382f538278015738637d2e92694d8cbaf702a07 | diff --git a/tests/test_config.py b/tests/test_config.py
index <HASH>..<HASH> 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -3,6 +3,7 @@ from tempfile import NamedTemporaryFile, gettempdir
import pytest
from uwsgiconf.config import Section, Configuration
+from uwsgiconf.presets.nice import Section as NiceSection
from uwsgiconf.exceptions import ConfigurationError
@@ -173,3 +174,11 @@ def test_configuration(capsys, assert_lines):
assert 'ini = :another' in Configuration([s1, s2], autoinclude_sections=True).format()
assert Configuration([s1, s2]).print_ini()
+
+
+def test_args_formatter(capsys, assert_lines):
+
+ formatted = NiceSection().as_configuration().format(formatter='args')
+ assert '--master' in formatted
+ assert 'true' not in formatted # no values for bools
+ assert '%k' not in formatted # no config vars support for CLI
diff --git a/uwsgiconf/formatters.py b/uwsgiconf/formatters.py
index <HASH>..<HASH> 100644
--- a/uwsgiconf/formatters.py
+++ b/uwsgiconf/formatters.py
@@ -69,6 +69,8 @@ def format_print_text(text, color_fg=None, color_bg=None):
class FormatterBase(object):
"""Base class for configuration formatters."""
+ alias = None
+
def __init__(self, sections):
self.sections = sections
@@ -86,6 +88,8 @@ class FormatterBase(object):
class IniFormatter(FormatterBase):
"""Translates a configuration as INI file."""
+ alias = 'ini'
+
def format(self):
lines = []
last_section = ''
@@ -100,3 +104,36 @@ class IniFormatter(FormatterBase):
lines = '\n'.join(lines)
return lines
+
+
+class ArgsFormatter(FormatterBase):
+ """Translates a configuration to command line arguments."""
+
+ alias = 'args'
+
+ def format(self):
+ lines = []
+
+ for section_name, key, value in self.iter_options():
+
+ if section_name == 'uwsgi':
+ value = str(value).strip()
+
+ if value == 'true':
+ lines.append('--%s' % key)
+
+ elif value.startswith('%'):
+ # No config var support is available in command line.
+ continue
+
+ else:
+ lines.extend(['--%s' % key, '%s' % value])
+
+ return lines
+
+
+FORMATTERS = {formatter.alias: formatter for formatter in (
+ ArgsFormatter,
+ IniFormatter,
+)}
+"""Available formatters by alias.""" | Added ArgsFormatter (repesent options as command line args). | idlesign_uwsgiconf | train |
f21f6658ddf12c7c47af33e41e2717f7ab49f550 | diff --git a/lib/airbrake-ruby/config.rb b/lib/airbrake-ruby/config.rb
index <HASH>..<HASH> 100644
--- a/lib/airbrake-ruby/config.rb
+++ b/lib/airbrake-ruby/config.rb
@@ -96,9 +96,9 @@ module Airbrake
self.blacklist_keys = []
self.whitelist_keys = []
- self.root_directory = (
+ self.root_directory = File.realpath(
(defined?(Bundler) && Bundler.root) ||
- File.expand_path(Dir.pwd)
+ Dir.pwd
)
merge(user_config)
diff --git a/spec/config_spec.rb b/spec/config_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/config_spec.rb
+++ b/spec/config_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Airbrake::Config do
end
it "sets the default root_directory" do
- expect(config.root_directory).to eq Bundler.root
+ expect(config.root_directory).to eq Bundler.root.realpath.to_s
end
it "doesn't set the default environment" do | resolve symlinked roots since that is what is shown in backtraces | airbrake_airbrake-ruby | train |
ba2efa90ea2d40ac6415e95eaecfe8cbadd3871c | diff --git a/glue/segmentdb/segmentdb_utils.py b/glue/segmentdb/segmentdb_utils.py
index <HASH>..<HASH> 100644
--- a/glue/segmentdb/segmentdb_utils.py
+++ b/glue/segmentdb/segmentdb_utils.py
@@ -33,6 +33,10 @@ def get_all_files_in_range(dirname, starttime, endtime):
ret = []
+ # Maybe the user just wants one file...
+ if os.path.isfile(dirname):
+ return [dirname]
+
first_four_start = starttime / 100000
first_four_end = endtime / 100000 | Allow the segment-url to be a file or a directory | gwastro_pycbc-glue | train |
89d1ab3518789966f4c8b73a959eb9f0c9f5175c | diff --git a/Module.php b/Module.php
index <HASH>..<HASH> 100644
--- a/Module.php
+++ b/Module.php
@@ -20,10 +20,10 @@ class Module
*
* @return void
*/
- public function init()
+ public function init($e)
{
- $events = StaticEventManager::getInstance();
- $events->attach('Zend\Mvc\Application', 'finish', array($this, 'cache'));
+ $events = $e->getEventManager()->getSharedManager();
+ $events->attach('application', 'finish', array($this, 'cache'));
}
/**
@@ -34,7 +34,7 @@ class Module
*/
public function cache($e)
{
- if (!$e->getRequest()->query()->get('buildCache')) {
+ if (!$e->getRequest()->getQuery()->get('buildCache')) {
return;
Update for ZF2 api changes | EvanDotPro_EdpSuperluminal | train
51d6de498d3b245d515254465ad38b2d06fd0fa9 | diff --git a/grok.go b/grok.go
index <HASH>..<HASH> 100644
--- a/grok.go
+++ b/grok.go
@@ -113,6 +113,24 @@ func (g *Grok) Parse(pattern string, text string) (map[string]string, error) {
return captures, nil
}
+// ParseToMultiMap works just like Parse, except that it allows to map multiple values to the same capture name.
+func (g* Grok) ParseToMultiMap(pattern string, text string) (map[string][]string, error) {
+ captures := make(map[string][]string)
+ cr, err := g.compile(pattern)
+ if err != nil {
+ return nil, err
+ }
+
+ match := cr.FindStringSubmatch(text)
+ for i, name := range cr.SubexpNames() {
+ if len(match) > 0 {
+ captures[name] = append(captures[name], match[i])
+ }
+ }
+
+ return captures, nil
+}
+
// AddPatternsFromPath loads grok patterns from a file or files from a directory
func (g *Grok) AddPatternsFromPath(path string) error {
diff --git a/grok_test.go b/grok_test.go
index <HASH>..<HASH> 100644
--- a/grok_test.go
+++ b/grok_test.go
@@ -125,6 +125,24 @@ func TestParse(t *testing.T) {
}
}
+func TestParseToMultiMap(t *testing.T) {
+ g := New()
+ g.AddPatternsFromPath("./patterns")
+ res, _ := g.ParseToMultiMap("%{DAY} %{DAY} %{DAY}", "Tue Wed Fri")
+ if len(res["DAY"]) != 3 {
+ t.Fatalf("DAY should be an array of 3 elements, but is '%s'", res["DAY"])
+ }
+ if res["DAY"][0] != "Tue" {
+ t.Fatalf("DAY[0] should be 'Tue' have '%s'", res["DAY"][0])
+ }
+ if res["DAY"][1] != "Wed" {
+ t.Fatalf("DAY[1] should be 'Wed' have '%s'", res["DAY"][1])
+ }
+ if res["DAY"][2] != "Fri" {
+ t.Fatalf("DAY[2] should be 'Fri' have '%s'", res["DAY"][2])
+ }
+}
+
func TestCaptures(t *testing.T) {
g := New()
g.AddPatternsFromPath("./patterns") | added g.ParseToMultiMap | vjeantet_grok | train |
7b4f6ad241a2d01229e038dfc0d2548638f7f273 | diff --git a/lib/sensu/server.rb b/lib/sensu/server.rb
index <HASH>..<HASH> 100644
--- a/lib/sensu/server.rb
+++ b/lib/sensu/server.rb
@@ -108,23 +108,14 @@ module Sensu
if check['type'] == 'metric'
handle_event(event)
else
- if check['status'] == 0
- @redis.hexists('events:' + client['name'], check['name']).callback do |exists|
- if exists == 1
- @redis.hdel('events:' + client['name'], check['name'])
- event['action'] = 'resolve'
- handle_event(event)
- end
- end
- else
- @redis.hget('events:' + client['name'], check['name']).callback do |event_json|
- occurrences = 1
- unless event_json.nil?
- previous_event = JSON.parse(event_json)
- if previous_event['status'] == check['status']
- occurrences = previous_event['occurrences'] += 1
- end
- end
+ @redis.hget('events:' + client['name'], check['name']).callback do |event_json|
+ previous_event = event_json ? JSON.parse(event_json) : nil
+ if previous_event && check['status'] == 0
+ @redis.hdel('events:' + client['name'], check['name'])
+ event['action'] = 'resolve'
+ handle_event(event)
+ else
+ occurrences = previous_event ? previous_event['occurrences'] += 1 : 1
@redis.hset('events:' + client['name'], check['name'], {'status' => check['status'], 'output' => check['output'], 'occurrences' => occurrences}.to_json).callback do
event['occurrences'] = occurrences
event['action'] = 'create' | [occurrence-and-issued] setup_results, a little refactoring | sensu_sensu | train |