hash (stringlengths 40–40) | diff (stringlengths 131–114k) | message (stringlengths 7–980) | project (stringlengths 5–67) | split (stringclasses 1: train)
---|---|---|---|---|
317cb41ff33557a64d132253a2830440603c67c2 | diff --git a/languagetool-language-modules/en/src/main/java/org/languagetool/rules/en/EnglishUnpairedBracketsRule.java b/languagetool-language-modules/en/src/main/java/org/languagetool/rules/en/EnglishUnpairedBracketsRule.java
index <HASH>..<HASH> 100644
--- a/languagetool-language-modules/en/src/main/java/org/languagetool/rules/en/EnglishUnpairedBracketsRule.java
+++ b/languagetool-language-modules/en/src/main/java/org/languagetool/rules/en/EnglishUnpairedBracketsRule.java
@@ -48,7 +48,7 @@ public class EnglishUnpairedBracketsRule extends GenericUnpairedBracketsRule {
public EnglishUnpairedBracketsRule(ResourceBundle messages, Language language) {
super(messages, EN_START_SYMBOLS, EN_END_SYMBOLS);
- addExamplePair(Example.wrong("<marker>\"</marker>I'm over here, she said."),
+ addExamplePair(Example.wrong("\"I'm over here,<marker></marker> she said."),
Example.fixed("\"I'm over here,<marker>\"</marker> she said."));
} | [en] change example sentence markup (#<I>) | languagetool-org_languagetool | train |
a4d001709d38ca39069fa4f4b472953052ff4f87 | diff --git a/spice.py b/spice.py
index <HASH>..<HASH> 100644
--- a/spice.py
+++ b/spice.py
@@ -24,7 +24,7 @@ def search(query, medium):
elif medium == MANGA:
return [Manga(entry) for entry in results.manga.findAll('entry')]
else:
- return
+ return None
def search_id(id, medium):
scrape_query = helpers.get_scrape_url(id, medium) | Change search invalid argument return to None
Forgot this. | Utagai_spice | train |
c725579ce933d30022699c3228bb5275be1581d2 | diff --git a/lib/js-yaml/core.js b/lib/js-yaml/core.js
index <HASH>..<HASH> 100644
--- a/lib/js-yaml/core.js
+++ b/lib/js-yaml/core.js
@@ -8,20 +8,87 @@ var $$ = module.exports = {};
////////////////////////////////////////////////////////////////////////////////
+// merges two or more objects
+$$.merge = function merge(receiver) {
+ var i, l, key;
+
+ receiver = receiver || {};
+
+ for (i = 1, l = arguments.length; i < l; i++) {
+ if ('object' === typeof arguments[i]) {
+ for (key in arguments[i]) {
+ if (arguments[i].hasOwnProperty(key)) {
+ receiver[key] = arguments[i][key];
+ }
+ }
+ }
+ }
+
+ return receiver;
+};
+
+
+// simple inheritance algorithm
+$$.inherits = function inherits(child, parent) {
+ $$.merge(child.prototype, parent.prototype);
+ $$.merge(child, parent);
+
+ child.__parent__ = parent;
+};
+
+
+// wrapper for instanceof that allows to check inheritance after $$.inherits
+$$.isInstanceOf = function isInstanceOf(obj, klass) {
+ var parent;
+
+ if (obj instanceof klass) {
+ return true;
+ }
+
+ if (!!obj && !!obj.constructor) {
+ parent = obj.constructor.__parent__;
+ return (parent === klass || $$.isInstanceOf(parent, klass));
+ }
+
+ return false;
+};
+
+
// returns object with exported properties of all required modules
// example: var __ = $$.import('errors', 'nodes');
$$.import = function import_modules() {
- var box = {}, i, each;
+ var box = {}, i, mod;
- each = function (src) {
- var mod = require('./' + src);
+ for (i = 0; i < arguments.length; i++) {
+ mod = require('./' + arguments[i]);
Object.getOwnPropertyNames(mod).forEach(function (prop) {
box[prop] = mod[prop];
});
- };
+ }
+
+ return box;
+};
+
+
+// returns object with key-value pairs, where keys are names of given functions
+// and values are functions themselves.
+//
+// ##### example
+//
+// function foo() {};
+// function bar() {};
+//
+// module.exports = $$.expose(foo, bar);
+//
+// module.exports.foo === foo;
+// module.exports.bar === bar;
+$$.expose = function expose_functions() {
+ var box = {}, i;
for (i = 0; i < arguments.length; i++) {
- each(arguments[i]);
+ if ('function' === typeof arguments[i] && !!arguments[i].name) {
+ box[arguments[i].name] = arguments[i];
+ }
}
return box;
@@ -209,6 +276,17 @@ $$.Hash = function Hash(defaultValue) {
};
+// Simple implementation of classes for JS
+$$.Class = function Class(name, methods) {
+
+ var constructor = function constructor() {
+ var instance = (this instanceof constructor) ? this : new constructor();
+ methods.__init__([instance].concat(arguments));
+ return instance;
+ };
+};
+
+
////////////////////////////////////////////////////////////////////////////////
// vim:ts=2:sw=2
//////////////////////////////////////////////////////////////////////////////// | Added helpers to replace JS.Class | nodeca_js-yaml | train |
0ba26f6b0b5938fed2f600d3f0b79992c9a6548a | diff --git a/src/AutobahnPHP/Dealer.php b/src/AutobahnPHP/Dealer.php
index <HASH>..<HASH> 100644
--- a/src/AutobahnPHP/Dealer.php
+++ b/src/AutobahnPHP/Dealer.php
@@ -15,6 +15,7 @@ use AutobahnPHP\Message\ErrorMessage;
use AutobahnPHP\Message\Message;
use AutobahnPHP\Message\RegisteredMessage;
use AutobahnPHP\Message\RegisterMessage;
+use AutobahnPHP\Message\UnregisteredMessage;
use AutobahnPHP\Message\UnregisterMessage;
use AutobahnPHP\Message\YieldMessage;
@@ -91,6 +92,23 @@ class Dealer extends AbstractRole
public function unregister(Session $session, UnregisterMessage $msg)
{
+ //find the procedure by request id
+ $this->registrations->rewind();
+ while ($this->registrations->valid()) {
+ $registration = $this->registrations->current();
+ if ($registration->getId() == $msg->getRegistrationId()) {
+ $this->registrations->next();
+ echo 'Unegistered: ' . $registration->getProcedureName();
+ $this->registrations->detach($registration);
+
+ return new UnregisteredMessage($msg->getRequestId());
+ }
+ }
+
+ $errorMsg = ErrorMessage::createErrorMessageFromMessage($msg);
+ echo 'No registration: ' . $msg->getRegistrationId();
+
+ return $errorMsg->setErrorURI('wamp.error.no_such_registration');
}
diff --git a/src/AutobahnPHP/Message/UnregisterMessage.php b/src/AutobahnPHP/Message/UnregisterMessage.php
index <HASH>..<HASH> 100644
--- a/src/AutobahnPHP/Message/UnregisterMessage.php
+++ b/src/AutobahnPHP/Message/UnregisterMessage.php
@@ -9,13 +9,28 @@
namespace AutobahnPHP\Message;
-class UnregisterMessage extends Message {
+use Voryx\Wamp2\Wamp2Connection;
+
+class UnregisterMessage extends Message
+{
+
+ private $requestId;
+
+ private $registrationId;
+
+ function __construct($requestId, $registrationId)
+ {
+ $this->registrationId = $registrationId;
+ $this->requestId = $requestId;
+ }
+
+
/**
* @return int
*/
public function getMsgCode()
{
- // TODO: Implement getMsgCode() method.
+ return static::MSG_UNREGISTER;
}
/**
@@ -26,7 +41,7 @@ class UnregisterMessage extends Message {
*/
public function getAdditionalMsgFields()
{
- // TODO: Implement getAdditionalMsgFields() method.
+ return array();
}
/**
@@ -34,7 +49,25 @@ class UnregisterMessage extends Message {
*/
public function getValidConnectionStates()
{
- // TODO: Implement getValidConnectionStates() method.
+ return array(Wamp2Connection::STATE_ESTABLISHED);
+ }
+
+ /**
+ * @return mixed
+ */
+ public function getRegistrationId()
+ {
+ return $this->registrationId;
+ }
+
+ /**
+ * @return mixed
+ */
+ public function getRequestId()
+ {
+ return $this->requestId;
}
+
+
}
\ No newline at end of file
diff --git a/src/AutobahnPHP/Message/UnregisteredMessage.php b/src/AutobahnPHP/Message/UnregisteredMessage.php
index <HASH>..<HASH> 100644
--- a/src/AutobahnPHP/Message/UnregisteredMessage.php
+++ b/src/AutobahnPHP/Message/UnregisteredMessage.php
@@ -9,14 +9,25 @@
namespace AutobahnPHP\Message;
-class UnregisteredMessage extends Message {
+use Voryx\Wamp2\Wamp2Connection;
+
+class UnregisteredMessage extends Message
+{
+
+ private $requestId;
+
+ function __construct($requestId)
+ {
+ $this->requestId = $requestId;
+ }
+
/**
* @return int
*/
public function getMsgCode()
{
- // TODO: Implement getMsgCode() method.
+ return static::MSG_UNREGISTERED;
}
/**
@@ -27,7 +38,7 @@ class UnregisteredMessage extends Message {
*/
public function getAdditionalMsgFields()
{
- // TODO: Implement getAdditionalMsgFields() method.
+ return array($this->getRequestId());
}
/**
@@ -35,6 +46,6 @@ class UnregisteredMessage extends Message {
*/
public function getValidConnectionStates()
{
- // TODO: Implement getValidConnectionStates() method.
+ return array(Wamp2Connection::STATE_ESTABLISHED);
}
}
\ No newline at end of file | Added unregistration to dealer - untested | voryx_Thruway | train |
3efb86751207414a9b1d3196a85bcb959128c15c | diff --git a/src/org/opencms/db/generic/CmsVfsDriver.java b/src/org/opencms/db/generic/CmsVfsDriver.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/db/generic/CmsVfsDriver.java
+++ b/src/org/opencms/db/generic/CmsVfsDriver.java
@@ -1,7 +1,7 @@
/*
* File : $Source: /alkacon/cvs/opencms/src/org/opencms/db/generic/CmsVfsDriver.java,v $
- * Date : $Date: 2003/10/06 15:51:01 $
- * Version: $Revision: 1.142 $
+ * Date : $Date: 2003/10/06 16:55:51 $
+ * Version: $Revision: 1.143 $
*
* This library is part of OpenCms -
* the Open Source Content Mananagement System
@@ -75,7 +75,7 @@ import source.org.apache.java.util.Configurations;
*
* @author Thomas Weckert ([email protected])
* @author Michael Emmerich ([email protected])
- * @version $Revision: 1.142 $ $Date: 2003/10/06 15:51:01 $
+ * @version $Revision: 1.143 $ $Date: 2003/10/06 16:55:51 $
* @since 5.1
*/
public class CmsVfsDriver extends Object implements I_CmsDriver, I_CmsVfsDriver {
@@ -554,12 +554,12 @@ public class CmsVfsDriver extends Object implements I_CmsDriver, I_CmsVfsDriver
}
// now try to write the backup propertydefinition as well
- try {
+ /*try {
m_driverManager.getBackupDriver().createBackupPropertyDefinition(name, resourcetype);
} catch (CmsException ex) {
// do nothing here
// an error is thrown if the propertydefnition is already existing in the backup tables
- }
+ }*/
return readPropertyDefinition(name, projectId, resourcetype);
}
@@ -2313,7 +2313,8 @@ public class CmsVfsDriver extends Object implements I_CmsDriver, I_CmsVfsDriver
if (propdef == null) {
// create the definition of the property optionally if it is missing
if (addDefinition) {
- propdef = createPropertyDefinition(meta, projectId, resourceType);
+ propdef = createPropertyDefinition(meta, projectId, resourceType);
+ m_driverManager.getBackupDriver().createBackupPropertyDefinition(meta,resourceType);
} else {
throw new CmsException("[" + this.getClass().getName() + ".writeProperty/1] " + meta, CmsException.C_NOT_FOUND);
             } | Bugfix in creating Propertydefinitions: Backup values are written correctly | alkacon_opencms-core | train |
c18823b89f3bf48ebba694c6e719b60e9a101304 | diff --git a/lib/canned_soap/version.rb b/lib/canned_soap/version.rb
index <HASH>..<HASH> 100644
--- a/lib/canned_soap/version.rb
+++ b/lib/canned_soap/version.rb
@@ -1,3 +1,3 @@
module CannedSoap
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
end | bumped to <I> | gknedo_canned_soap | train |
962541396bc8d0dad657921ef78bd938ca25ff56 | diff --git a/drivers/java/src/main/java/com/rethinkdb/gen/ast/MakeObj.java b/drivers/java/src/main/java/com/rethinkdb/gen/ast/MakeObj.java
index <HASH>..<HASH> 100644
--- a/drivers/java/src/main/java/com/rethinkdb/gen/ast/MakeObj.java
+++ b/drivers/java/src/main/java/com/rethinkdb/gen/ast/MakeObj.java
@@ -15,10 +15,12 @@ import com.rethinkdb.ast.ReqlAst;
public class MakeObj extends ReqlExpr {
-
public MakeObj(Object arg) {
this(new Arguments(arg), null);
}
+ public MakeObj(OptArgs opts){
+ this(new Arguments(), opts);
+ }
public MakeObj(Arguments args){
this(args, null);
}
@@ -29,7 +31,6 @@ public class MakeObj extends ReqlExpr {
super(termType, args, optargs);
}
-
public static MakeObj fromMap(java.util.Map<String, ReqlAst> map){
return new MakeObj(OptArgs.fromMap(map));
}
diff --git a/drivers/java/templates/ast/MakeObj.java b/drivers/java/templates/ast/MakeObj.java
index <HASH>..<HASH> 100644
--- a/drivers/java/templates/ast/MakeObj.java
+++ b/drivers/java/templates/ast/MakeObj.java
@@ -1,6 +1,22 @@
<%inherit file="../AstSubclass.java" />
-
-<%block name="special_methods">
+<%block name="constructors">\
+ public MakeObj(Object arg) {
+ this(new Arguments(arg), null);
+ }
+ public MakeObj(OptArgs opts){
+ this(new Arguments(), opts);
+ }
+ public MakeObj(Arguments args){
+ this(args, null);
+ }
+ public MakeObj(Arguments args, OptArgs optargs) {
+ this(TermType.MAKE_OBJ, args, optargs);
+ }
+ protected MakeObj(TermType termType, Arguments args, OptArgs optargs){
+ super(termType, args, optargs);
+ }
+</%block>
+<%block name="special_methods">\
public static MakeObj fromMap(java.util.Map<String, ReqlAst> map){
return new MakeObj(OptArgs.fromMap(map));
} | Fixed recursion in MakeObject | rethinkdb_rethinkdb | train |
b0f98940ccae620367093381d33e24108f651cb8 | diff --git a/src/JmesPath/Lexer.php b/src/JmesPath/Lexer.php
index <HASH>..<HASH> 100644
--- a/src/JmesPath/Lexer.php
+++ b/src/JmesPath/Lexer.php
@@ -250,8 +250,9 @@ class Lexer
$value = $primitiveMap[$primitives[$value]];
} elseif (strlen($value) == 0) {
$this->throwSyntax('Empty JSON literal', $this->pos - 2);
- } elseif (isset($decodeCharacters[$value[0]])) {
- // Only decode a JSON literal when the it isn't a string
+ } elseif (!isset($decodeCharacters[$value[0]])) {
+ $value = $this->decodeJson('"' . $value . '"');
+ } else {
$value = $this->decodeJson($value);
} | JSON decoding string literals without quotes to support unicode escapes, newlines, etc | jmespath_jmespath.php | train |
286be60a4faf5ffad9052a83a22c3bae097a1d58 | diff --git a/sievelib/managesieve.py b/sievelib/managesieve.py
index <HASH>..<HASH> 100644
--- a/sievelib/managesieve.py
+++ b/sievelib/managesieve.py
@@ -479,8 +479,7 @@ class Client(object):
:rtype: boolean
"""
try:
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.connect((self.srvaddr, self.srvport))
+ self.sock = socket.create_connection((self.srvaddr, self.srvport))
self.sock.settimeout(Client.read_timeout)
except socket.error as msg:
raise Error("Connection to server failed: %s" % str(msg)) | provide support for non-IPv4 connections
Use socket.create_connection() when establishing the server connection.
This avoids hardcoding AF_INET and thus introduces support for IPv6 | tonioo_sievelib | train |
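
A minimal sketch of the point made in the commit message above, assuming only the standard-library `socket` module; the host and port are placeholders, not sievelib's configuration. `socket.create_connection()` resolves the target with `getaddrinfo()` and tries each returned address family, so IPv6-only servers are reachable, whereas a socket built with `AF_INET` is pinned to IPv4.

```python
import socket

HOST, PORT = "localhost", 4190  # placeholder ManageSieve-style endpoint

def connect_ipv4_only(host=HOST, port=PORT, timeout=10):
    """Old approach: AF_INET is hardcoded, so IPv6-only hosts are unreachable."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(timeout)
    sock.connect((host, port))
    return sock

def connect_any_family(host=HOST, port=PORT, timeout=10):
    """New approach: getaddrinfo() picks IPv4 or IPv6, whichever is available."""
    return socket.create_connection((host, port), timeout=timeout)

if __name__ == "__main__":
    try:
        sock = connect_any_family()
        print("connected, address family:", sock.family)
        sock.close()
    except OSError as exc:
        print("connection failed:", exc)
```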
99912ed92635600eb6b03959c33638cb62f1bdca | diff --git a/activerecord/lib/active_record/relation.rb b/activerecord/lib/active_record/relation.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/relation.rb
+++ b/activerecord/lib/active_record/relation.rb
@@ -333,7 +333,7 @@ module ActiveRecord
# Please check unscoped if you want to remove all previous scopes (including
# the default_scope) during the execution of a block.
def scoping
- previous, klass.current_scope = klass.current_scope, self
+ previous, klass.current_scope = klass.current_scope(true), self
yield
ensure
klass.current_scope = previous
diff --git a/activerecord/lib/active_record/scoping.rb b/activerecord/lib/active_record/scoping.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/scoping.rb
+++ b/activerecord/lib/active_record/scoping.rb
@@ -10,8 +10,8 @@ module ActiveRecord
end
module ClassMethods
- def current_scope #:nodoc:
- ScopeRegistry.value_for(:current_scope, self)
+ def current_scope(skip_inherited_scope = false) # :nodoc:
+ ScopeRegistry.value_for(:current_scope, self, skip_inherited_scope)
end
def current_scope=(scope) #:nodoc:
@@ -75,8 +75,9 @@ module ActiveRecord
end
# Obtains the value for a given +scope_type+ and +model+.
- def value_for(scope_type, model)
+ def value_for(scope_type, model, skip_inherited_scope = false)
raise_invalid_scope_type!(scope_type)
+ return @registry[scope_type][model.name] if skip_inherited_scope
klass = model
base = model.base_class
while klass <= base
diff --git a/activerecord/test/cases/scoping/relation_scoping_test.rb b/activerecord/test/cases/scoping/relation_scoping_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/scoping/relation_scoping_test.rb
+++ b/activerecord/test/cases/scoping/relation_scoping_test.rb
@@ -229,6 +229,15 @@ class RelationScopingTest < ActiveRecord::TestCase
end
end
+ def test_scoping_is_correctly_restored
+ Comment.unscoped do
+ SpecialComment.unscoped.created
+ end
+
+ assert_nil Comment.current_scope
+ assert_nil SpecialComment.current_scope
+ end
+
def test_circular_joins_with_scoping_does_not_crash
posts = Post.joins(comments: :post).scoping do
Post.first(10) | Fix to scoping is correctly restored
This regression was caused by #<I>.
If an STI subclass uses scoping inside parent class scoping,
`current_scope` in the subclass is never restored.
Fixed to restore `current_scope` to its previous value correctly. | rails_rails | train |
6f820ec80c633e862550acfeed942c2db758093d | diff --git a/php/class-extension.php b/php/class-extension.php
index <HASH>..<HASH> 100644
--- a/php/class-extension.php
+++ b/php/class-extension.php
@@ -7,10 +7,6 @@ namespace Rarst\Meadow;
*/
class Extension extends \Twig_Extension implements \Twig_Extension_GlobalsInterface {
- public function getName() {
- return 'meadow';
- }
-
public function getFunctions() {
$options = array( | Removed deprecated getName method from extension. | Rarst_meadow | train |
b2d69bbed697f8d8049c6c9faf4266678f378de6 | diff --git a/senseapi.py b/senseapi.py
index <HASH>..<HASH> 100755
--- a/senseapi.py
+++ b/senseapi.py
@@ -159,12 +159,12 @@ class SenseAPI:
else:
# if any of the calls fails, we cannot be cannot be sure about the sensors in CommonSense
return None
-
+
if len(s) < 1000:
break
-
+
j += 1
-
+
return sensors
@@ -179,7 +179,7 @@ class SenseAPI:
@return (string) - sensor_id of sensor or None if not found
"""
- if device_type == None:
+ if device_type == None:
for sensor in sensors:
if sensor['name'] == sensor_name:
return sensor['id']
@@ -198,7 +198,7 @@ class SenseAPI:
heads.update(headers)
body = ''
http_url = url
- if self.__authentication__ == 'not_authenticated' and url == '/users.json' and method == 'POST':
+ if self.__authentication__ == 'not_authenticated' and (url == '/users.json' or url == '/users.json?disable_mail=1') and method == 'POST':
heads.update({"Content-type": "application/json", "Accept":"*"})
body = json.dumps(parameters)
elif self.__authentication__ == 'not_authenticated':
@@ -1060,6 +1060,21 @@ class SenseAPI:
self.__error__ = "api call unsuccessful"
return False
+ def UsersChangePassword (self, current_password, new_password):
+ """
+ Change the password for the current user
+
+ @param current_password (string) - md5 hash of the current password of the user
+ @param new_password (string) - md5 hash of the new password of the user (make sure to doublecheck!)
+
+ @return (bool) - Boolean indicating whether ChangePassword was successful.
+ """
+ if self.__SenseApiCall__('/change_password', "POST", {"current_password":current_password, "new_password":new_password}):
+ return True
+ else:
+ self.__error__ = "api call unsuccessful"
+ return False
+
def UsersDelete (self, user_id):
"""
Delete user.
@@ -1652,7 +1667,7 @@ class SenseAPI:
else:
self.__error__ = "api call unsuccessful"
return False
-
+
def DomainAddUserPost_Parameters(self):
return {'users': [{'id':'1'}]}
@@ -1753,6 +1768,13 @@ class SenseAPI:
else:
self.__error__ = "api call unsuccessful"
return False
+
+ def DataProcessorsPut(self, dataProcessorId, parameters):
+ if self.__SenseApiCall__('/dataprocessors/{id}.json'.format(id = dataProcessorId), 'PUT', parameters):
+ return True
+ else:
+ self.__error__ = "api call unsuccessful"
+ return False
#==================================
# N O N C L A S S M E T H O D S =
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
from setuptools import setup
-setup(name='senseapi',
- version = '0.5.8',
+setup(name = 'senseapi',
+ version = '0.5.11',
packages = [''],
install_requires = ['oauth>=1.0.1'],
author = "Sense Observation Systems", | Added support for creating users without welcome email. Added call for changing current user's password | senseobservationsystems_commonsense-python-lib | train |
33e54539799961288f624e7e74b8a62773e6eaeb | diff --git a/cmd/smith/app/bundle_controller.go b/cmd/smith/app/bundle_controller.go
index <HASH>..<HASH> 100644
--- a/cmd/smith/app/bundle_controller.go
+++ b/cmd/smith/app/bundle_controller.go
@@ -136,6 +136,7 @@ func (c *BundleControllerConstructor) New(config *controller.Config, cctx *contr
// Controller
cntrlr := &bundlec.Controller{
Logger: config.Logger,
+ ReadyForWork: cctx.ReadyForWork,
BundleClient: config.SmithClient.SmithV1(),
BundleStore: bs,
SmartClient: config.SmartClient,
diff --git a/pkg/controller/bundlec/controller.go b/pkg/controller/bundlec/controller.go
index <HASH>..<HASH> 100644
--- a/pkg/controller/bundlec/controller.go
+++ b/pkg/controller/bundlec/controller.go
@@ -31,6 +31,7 @@ type Controller struct {
Logger *zap.Logger
+ ReadyForWork func()
BundleClient smithClient_v1.BundlesGetter
BundleStore BundleStore
SmartClient SmartClient
@@ -69,7 +70,7 @@ func (c *Controller) Prepare(crdInf cache.SharedIndexInformer, resourceInfs map[
}
// Run begins watching and syncing.
-// All informers must be synched before this method is invoked.
+// All informers must be synced before this method is invoked.
func (c *Controller) Run(ctx context.Context) {
defer c.wg.Wait()
defer c.crdContextCancel() // should be executed after stopping is set to true
@@ -82,5 +83,7 @@ func (c *Controller) Run(ctx context.Context) {
c.Logger.Info("Starting Bundle controller")
defer c.Logger.Info("Shutting down Bundle controller")
+ c.ReadyForWork()
+
<-ctx.Done()
}
diff --git a/pkg/controller/generic.go b/pkg/controller/generic.go
index <HASH>..<HASH> 100644
--- a/pkg/controller/generic.go
+++ b/pkg/controller/generic.go
@@ -42,8 +42,12 @@ func NewGeneric(config *Config, logger *zap.Logger, queue workqueue.RateLimiting
if _, ok := controllers[descr.Gvk]; ok {
return nil, errors.Errorf("duplicate controller for GVK %s", descr.Gvk)
}
+ readyForWork := make(chan struct{})
queueGvk := wq.NewQueueForGvk(descr.Gvk)
iface, err := constr.New(config, &Context{
+ ReadyForWork: func() {
+ close(readyForWork)
+ },
Informers: informers,
Controllers: controllers,
WorkQueue: queueGvk,
@@ -68,6 +72,7 @@ func NewGeneric(config *Config, logger *zap.Logger, queue workqueue.RateLimiting
holders[descr.Gvk] = ControllerHolder{
Cntrlr: iface,
ZapNameField: descr.ZapNameField,
+ ReadyForWork: readyForWork,
}
}
return &Generic{
@@ -98,11 +103,19 @@ func (g *Generic) Run(ctx context.Context) {
}
g.logger.Info("Informers synced")
- // Stage: start all controllers
+ // Stage: start all controllers then wait for them to signal ready for work
stage = stgr.NextStage()
for _, c := range g.Controllers {
stage.StartWithContext(c.Cntrlr.Run)
}
+ for gvk, c := range g.Controllers {
+ select {
+ case <-ctx.Done():
+ g.logger.Sugar().Infof("Was waiting for the controller for %s to become ready for processing", gvk)
+ return
+ case <-c.ReadyForWork:
+ }
+ }
// Stage: start workers
stage = stgr.NextStage()
@@ -117,4 +130,5 @@ func (g *Generic) Run(ctx context.Context) {
type ControllerHolder struct {
Cntrlr Interface
ZapNameField ZapNameField
+ ReadyForWork <-chan struct{}
}
diff --git a/pkg/controller/types.go b/pkg/controller/types.go
index <HASH>..<HASH> 100644
--- a/pkg/controller/types.go
+++ b/pkg/controller/types.go
@@ -73,8 +73,15 @@ type Config struct {
}
type Context struct {
+ // ReadyForWork is a function that the controller must call from its Run() method once it is ready to
+ // process work using it's Process() method. This should be used to delay processing while some initialization
+ // is being performed.
+ ReadyForWork func()
+ // Will contain all informers once Generic controller constructs all controllers.
+ // This is a read only field, must not be modified.
Informers map[schema.GroupVersionKind]cache.SharedIndexInformer
- // Will contain all controllers once Generic controller constructs them
+ // Will contain all controllers once Generic controller constructs them.
+ // This is a read only field, must not be modified.
Controllers map[schema.GroupVersionKind]Interface
WorkQueue WorkQueueProducer
} | Ability for controllers to do initialization before starting to do work | atlassian_smith | train |
c26572437151b657d783542a90c4d5515fc5109c | diff --git a/project_generator/tools/uvision.py b/project_generator/tools/uvision.py
index <HASH>..<HASH> 100644
--- a/project_generator/tools/uvision.py
+++ b/project_generator/tools/uvision.py
@@ -49,7 +49,15 @@ class uVisionDefinitions():
'Utilities': {
'Flash2': 'Segger\JL2CM3.dll',
},
- }
+ },
+ 'ulink-pro': {
+ 'TargetDlls': {
+ 'Driver': 'BIN\\ULP2CM3.dll',
+ },
+ 'Utilities': {
+ 'Flash2': 'BIN\\ULP2CM3.dll',
+ },
+ }
} | Add ULINK Pro to uvision tool
This patch adds ULINK Pro to the debuggers supported by uvision. | project-generator_project_generator | train |
aeced91ee1e441e82d3e45cb52a0f6bf7302051f | diff --git a/qiskit/_quantumprogram.py b/qiskit/_quantumprogram.py
index <HASH>..<HASH> 100644
--- a/qiskit/_quantumprogram.py
+++ b/qiskit/_quantumprogram.py
@@ -945,7 +945,7 @@ class QuantumProgram(object):
max_credits (int): the max credits to use 3, or 5
seed (int): the intial seed the simulatros use
qobj_id (str): identifier of the qobj.
- hpc (json): This will setup some parameter for
+ hpc (dict): This will setup some parameter for
ibmqx_hpc_qasm_simulator, using a JSON-like format like:
{
'multi_shot_optimization': Boolean,
diff --git a/test/python/test_quantumprogram.py b/test/python/test_quantumprogram.py
index <HASH>..<HASH> 100644
--- a/test/python/test_quantumprogram.py
+++ b/test/python/test_quantumprogram.py
@@ -1734,6 +1734,7 @@ class TestQuantumProgram(QiskitTestCase):
self.assertEqual(ex.message,
'Error waiting for Job results: Timeout after 0.01 seconds.')
+ @unittest.skipIf(TRAVIS_FORK_PULL_REQUEST, 'Travis fork pull request')
def test_hpc_parameter_is_correct(self):
"""Test for checking HPC parameter in compile() method.
It must be only used when the backend is ibmqx_hpc_qasm_simulator.
@@ -1757,6 +1758,7 @@ class TestQuantumProgram(QiskitTestCase):
'omp_num_threads': 16})
self.assertTrue(qobj)
+ @unittest.skipIf(TRAVIS_FORK_PULL_REQUEST, 'Travis fork pull request')
def test_hpc_parameter_is_incorrect(self):
"""Test for checking HPC parameter in compile() method.
It must be only used when the backend is ibmqx_hpc_qasm_simulator. | Fix hpc tests and adjust docstring | Qiskit_qiskit-terra | train |
13e8c8de64a910395e464adaf9eef7c015dfdca9 | diff --git a/request/request_test.go b/request/request_test.go
index <HASH>..<HASH> 100644
--- a/request/request_test.go
+++ b/request/request_test.go
@@ -122,6 +122,7 @@ func TestRequestSend(t *testing.T) {
"url": "http://docs.qingcloud.com/object_storage/api/object/get.html"
}`
httpResponse.Body = ioutil.NopCloser(bytes.NewReader([]byte(responseString)))
+ httpResponse.ContentLength = int64(len(responseString))
assert.Nil(t, err)
r.HTTPResponse = httpResponse
diff --git a/request/response/unpacker.go b/request/response/unpacker.go
index <HASH>..<HASH> 100644
--- a/request/response/unpacker.go
+++ b/request/response/unpacker.go
@@ -262,13 +262,17 @@ func (b *unpacker) parseError() error {
return nil
}
+ qsError := &errors.QingStorError{
+ StatusCode: b.resp.StatusCode,
+ RequestID: b.resp.Header.Get(http.CanonicalHeaderKey("X-QS-Request-ID")),
+ }
+
// QingStor nginx could refuse user's request directly and only return status code.
- // We should handle this and build a qingstor error with message.
+ // We should handle this and return qsError directly.
+ if b.resp.ContentLength <= 0 {
+ return qsError
+ }
if !strings.Contains(b.resp.Header.Get("Content-Type"), "application/json") {
- qsError := &errors.QingStorError{
- StatusCode: b.resp.StatusCode,
- Message: http.StatusText(b.resp.StatusCode),
- }
return qsError
}
@@ -284,17 +288,11 @@ func (b *unpacker) parseError() error {
return err
}
- qsError := &errors.QingStorError{}
- if buffer.Len() > 0 {
+ if buffer.Len() > 0 && json.Valid(buffer.Bytes()) {
err := json.Unmarshal(buffer.Bytes(), qsError)
if err != nil {
return err
}
}
- qsError.StatusCode = b.resp.StatusCode
- if qsError.RequestID == "" {
- qsError.RequestID = b.resp.Header.Get(http.CanonicalHeaderKey("X-QS-Request-ID"))
- }
-
return qsError
}
diff --git a/request/response/unpacker_test.go b/request/response/unpacker_test.go
index <HASH>..<HASH> 100644
--- a/request/response/unpacker_test.go
+++ b/request/response/unpacker_test.go
@@ -74,6 +74,7 @@ func TestSimpleUnpackHTTPRequest(t *testing.T) {
httpResponse.Header.Set("Content-Type", "application/json")
responseString := `{"a": "el_a", "b": "el_b", "cd": 1024, "ef": 2048}`
httpResponse.Body = ioutil.NopCloser(bytes.NewReader([]byte(responseString)))
+ httpResponse.ContentLength = int64(len(responseString))
output := &FakeOutput{}
outputValue := reflect.ValueOf(output)
@@ -137,6 +138,7 @@ func TestUnpackHTTPRequest(t *testing.T) {
]
}`
httpResponse.Body = ioutil.NopCloser(bytes.NewReader([]byte(responseString)))
+ httpResponse.ContentLength = int64(len(responseString))
output := &ListBucketsOutput{}
outputValue := reflect.ValueOf(output)
@@ -168,6 +170,7 @@ func TestUnpackHTTPRequestWithError(t *testing.T) {
"url": "http://docs.qingcloud.com/object_storage/api/bucket/get.html"
}`
httpResponse.Body = ioutil.NopCloser(bytes.NewReader([]byte(responseString)))
+ httpResponse.ContentLength = int64(len(responseString))
output := &ListBucketsOutput{}
outputValue := reflect.ValueOf(output)
@@ -215,6 +218,7 @@ func TestUnpackHTTPRequestWithEmptyError(t *testing.T) {
httpResponse := &http.Response{Header: http.Header{}}
httpResponse.StatusCode = 400
httpResponse.Body = ioutil.NopCloser(strings.NewReader(""))
+ httpResponse.Header.Set("X-QS-Request-ID", "aa08cf7a43f611e5886952542e6ce14b")
output := &ListBucketsOutput{}
outputValue := reflect.ValueOf(output)
@@ -224,5 +228,6 @@ func TestUnpackHTTPRequestWithEmptyError(t *testing.T) {
switch e := err.(type) {
case *errors.QingStorError:
assert.Equal(t, 400, e.StatusCode)
+ assert.Equal(t, "aa08cf7a43f611e5886952542e6ce14b", e.RequestID)
}
} | Fix requestID not returned while body is empty (#<I>)
* Fix requestID not returned with error. | yunify_qingstor-sdk-go | train |
1280acdac2bb4adf38d33080cbecb7f8e9d4d8ef | diff --git a/platforms/keyboard/keyboard_driver.go b/platforms/keyboard/keyboard_driver.go
index <HASH>..<HASH> 100644
--- a/platforms/keyboard/keyboard_driver.go
+++ b/platforms/keyboard/keyboard_driver.go
@@ -25,7 +25,7 @@ type Driver struct {
//
func NewDriver() *Driver {
k := &Driver{
- name: "Keyboard",
+ name: gobot.DefaultName("Keyboard"),
connect: func(k *Driver) (err error) {
if err := configure(); err != nil {
return err | keyboard: use new improved default namer to avoid API conflicts | hybridgroup_gobot | train |
9ddea2d8d7b51470c3620da2913c74798471960c | diff --git a/scripts/updateLicense.py b/scripts/updateLicense.py
index <HASH>..<HASH> 100644
--- a/scripts/updateLicense.py
+++ b/scripts/updateLicense.py
@@ -50,10 +50,12 @@ def update_go_license(name, force=False):
if year == CURRENT_YEAR:
break
- new_line = COPYRIGHT_RE.sub('Copyright (c) %d' % CURRENT_YEAR, line)
- assert line != new_line, ('Could not change year in: %s' % line)
- lines[i] = new_line
- changed = True
+ # Avoid updating the copyright year.
+ #
+ # new_line = COPYRIGHT_RE.sub('Copyright (c) %d' % CURRENT_YEAR, line)
+ # assert line != new_line, ('Could not change year in: %s' % line)
+ # lines[i] = new_line
+ # changed = True
break
if not found: | Do not update year for copyright in license (#<I>) | jaegertracing_jaeger-lib | train |
4fb800232b07bfc4eddb5e21cc9bffb58be2d067 | diff --git a/lib/processes.js b/lib/processes.js
index <HASH>..<HASH> 100644
--- a/lib/processes.js
+++ b/lib/processes.js
@@ -119,14 +119,16 @@ const Handler = function (proc, suiteData, options = {}) {
}
const closeFile = function () {
- fs.close(fd, (err) => {
+ if (writePending < 1 && closePending === true) {
closePending = false
- if (err) {
- closeRejecter(err)
- } else {
- closeResolver()
- }
- })
+ fs.close(fd, (err) => {
+ if (err) {
+ closeRejecter(err)
+ } else {
+ closeResolver()
+ }
+ })
+ }
}
const close = function () {
@@ -134,9 +136,7 @@ const Handler = function (proc, suiteData, options = {}) {
return new Promise((resolve, reject) => {
closeResolver = resolve
closeRejecter = reject
- if (writePending < 1) {
- closeFile()
- }
+ closeFile()
})
} | Do not execute fs.close more than once, when process was closed but logs are pending to be written | javierbrea_narval | train |
e317dc6b6314d60a0767aefb44eabe35302d925c | diff --git a/openfisca_web_api/environment.py b/openfisca_web_api/environment.py
index <HASH>..<HASH> 100644
--- a/openfisca_web_api/environment.py
+++ b/openfisca_web_api/environment.py
@@ -156,7 +156,9 @@ def load_environment(global_conf, app_conf):
model.input_variables_extractor = input_variables_extractors.setup(tax_benefit_system)
global country_package_dir_path
- country_package_dir_path = pkg_resources.get_distribution(conf['country_package']).location
+ # Using pkg_resources.get_distribution(conf["country_package"]).location
+ # returns a wrong path in virtualenvs (<venv>/lib versus <venv>/local/lib).
+ country_package_dir_path = country_package.__path__[0]
global api_package_version
api_package_version = pkg_resources.get_distribution('openfisca_web_api').version | Do not use pkg_resources but importlib
to get country_package dir | openfisca_openfisca-web-api | train |
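
A rough illustration of the reasoning in the commit message and the in-diff comment above; the helper names and the example package are assumptions for illustration, not OpenFisca's API. The metadata `location` reported by `pkg_resources` can point at a different prefix (`<venv>/lib` versus `<venv>/local/lib`) than the directory Python actually imported, while a package's own `__path__` always reflects the imported directory, which is what the diff switches to.

```python
import importlib

def package_dir_from_import(package_name):
    """Directory of the package as actually imported
    (equivalent to the diff's country_package.__path__[0])."""
    module = importlib.import_module(package_name)
    return module.__path__[0]

def package_dir_from_metadata(distribution_name):
    """Directory recorded in the installed distribution metadata (the old approach)."""
    import pkg_resources
    return pkg_resources.get_distribution(distribution_name).location

if __name__ == "__main__":
    # Any importable package works here; "json" is just a stand-in.
    print("imported from:", package_dir_from_import("json"))
```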
59d7b2e72f1f0df5a0dcbd02468f878a5fdfa04d | diff --git a/src/main/java/io/iron/ironmq/Messages.java b/src/main/java/io/iron/ironmq/Messages.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/iron/ironmq/Messages.java
+++ b/src/main/java/io/iron/ironmq/Messages.java
@@ -31,10 +31,10 @@ public class Messages {
}
public MessageOptions[] toMessageOptions() {
- int length = messages.length;
+ int length = messages.size();
MessageOptions[] result = new MessageOptions[length];
for (int i = 0; i < length; i++)
- result[i] = new MessageOptions(messages[i].getId(), messages[i].getReservationId());
+ result[i] = new MessageOptions(messages.get(i).getId(), messages.get(i).getReservationId());
return result;
}
}
diff --git a/src/main/java/io/iron/ironmq/Queue.java b/src/main/java/io/iron/ironmq/Queue.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/iron/ironmq/Queue.java
+++ b/src/main/java/io/iron/ironmq/Queue.java
@@ -267,18 +267,6 @@ public class Queue {
}
/**
- * Deletes multiple messages from the queue.
- *
- * @param messages The list of the messages to delete.
- *
- * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
- * @throws IOException If there is an error accessing the IronMQ server.
- */
- public void deleteMessages(Messages messages) throws IOException {
- deleteMessages(new Ids(messages));
- }
-
- /**
* Destroy the queue.
*
* @throws HTTPException If the IronMQ service returns a status other than 200 OK.
diff --git a/src/test/java/io/iron/ironmq/IronMQTest.java b/src/test/java/io/iron/ironmq/IronMQTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/iron/ironmq/IronMQTest.java
+++ b/src/test/java/io/iron/ironmq/IronMQTest.java
@@ -417,6 +417,23 @@ public class IronMQTest {
Assert.assertEquals(0, queue.getInfoAboutQueue().getSize());
}
+ @Test
+ public void testDeleteReservedMessagesPartially() throws IOException {
+ Queue queue = new Queue(client, "my_queue_" + ts());
+ queue.clear();
+ queue.pushMessages(new String[]{"Test message 1", "Test message 2", "Test message 3", "Test message 4"});
+ Messages messages = queue.reserve(4);
+
+ Assert.assertEquals(4, queue.getInfoAboutQueue().getSize());
+
+ Messages messagesToDelete = new Messages();
+ messagesToDelete.add(messages.getMessage(1));
+ messagesToDelete.add(messages.getMessage(3));
+ queue.deleteMessages(messagesToDelete);
+
+ Assert.assertEquals(2, queue.getInfoAboutQueue().getSize());
+ }
+
@Test(expected = HTTPException.class)
public void testDeleteReservedMessagesWithoutReservationId() throws IOException {
Queue queue = new Queue(client, "my_queue_" + ts()); | remove method from lib-v1 after merge | iron-io_iron_mq_java | train |
e09dbdd2bc3e2323e659b77409ea66eb6a155610 | diff --git a/sos/plugins/foreman.py b/sos/plugins/foreman.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/foreman.py
+++ b/sos/plugins/foreman.py
@@ -218,12 +218,12 @@ class Foreman(Plugin):
for table in foremandb:
_cmd = self.build_query_cmd(foremandb[table])
self.add_cmd_output(_cmd, suggest_filename=table, timeout=600,
- env=self.env)
+ sizelimit=100, env=self.env)
for dyn in foremancsv:
_cmd = self.build_query_cmd(foremancsv[dyn], csv=True)
self.add_cmd_output(_cmd, suggest_filename=dyn, timeout=600,
- env=self.env)
+ sizelimit=100, env=self.env)
# collect http[|s]_proxy env.variables
self.add_env_var(["http_proxy", "https_proxy"]) | [foreman] increase sizelimit for tasks export
When foreman stores tens to hundreds of thousands of tasks, the default
sizelimit causes the dynflow* or foreman_tasks_tasks files to be truncated.
Let's increase the sizelimit to <I>MB.
Resolves: #<I> | sosreport_sos | train |
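
A generic, self-contained sketch of why the size limit above matters; this is not sos's implementation, and the unit of `sizelimit=100` is assumed to be megabytes. Once collected output passes the configured cap, the remaining rows are simply dropped, so exporting hundreds of thousands of task rows needs a larger cap than the default.

```python
def capped_output(lines, sizelimit_bytes):
    """Collect lines until the byte cap is hit, then truncate (illustrative only)."""
    collected, used = [], 0
    for line in lines:
        used += len(line.encode("utf-8")) + 1  # +1 for the newline
        if used > sizelimit_bytes:
            break
        collected.append(line)
    return "\n".join(collected)

if __name__ == "__main__":
    # Simulate a large foreman_tasks_tasks export: 200,000 short rows.
    rows = [f"task-{i},stopped,success" for i in range(200_000)]
    small = capped_output(rows, 1 * 1024 * 1024)    # an arbitrary 1 MiB cap: truncated
    large = capped_output(rows, 100 * 1024 * 1024)  # 100 MiB, mirroring sizelimit=100
    print(len(small.splitlines()), "rows kept under the small cap")
    print(len(large.splitlines()), "rows kept under the larger cap")
```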
dedfa0ae91aef4723cea903495f89a31774248e9 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -39,9 +39,9 @@ except:
try:
- from setuptools import setup, find_packages
+ from setuptools import setup
except ImportError:
- from distutils.core import setup, find_packages
+ from distutils.core import setup
import codecs
@@ -85,7 +85,9 @@ setup(name='PyVISA',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
- packages=find_packages(),
+ packages=['pyvisa', 'pyvisa.compat',
+ 'pyvisa.ctwrapper', 'pyvisa.legacy',
+ 'pyvisa.testsuite'],
platforms="Linux, Windows,Mac",
py_modules=['visa'],
use_2to3=False, | Removed find_package dependency
Close #<I> | pyvisa_pyvisa | train |
3234e29cd5df486a771369061ea93a978ecd0606 | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,7 @@ SolrWrapper.wrap port: 8983,
| solr_options | (Hash) |
| env | (Hash) |
| persist | (Boolean) Preserves the data in you collection between startups |
+| contrib | (Array<Hash>) |
```ruby
solr.with_collection(name: 'collection_name', dir: 'path_to_solr_configs')
diff --git a/lib/solr_wrapper/configuration.rb b/lib/solr_wrapper/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/solr_wrapper/configuration.rb
+++ b/lib/solr_wrapper/configuration.rb
@@ -157,6 +157,10 @@ module SolrWrapper
options.fetch(:poll_interval, 1)
end
+ def contrib
+ options.fetch(:contrib, [])
+ end
+
private
def self.slice(source, *keys)
@@ -182,11 +186,22 @@ module SolrWrapper
$stderr.puts "Unable to parse config #{config_file}" if verbose?
return {}
end
- convert_keys(config)
+ config.transform_keys(&:to_sym)
+ absoluteize_paths(config, root: File.dirname(config_file))
end
- def convert_keys(hash)
- hash.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
+ def absoluteize_paths(config, root: Dir.pwd)
+ return config unless config[:contrib]
+
+ config = config.dup
+
+ config[:contrib] = config[:contrib].map do |mapping|
+ mapping = mapping.transform_keys(&:to_sym)
+ mapping[:from] = File.expand_path(mapping[:from], root)
+ mapping[:to] ||= 'contrib/'
+ end
+
+ config
end
def default_configuration_paths
diff --git a/lib/solr_wrapper/instance.rb b/lib/solr_wrapper/instance.rb
index <HASH>..<HASH> 100644
--- a/lib/solr_wrapper/instance.rb
+++ b/lib/solr_wrapper/instance.rb
@@ -244,6 +244,14 @@ module SolrWrapper
raise_error_unless_extracted
FileUtils.cp config.solr_xml, File.join(config.instance_dir, 'server', 'solr', 'solr.xml') if config.solr_xml
FileUtils.cp_r File.join(config.extra_lib_dir, '.'), File.join(config.instance_dir, 'server', 'solr', 'lib') if config.extra_lib_dir
+
+ config.contrib.each do |mapping|
+ if File.directory? mapping[:from]
+ FileUtils.cp_r mapping[:from], File.join(config.instance_dir, mapping[:to])
+ else
+ FileUtils.cp mapping[:from], File.join(config.instance_dir, mapping[:to])
+ end
+ end
end
def extract_and_configure | Add 'contrib' configuration for copying files into the solr instance dir | cbeer_solr_wrapper | train |
dca8c2dbc1e799b031492eda34e8c32363f8f1f5 | diff --git a/tests/Unit/Route/RoutePsr7Test.php b/tests/Unit/Route/RoutePsr7Test.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/Route/RoutePsr7Test.php
+++ b/tests/Unit/Route/RoutePsr7Test.php
@@ -7,7 +7,7 @@ use Siler\Route;
use Zend\Diactoros\ServerRequest;
use Zend\Diactoros\ServerRequestFactory;
-class RoutePsr7Test //extends \PHPUnit\Framework\TestCase
+class RoutePsr7Test extends \PHPUnit\Framework\TestCase
{
public function testPsr7()
{ | Restore PSR-7 route test :<I>:% coverage | leocavalcante_siler | train |
79acd905b01ff3d76da985ab1191db07a7c898f4 | diff --git a/ingress-proxy/src/main/java/com/networknt/proxy/LightProxyHandler.java b/ingress-proxy/src/main/java/com/networknt/proxy/LightProxyHandler.java
index <HASH>..<HASH> 100644
--- a/ingress-proxy/src/main/java/com/networknt/proxy/LightProxyHandler.java
+++ b/ingress-proxy/src/main/java/com/networknt/proxy/LightProxyHandler.java
@@ -24,6 +24,7 @@ import io.undertow.server.HttpServerExchange;
import io.undertow.server.handlers.ResponseCodeHandler;
import io.undertow.server.handlers.proxy.LoadBalancingProxyClient;
import io.undertow.util.HeaderMap;
+import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import org.jose4j.jwt.JwtClaims;
import org.jose4j.jwt.consumer.InvalidJwtException;
@@ -49,7 +50,6 @@ import java.util.function.Consumer;
public class LightProxyHandler implements HttpHandler {
static final String CONFIG_NAME = "proxy";
static final String CLAIMS_KEY = "jwtClaims";
- private static final String AUTH_HEADER_NAME = "Authorization";
private static final int LONG_CLOCK_SKEW = 1000000;
static final Logger logger = LoggerFactory.getLogger(LightProxyHandler.class);
@@ -96,9 +96,9 @@ public class LightProxyHandler implements HttpHandler {
private JwtClaims extractClaimsFromJwt(HeaderMap headerValues) {
// make sure request actually contained authentication header value
- if(headerValues.get(AUTH_HEADER_NAME) != null)
+ if(headerValues.get(Headers.AUTHORIZATION_STRING) != null)
{
- String jwt = String.valueOf(headerValues.get(AUTH_HEADER_NAME)).split(" ")[1];
+ String jwt = String.valueOf(headerValues.get(Headers.AUTHORIZATION_STRING)).split(" ")[1];
JwtConsumer jwtConsumer = new JwtConsumerBuilder()
.setSkipSignatureVerification()
.setSkipAllDefaultValidators() | fixes #<I> use the Header constant for the Authorization | networknt_light-4j | train |
b98545541057d01eb5b80586c3834254e1deac27 | diff --git a/hazelcast/src/main/java/com/hazelcast/spi/impl/BasicInvocation.java b/hazelcast/src/main/java/com/hazelcast/spi/impl/BasicInvocation.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/spi/impl/BasicInvocation.java
+++ b/hazelcast/src/main/java/com/hazelcast/spi/impl/BasicInvocation.java
@@ -257,7 +257,7 @@ abstract class BasicInvocation implements Callback<Object>, BackupCompletionCall
if (invTarget == null) {
remote = false;
if (nodeEngine.isActive()) {
- notify(new WrongTargetException(thisAddress, invTarget, partitionId, replicaIndex, op.getClass().getName(), serviceName));
+ notify(new WrongTargetException(thisAddress, null, partitionId, replicaIndex, op.getClass().getName(), serviceName));
} else {
notify(new HazelcastInstanceNotActiveException());
} | Removed a load of a known null variable | hazelcast_hazelcast | train |
0c7c143ad1fd45a73de3f7ba71c6e9e142bb7a48 | diff --git a/plugins/provisioners/puppet/provisioner/puppet.rb b/plugins/provisioners/puppet/provisioner/puppet.rb
index <HASH>..<HASH> 100644
--- a/plugins/provisioners/puppet/provisioner/puppet.rb
+++ b/plugins/provisioners/puppet/provisioner/puppet.rb
@@ -149,7 +149,7 @@ module VagrantPlugins
command = "#{facter}puppet apply #{options}"
if config.working_directory
if windows?
- command = "cd #{config.working_directory}; if ($?) \{ #{command} \}"
+ command = "cd #{config.working_directory}; if (`$?) \{ #{command} \}"
else
command = "cd #{config.working_directory} && #{command}"
end | fix escaping of powershell variable
As this is nested in a powershell variable $command, it must be escaped
otherwise it is evaluated when the variable is created, giving an error that
"The term 'True' is not recognized as the name of a cmdlet, function,
script". This prevented using a puppet.working_directory on Windows. | hashicorp_vagrant | train |
a546017c59387361560842968b9ee6e50752e335 | diff --git a/src/Config.php b/src/Config.php
index <HASH>..<HASH> 100644
--- a/src/Config.php
+++ b/src/Config.php
@@ -197,7 +197,7 @@ class Config implements ConfigInterface
$matches = [];
if (!preg_match_all(
- '#{\s*(?:=|(?<function>\w+)\s*:\s*)(?<value>[^}]+)\s*}#',
+ '#' . static::ENCAPSULATION_START . '\s*(?:=|(?<function>\w+)\s*:\s*)(?<value>[^}]+)\s*' . static::ENCAPSULATION_END . '#',
$value,
$matches,
PREG_OFFSET_CAPTURE | PREG_UNMATCHED_AS_NULL | Use constants to define encapsulation characters of functions | BerliozFramework_Config | train |
7d9f8cd96f8e5abc5b7c517d02fb166f9e51b877 | diff --git a/packages/platform-ios/src/commands/runIOS/index.js b/packages/platform-ios/src/commands/runIOS/index.js
index <HASH>..<HASH> 100644
--- a/packages/platform-ios/src/commands/runIOS/index.js
+++ b/packages/platform-ios/src/commands/runIOS/index.js
@@ -11,9 +11,8 @@
import child_process from 'child_process';
import fs from 'fs';
import path from 'path';
-
+import chalk from 'chalk';
import type {ConfigT} from 'types';
-
import findXcodeProject from './findXcodeProject';
import parseIOSDevicesList from './parseIOSDevicesList';
import findMatchingSimulator from './findMatchingSimulator';
@@ -64,6 +63,12 @@ function runIOS(_: Array<string>, ctx: ConfigT, args: FlagsT) {
}`,
);
+ const {device, udid} = args;
+
+ if (!device && !udid) {
+ return runOnSimulator(xcodeProject, scheme, args);
+ }
+
const devices = parseIOSDevicesList(
// $FlowExpectedError https://github.com/facebook/flow/issues/5675
child_process.execFileSync('xcrun', ['instruments', '-s'], {
@@ -71,33 +76,31 @@ function runIOS(_: Array<string>, ctx: ConfigT, args: FlagsT) {
}),
);
- const device = ((args.device: any): string);
- const udid = ((args.udid: any): string);
- if (device || udid) {
- const selectedDevice = device
- ? matchingDevice(devices, device)
- : matchingDeviceByUdid(devices, udid);
-
- if (selectedDevice) {
- return runOnDevice(selectedDevice, scheme, xcodeProject, args);
- }
+ if (devices.length === 0) {
+ return logger.error('No iOS devices connected.');
+ }
- if (devices && devices.length > 0) {
- const message = device
- ? `Could not find device with the name: "${device}". Choose one of the following:\n${printFoundDevices(
- devices,
- )}`
- : `Could not find device with the udid: "${udid}". Choose one of the following:\n${printFoundDevices(
- devices,
- )}`;
+ const selectedDevice = matchingDevice(devices, device, udid);
- return logger.error(message);
- }
+ if (selectedDevice) {
+ return runOnDevice(selectedDevice, scheme, xcodeProject, args);
+ }
- return logger.error('No iOS devices connected.');
+ if (device) {
+ return logger.error(
+ `Could not find a device named: "${chalk.bold(
+ device,
+ )}". ${printFoundDevices(devices)}`,
+ );
}
- return runOnSimulator(xcodeProject, scheme, args);
+ if (udid) {
+ return logger.error(
+ `Could not find a device with udid: "${chalk.bold(
+ udid,
+ )}". ${printFoundDevices(devices)}`,
+ );
+ }
}
async function runOnSimulator(xcodeProject, scheme, args: FlagsT) {
@@ -331,33 +334,26 @@ function xcprettyAvailable() {
return true;
}
-function matchingDevice(devices, deviceName) {
+function matchingDevice(devices, deviceName, udid) {
+ if (udid) {
+ return matchingDeviceByUdid(devices, udid);
+ }
if (deviceName === true && devices.length === 1) {
logger.info(
- `Using first available device ${
- devices[0].name
- } due to lack of name supplied.`,
+ `Using first available device named "${chalk.bold(
+ devices[0].name,
+ )}" due to lack of name supplied.`,
);
return devices[0];
}
- for (let i = devices.length - 1; i >= 0; i--) {
- if (
- devices[i].name === deviceName ||
- formattedDeviceName(devices[i]) === deviceName
- ) {
- return devices[i];
- }
- }
- return null;
+ return devices.find(
+ device =>
+ device.name === deviceName || formattedDeviceName(device) === deviceName,
+ );
}
function matchingDeviceByUdid(devices, udid) {
- for (let i = devices.length - 1; i >= 0; i--) {
- if (devices[i].udid === udid) {
- return devices[i];
- }
- }
- return null;
+ return devices.find(device => device.udid === udid);
}
function formattedDeviceName(simulator) {
@@ -365,11 +361,10 @@ function formattedDeviceName(simulator) {
}
function printFoundDevices(devices) {
- let output = '';
- for (let i = devices.length - 1; i >= 0; i--) {
- output += `${devices[i].name} Udid: ${devices[i].udid}\n`;
- }
- return output;
+ return [
+ 'Available devices:',
+ ...devices.map(device => ` - ${device.name} (${device.udid})`),
+ ].join('\n');
}
function getProcessOptions({packager, terminal, port}) { | feat: refactor `run-ios` for readability and better error messages (#<I>)
Summary:
---------
Refactor `run-ios` for readability and better error messages. | react-native-community_cli | train |
5db4b958f1e953b92ed0ec689d5a056bcac37491 | diff --git a/cli/lib/kontena/cli/apps/deploy_command.rb b/cli/lib/kontena/cli/apps/deploy_command.rb
index <HASH>..<HASH> 100644
--- a/cli/lib/kontena/cli/apps/deploy_command.rb
+++ b/cli/lib/kontena/cli/apps/deploy_command.rb
@@ -43,7 +43,9 @@ module Kontena::Cli::Apps
def deploy_services(queue)
queue.each do |service|
name = service['id'].split('/').last
- deploy_service(token, name, {})
+ options = {}
+ options[:force] = true if force_deploy?
+ deploy_service(token, name, options)
print "deploying #{name.sub("#{service_prefix}-", '').colorize(:cyan)}"
unless async?
wait_for_deploy_to_finish(token, service['id'])
@@ -102,7 +104,6 @@ module Kontena::Cli::Apps
id = prefixed_name(id)
data = parse_data(options)
update_service(token, id, data)
- deploy_service(token, id, {force: true}) if force_deploy?
end
# @param [String] name | handle force_deploy flag correctly on app deploy command | kontena_kontena | train |
9dd9f69e67d6077e3ec79d0d1789d1fcd218149d | diff --git a/aioxmpp/xml.py b/aioxmpp/xml.py
index <HASH>..<HASH> 100644
--- a/aioxmpp/xml.py
+++ b/aioxmpp/xml.py
@@ -39,6 +39,8 @@ Utility functions
.. autofunction:: serialize_single_xso
+.. autofunction:: write_single_xso
+
.. autofunction:: read_xso
.. autofunction:: read_single_xso
@@ -860,6 +862,16 @@ def serialize_single_xso(x):
return buf.getvalue().decode("utf8")
+def write_single_xso(x, dest):
+ """
+ Write a single XSO `x` to a binary file-like object `dest`.
+ """
+ gen = XMPPXMLGenerator(dest,
+ short_empty_elements=True,
+ sorted_attributes=True)
+ x.unparse_to_sax(gen)
+
+
def read_xso(src, xsomap):
"""
Read a single XSO from a binary file-like input `src` containing an XML
diff --git a/docs/api/changelog.rst b/docs/api/changelog.rst
index <HASH>..<HASH> 100644
--- a/docs/api/changelog.rst
+++ b/docs/api/changelog.rst
@@ -74,8 +74,8 @@ Version 0.5
:class:`aioxmpp.xso.CapturingXSO` and the
:attr:`~aioxmpp.disco.xso.InfoQuery.captured_events` attribute was added.
-* :func:`aioxmpp.xml.read_xso` and :func:`aioxmpp.xml.read_single_xso` were
- added.
+* :func:`aioxmpp.xml.write_single_xso`, :func:`aioxmpp.xml.read_xso` and
+ :func:`aioxmpp.xml.read_single_xso` were added.
Version 0.4
diff --git a/tests/test_xml.py b/tests/test_xml.py
index <HASH>..<HASH> 100644
--- a/tests/test_xml.py
+++ b/tests/test_xml.py
@@ -1401,6 +1401,28 @@ class Testserialize_single_xso(unittest.TestCase):
)
+class Testwrite_single_xso(unittest.TestCase):
+ def test_simple(self):
+ class TestXSO(xso.XSO):
+ TAG = ("uri:foo", "bar")
+ DECLARE_NS = {
+ None: "uri:foo",
+ }
+
+ attr = xso.Attr("foo")
+
+ b = io.BytesIO()
+ x = TestXSO()
+ x.attr = "test"
+
+ xml.write_single_xso(x, b)
+
+ self.assertEqual(
+ b'<bar xmlns="uri:foo" foo="test"/>',
+ b.getvalue(),
+ )
+
+
class Testread_xso(unittest.TestCase):
def test_read_from_io(self):
base = unittest.mock.Mock() | Add function to write single XSO to a file | horazont_aioxmpp | train |
423f531f975e9b787ff96d29eff6701f830cb75e | diff --git a/modules/webui/plugins/impacts/impacts.py b/modules/webui/plugins/impacts/impacts.py
index <HASH>..<HASH> 100644
--- a/modules/webui/plugins/impacts/impacts.py
+++ b/modules/webui/plugins/impacts/impacts.py
@@ -25,6 +25,7 @@
import time
from shinken.util import safe_print
+from shinken.misc.filter import only_related_to
# Global value that will be changed by the main app
app = None
@@ -55,7 +56,7 @@ def show_impacts():
app.bottle.redirect("/user/login")
#return {'app': app, 'impacts': {}, 'valid_user': False, 'user': user}
- all_imp_impacts = app.datamgr.get_important_elements()
+ all_imp_impacts = only_related_to(app.datamgr.get_important_elements(),user)
all_imp_impacts.sort(hst_srv_sort)
    impacts = {} | Fix host filtering in the impact tab according to user rights | Alignak-monitoring_alignak | train |
ca32a201efe0882e2d1aade292cc408936b75f91 | diff --git a/native/xxhash_safe.go b/native/xxhash_safe.go
index <HASH>..<HASH> 100644
--- a/native/xxhash_safe.go
+++ b/native/xxhash_safe.go
@@ -14,12 +14,12 @@ func newbyteReader(in []byte) byteReader {
}
func (br byteReader) Uint32(i int) uint32 {
- br = br[i : i+4]
+ br = br[i : i+4 : len(br)]
return uint32(br[0]) | uint32(br[1])<<8 | uint32(br[2])<<16 | uint32(br[3])<<24
}
func (br byteReader) Uint64(i int) uint64 {
- br = br[i : i+8]
+ br = br[i : i+8 : len(br)]
return uint64(br[0]) | uint64(br[1])<<8 | uint64(br[2])<<16 | uint64(br[3])<<24 |
uint64(br[4])<<32 | uint64(br[5])<<40 | uint64(br[6])<<48 | uint64(br[7])<<56
} | use the len in subslicing | OneOfOne_xxhash | train |
d09309b9eb48822b055318a5d75bf47a2d819da8 | diff --git a/varify/static/scripts/javascript/min/ui/exporter.js b/varify/static/scripts/javascript/min/ui/exporter.js
index <HASH>..<HASH> 100644
--- a/varify/static/scripts/javascript/min/ui/exporter.js
+++ b/varify/static/scripts/javascript/min/ui/exporter.js
@@ -1 +1 @@
-define(["cilantro"],function(e){var t=e.ui.ExporterDialog.extend({template:"varify/export/dialog",_events:{"click [data-action=change-columns]":"changeColumnsClicked"},initialize:function(){this.events=_.extend({},this._events,this.events),e.ui.ExporterDialog.prototype.initialize.call(this)},onRender:function(){e.ui.ExporterDialog.prototype.onRender.call(this)},changeColumnsClicked:function(){e.dialogs.columns.open()}});return{ExporterDialog:t}})
\ No newline at end of file
+define(["cilantro"],function(e){var t=e.ui.ExporterDialog.extend({template:"varify/export/dialog",_events:{"click [data-action=change-columns]":"changeColumnsClicked"},initialize:function(){this.events=_.extend({},this._events,this.events),e.ui.ExporterDialog.prototype.initialize.call(this)},changeColumnsClicked:function(){e.dialogs.columns.open()}});return{ExporterDialog:t}})
\ No newline at end of file
diff --git a/varify/static/scripts/javascript/src/ui/exporter.js b/varify/static/scripts/javascript/src/ui/exporter.js
index <HASH>..<HASH> 100644
--- a/varify/static/scripts/javascript/src/ui/exporter.js
+++ b/varify/static/scripts/javascript/src/ui/exporter.js
@@ -17,10 +17,6 @@ define([
c.ui.ExporterDialog.prototype.initialize.call(this);
},
- onRender: function() {
- c.ui.ExporterDialog.prototype.onRender.call(this);
- },
-
changeColumnsClicked: function() {
c.dialogs.columns.open();
} | Remove unnecessary onRender override in exporter | chop-dbhi_varify | train |
2622413585990c225969ab1f8d38369501679484 | diff --git a/tests/pytesseract_test.py b/tests/pytesseract_test.py
index <HASH>..<HASH> 100644
--- a/tests/pytesseract_test.py
+++ b/tests/pytesseract_test.py
@@ -50,7 +50,7 @@ def test_file():
@pytest.fixture(scope='session')
def test_invalid_file():
- return 'invalid' + TEST_JPEG
+ return TEST_JPEG + 'invalid'
@pytest.fixture(scope='session')
@@ -281,34 +281,31 @@ def test_wrong_tesseract_cmd(monkeypatch, test_file, test_path):
import pytesseract
monkeypatch.setattr(
- 'pytesseract.pytesseract.tesseract_cmd',
- test_path,
+ 'pytesseract.pytesseract.tesseract_cmd', test_path,
)
with pytest.raises(TesseractNotFoundError):
pytesseract.pytesseract.image_to_string(test_file)
def test_main_not_found_cases(
- capsys,
- monkeypatch,
- test_file,
- test_invalid_file,
+ capsys, monkeypatch, test_file, test_invalid_file,
):
"""Test wrong or missing tesseract command in main."""
import pytesseract
monkeypatch.setattr('sys.argv', ['', test_invalid_file])
- pytesseract.pytesseract.main()
+ with pytest.raises(SystemExit):
+ pytesseract.pytesseract.main()
assert capsys.readouterr().err.startswith('ERROR: Could not open file')
monkeypatch.setattr(
- 'pytesseract.pytesseract.tesseract_cmd',
- 'wrong_tesseract',
+ 'pytesseract.pytesseract.tesseract_cmd', 'wrong_tesseract',
)
monkeypatch.setattr('sys.argv', ['', test_file])
- pytesseract.pytesseract.main()
+ with pytest.raises(SystemExit):
+ pytesseract.pytesseract.main()
assert capsys.readouterr().err.endswith(
- "is not installed or it's not in your PATH"
+ "is not installed or it's not in your PATH",
)
@@ -322,8 +319,7 @@ def test_proper_oserror_exception_handling(monkeypatch, test_file, test_path):
import pytesseract
monkeypatch.setattr(
- 'pytesseract.pytesseract.tesseract_cmd',
- test_path,
+ 'pytesseract.pytesseract.tesseract_cmd', test_path,
)
with pytest.raises(
TesseractNotFoundError if IS_PYTHON_2 and test_path else OSError, | Fix pep8 and catch SystemExit in tests | madmaze_pytesseract | train |
d5f57a69d54cf647c2efd2fffe43d4a4b0eaffa1 | diff --git a/modules/wycs/src/wycs/WycsMain.java b/modules/wycs/src/wycs/WycsMain.java
index <HASH>..<HASH> 100644
--- a/modules/wycs/src/wycs/WycsMain.java
+++ b/modules/wycs/src/wycs/WycsMain.java
@@ -197,7 +197,7 @@ public class WycsMain {
// + " reductions, " + Solver.numInferences
// + " inferences)\n");
//
- SimpleRewriter rw = new SimpleRewriter(Solver.inferences,Solver.reductions);
+ SimpleRewriter rw = new SimpleRewriter(Solver.inferences,Solver.reductions,Solver.SCHEMA);
rw.apply(automaton);
int total = rw.numSuccessfulActivations() + rw.numFailedActivations();
System.err.println("\n\n=> (" + rw.numSuccessfulActivations() + " / " + total + " succeesful actications)\n");
diff --git a/modules/wycs/src/wycs/transforms/VerificationCheck.java b/modules/wycs/src/wycs/transforms/VerificationCheck.java
index <HASH>..<HASH> 100644
--- a/modules/wycs/src/wycs/transforms/VerificationCheck.java
+++ b/modules/wycs/src/wycs/transforms/VerificationCheck.java
@@ -161,7 +161,8 @@ public class VerificationCheck implements Transform<WycsFile> {
// Solver.MAX_STEPS = 100000;
// infer(automaton);
- new SimpleRewriter(Solver.inferences,Solver.reductions).apply(automaton);
+ new SimpleRewriter(Solver.inferences, Solver.reductions, Solver.SCHEMA)
+ .apply(automaton);
if(!automaton.get(automaton.getRoot(0)).equals(Solver.False)) {
String msg = stmt.message; | WYCS: some minor fixes. | Whiley_WhileyCompiler | train |
ebc251d986216117509c6d52c16238b82c78b5a8 | diff --git a/heron/schedulers/tests/java/com/twitter/heron/scheduler/slurm/SlurmLauncherTest.java b/heron/schedulers/tests/java/com/twitter/heron/scheduler/slurm/SlurmLauncherTest.java
index <HASH>..<HASH> 100644
--- a/heron/schedulers/tests/java/com/twitter/heron/scheduler/slurm/SlurmLauncherTest.java
+++ b/heron/schedulers/tests/java/com/twitter/heron/scheduler/slurm/SlurmLauncherTest.java
@@ -28,6 +28,7 @@ import org.powermock.modules.junit4.PowerMockRunner;
import com.twitter.heron.spi.common.Config;
import com.twitter.heron.spi.common.ConfigKeys;
import com.twitter.heron.spi.packing.PackingPlan;
+import com.twitter.heron.spi.scheduler.IScheduler;
import com.twitter.heron.spi.utils.SchedulerUtils;
@RunWith(PowerMockRunner.class)
@@ -49,6 +50,9 @@ public class SlurmLauncherTest {
return config;
}
+ /**
+ * Test slurm scheduler launcher
+ */
@Test
public void testLaunch() throws Exception {
Config config = createRunnerConfig();
@@ -79,17 +83,32 @@ public class SlurmLauncherTest {
PowerMockito.doReturn(false).when(SchedulerUtils.class, "onScheduleAsLibrary",
Mockito.any(Config.class),
Mockito.any(Config.class),
- Mockito.any(Config.class),
+ Mockito.any(IScheduler.class),
Mockito.any(PackingPlan.class));
+ PowerMockito.doReturn(true).when(slurmLauncher).setupWorkingDirectory();
Assert.assertFalse(slurmLauncher.launch(Mockito.mock(PackingPlan.class)));
+ PowerMockito.verifyStatic();
+ SchedulerUtils.onScheduleAsLibrary(
+ Mockito.any(Config.class),
+ Mockito.any(Config.class),
+ Mockito.any(IScheduler.class),
+ Mockito.any(PackingPlan.class));
// happy path
- PowerMockito.doReturn(true).when(slurmLauncher).setupWorkingDirectory();
PowerMockito.mockStatic(SchedulerUtils.class);
PowerMockito.doReturn(true).when(SchedulerUtils.class, "onScheduleAsLibrary",
- Mockito.any(Config.class), Mockito.any(Config.class),
- Mockito.any(Config.class), Mockito.any(PackingPlan.class));
- Mockito.verify(slurmLauncher, Mockito.times(2)).launch(Mockito.any(PackingPlan.class));
+ Mockito.any(Config.class),
+ Mockito.any(Config.class),
+ Mockito.any(IScheduler.class),
+ Mockito.any(PackingPlan.class));
+ Assert.assertTrue(slurmLauncher.launch(Mockito.mock(PackingPlan.class)));
+ Mockito.verify(slurmLauncher, Mockito.times(3)).launch(Mockito.any(PackingPlan.class));
+ PowerMockito.verifyStatic();
+ SchedulerUtils.onScheduleAsLibrary(
+ Mockito.any(Config.class),
+ Mockito.any(Config.class),
+ Mockito.any(IScheduler.class),
+ Mockito.any(PackingPlan.class));
slurmLauncher.close();
}
} | Use correct method signature and fix test (#<I>) | apache_incubator-heron | train |
803de7468546277940d4acd0b7a30416780d8627 | diff --git a/test/e2e/lifecycle/cluster_upgrade.go b/test/e2e/lifecycle/cluster_upgrade.go
index <HASH>..<HASH> 100644
--- a/test/e2e/lifecycle/cluster_upgrade.go
+++ b/test/e2e/lifecycle/cluster_upgrade.go
@@ -24,7 +24,7 @@ import (
"path/filepath"
"regexp"
"strings"
- "sync"
+ "sync/atomic"
"time"
"k8s.io/apimachinery/pkg/util/version"
@@ -434,11 +434,19 @@ type chaosMonkeyAdapter struct {
func (cma *chaosMonkeyAdapter) Test(sem *chaosmonkey.Semaphore) {
start := time.Now()
- var once sync.Once
+
+ // Using an atomic with a CAS is a potential workaround for #74890.
+ //
+ // This is a speculative workaround - we are really seeing if
+ // this is better; if not we should revert.
+ //
+ // If it is better we should file a bug against go 1.12, and
+ // then revert!
+ var onceWithoutMutex uint32
ready := func() {
- once.Do(func() {
+ if atomic.CompareAndSwapUint32(&onceWithoutMutex, 0, 1) {
sem.Ready()
- })
+ }
}
defer finalizeUpgradeTest(start, cma.testReport)
defer ready() | Speculative workaround for #<I>
We try using an atomic with a CAS, as a potential workaround for
issue #<I>.
Kudos to @neolit<I> for the investigation & idea.
This is a speculative workaround - we are really seeing if this is
better; if not we should revert.
If it is better we should file a bug against go <I>, and then revert!
Issue #<I> | kubernetes_kubernetes | train |
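The message above describes swapping a sync.Once for an atomic compare-and-swap so the ready callback fires at most once. A minimal sketch of that pattern under the same assumption — many goroutines may call ready, only one should win; the print is a stand-in for the real sem.Ready() call:

package main

import (
    "fmt"
    "sync"
    "sync/atomic"
)

func main() {
    var fired uint32 // 0 = not yet signalled, 1 = signalled

    // ready may be called many times from many goroutines, but the body
    // runs only for the single caller that wins the compare-and-swap.
    ready := func() {
        if atomic.CompareAndSwapUint32(&fired, 0, 1) {
            fmt.Println("signalled exactly once")
        }
    }

    var wg sync.WaitGroup
    for i := 0; i < 10; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            ready()
        }()
    }
    wg.Wait()
}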
ce74f75fafcd4392e57762d3c047b85986306771 | diff --git a/http.js b/http.js
index <HASH>..<HASH> 100644
--- a/http.js
+++ b/http.js
@@ -118,9 +118,9 @@ function start (entry, opts) {
})
})
- router.route(/^\/assets\/([^?]*)(\?.*)?$/, function (req, res, params) {
- var prefix = 'assets' // TODO: also accept 'content'
- var name = prefix + '/' + params[1]
+ router.route(/^\/(assets|content|public)\/([^?]*)(\?.*)?$/, function (req, res, params) {
+ var prefix = params[1] // asset dir
+ var name = prefix + '/' + params[2]
compiler.assets(name, function (err, filename) {
if (err) {
res.statusCode = 404
diff --git a/test/http.js b/test/http.js
index <HASH>..<HASH> 100644
--- a/test/http.js
+++ b/test/http.js
@@ -30,11 +30,12 @@ function setup () {
var dirname = 'manifest-pipeline-' + (Math.random() * 1e4).toFixed()
tmpDirname = path.join(os.tmpdir(), dirname)
+ var contentDirname = path.join(tmpDirname, 'content')
var assetDirname = path.join(tmpDirname, 'assets')
var assetSubdirname = path.join(assetDirname, 'images')
tmpScriptname = path.join(tmpDirname, 'index.js')
- var tmpFilename = path.join(assetDirname, 'file.txt')
+ var tmpFilename = path.join(contentDirname, 'file.txt')
var tmpAssetJsFilename = path.join(assetDirname, 'file.js')
var tmpAssetCssFilename = path.join(assetDirname, 'file.css')
var tmpJsonFilename = path.join(assetDirname, 'file.json')
@@ -42,6 +43,7 @@ function setup () {
var tmpJpgSubFilename = path.join(assetSubdirname, 'file.jpg')
fs.mkdirSync(tmpDirname)
+ fs.mkdirSync(contentDirname)
fs.mkdirSync(assetDirname)
fs.mkdirSync(assetSubdirname)
fs.writeFileSync(tmpScriptname, script)
@@ -74,8 +76,8 @@ tape('should route urls appropriately', function (assert) {
'/bundle.js?cache=busted',
'/bundle.css',
'/bundle.css?cache=busted',
- '/assets/file.txt',
- '/assets/file.txt?cache=busted',
+ '/content/file.txt',
+ '/content/file.txt?cache=busted',
'/assets/file.json',
'/assets/file.css',
'/assets/file.css?cache=busted', | support content and public dirs in http (#<I>) | choojs_bankai | train |
7a7e152d5f27a126ab24d5af785569b096f70a2e | diff --git a/markovgen/markovgen.py b/markovgen/markovgen.py
index <HASH>..<HASH> 100644
--- a/markovgen/markovgen.py
+++ b/markovgen/markovgen.py
@@ -139,7 +139,8 @@ REGEXPS = {
'xchat': '[a-z.]+ [0-9]+ [0-9:]+ <[^ ]+> (<[^ ]+> )?(?P<message>.*)$',
'supybot': '^[^ ]* (<[^ ]+> )?(?P<message>.*)$',
'srt': '^(?P<message>[^0-9].*)$',
- 'plain': '^(?P<message>.*)$'
+ 'plain': '^(?P<message>.*)$',
+ 'znc': '^\[[0-9]+:[0-9]+:[0-9]+\] (<[^ ]+> )?(?P<message>.*)$'
} | Add ZNC as a source for logs | ProgVal_markovgen | train |
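The new 'znc' entry is a regex with a named message group following a [HH:MM:SS] timestamp and an optional <nick> prefix. A small sketch of matching that shape and pulling out the named group; the sample log line is made up:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Same shape as the pattern added in the commit: a [HH:MM:SS] timestamp,
    // an optional <nick> prefix, and a named capture for the message body.
    znc := regexp.MustCompile(`^\[[0-9]+:[0-9]+:[0-9]+\] (<[^ ]+> )?(?P<message>.*)$`)

    line := "[12:34:56] <alice> hello world" // made-up sample log line
    m := znc.FindStringSubmatch(line)
    if m != nil {
        fmt.Println(m[znc.SubexpIndex("message")]) // "hello world"
    }
}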
a4d79b0eb5a9e1cbf257fb38232bc3d8f4988171 | diff --git a/question/type/numerical/questiontype.php b/question/type/numerical/questiontype.php
index <HASH>..<HASH> 100644
--- a/question/type/numerical/questiontype.php
+++ b/question/type/numerical/questiontype.php
@@ -96,11 +96,11 @@ class question_numerical_qtype extends question_shortanswer_qtype {
function save_question_options($question) {
// Get old versions of the objects
- if (!$oldanswers = get_records("question_answers", "question", $question->id)) {
+ if (!$oldanswers = get_records('question_answers', 'question', $question->id, 'id ASC')) {
$oldanswers = array();
}
- if (!$oldoptions = get_records("question_numerical", "question", $question->id)) {
+ if (!$oldoptions = get_records('question_numerical', 'question', $question->id, 'answer ASC')) {
$oldoptions = array();
}
@@ -193,7 +193,7 @@ class question_numerical_qtype extends question_shortanswer_qtype {
}
function save_numerical_units($question) {
- if (!$oldunits = get_records("question_numerical_units", "question", $question->id)) {
+ if (!$oldunits = get_records('question_numerical_units', 'question', $question->id, 'id ASC')) {
$oldunits = array();
}
@@ -424,7 +424,7 @@ class question_numerical_qtype extends question_shortanswer_qtype {
$status = true;
- $numericals = get_records("question_numerical","question",$question,"id");
+ $numericals = get_records('question_numerical', 'question', $question, 'id ASC');
//If there are numericals
if ($numericals) {
//Iterate over each numerical
diff --git a/question/type/shortanswer/editquestion.php b/question/type/shortanswer/editquestion.php
index <HASH>..<HASH> 100644
--- a/question/type/shortanswer/editquestion.php
+++ b/question/type/shortanswer/editquestion.php
@@ -5,7 +5,7 @@
$options->usecase = 0;
}
if (!empty($options->answers)) {
- $answersraw = get_records_list("question_answers", "id", $options->answers);
+ $answersraw = get_records_list('question_answers', 'id', $options->answers, 'id ASC');
}
$answers = array();
diff --git a/question/type/shortanswer/questiontype.php b/question/type/shortanswer/questiontype.php
index <HASH>..<HASH> 100644
--- a/question/type/shortanswer/questiontype.php
+++ b/question/type/shortanswer/questiontype.php
@@ -37,7 +37,7 @@ class question_shortanswer_qtype extends default_questiontype {
function save_question_options($question) {
$result = new stdClass;
- if (!$oldanswers = get_records("question_answers", "question", $question->id, "id ASC")) {
+ if (!$oldanswers = get_records('question_answers', 'question', $question->id, 'id ASC')) {
$oldanswers = array();
}
@@ -228,7 +228,7 @@ class question_shortanswer_qtype extends default_questiontype {
$status = true;
- $shortanswers = get_records("question_shortanswer","question",$question,"id");
+ $shortanswers = get_records('question_shortanswer', 'question', $question, 'id ASC');
//If there are shortanswers
if ($shortanswers) {
//Iterate over each shortanswer | MDL-<I> Numerical questions occasionally mix up the order of their answers. Merged from MOODLE_<I>_STABLE. | moodle_moodle | train |
7a288ec6b3e60114cd779a5a48e0cfddcdbf57a5 | diff --git a/can-stache.js b/can-stache.js
index <HASH>..<HASH> 100644
--- a/can-stache.js
+++ b/can-stache.js
@@ -100,7 +100,7 @@ function stache (filename, template) {
if(section instanceof HTMLSectionBuilder) {
//!steal-remove-start
var last = state.sectionElementStack[state.sectionElementStack.length - 1];
- if (last.type === "section" && stache !== "" && stache !== last.tag) {
+ if (last.tag && last.type === "section" && stache !== "" && stache !== last.tag) {
if (filename) {
dev.warn(filename + ":" + lineNo + ": unexpected closing tag {{/" + stache + "}} expected {{/" + last.tag + "}}");
}
diff --git a/expressions/bracket.js b/expressions/bracket.js
index <HASH>..<HASH> 100644
--- a/expressions/bracket.js
+++ b/expressions/bracket.js
@@ -19,7 +19,7 @@ Bracket.prototype.value = function (scope, helpers) {
Bracket.prototype.closingTag = function() {
//!steal-remove-start
- return this[canSymbol.for('can-stache.originalKey')];
+ return this[canSymbol.for('can-stache.originalKey')] || '';
//!steal-remove-end
};
diff --git a/src/expression.js b/src/expression.js
index <HASH>..<HASH> 100644
--- a/src/expression.js
+++ b/src/expression.js
@@ -468,7 +468,7 @@ var expression = {
} else if (top.type === "Lookup" || top.type === "Bracket") {
var bracket = {type: "Bracket", root: top};
//!steal-remove-start
- canReflect.setKeyValue(bracket, canSymbol.for("can-stache.originalKey"), tokens.join('').trim());
+ canReflect.setKeyValue(bracket, canSymbol.for("can-stache.originalKey"), top.key);
//!steal-remove-end
stack.replaceTopAndPush(bracket);
} else if (top.type === "Call") {
diff --git a/test/stache-test.js b/test/stache-test.js
index <HASH>..<HASH> 100644
--- a/test/stache-test.js
+++ b/test/stache-test.js
@@ -7065,15 +7065,31 @@ function makeTest(name, doc, mutation) {
QUnit.test("section iteration of property using bracket notation should not warn about unexpected closing tag", function (){
var teardown = testHelpers.dev.willWarn(/unexpected closing tag/);
- stache("{{#items['foo:bar']}}{{this}}{{/items['foo:bar']}}");
+ stache("{{#items['foo:bar']}}{{this}}{{/items}}");
+
+ equal(teardown(), 0);
+ });
+
+ QUnit.test("passing bracket notation to method should not warn about unexpected closing tag", function (){
+ var teardown = testHelpers.dev.willWarn(/unexpected closing tag/);
+
+ stache("{{#eq(items['foo:bar'], 'baz')}}qux{{/eq}}");
+
+ equal(teardown(), 0);
+ });
+
+ QUnit.test("reading current scope with bracket notation should not warn about unexpected closing tag", function (){
+ var teardown = testHelpers.dev.willWarn(/unexpected closing tag/);
+
+ stache("{{#['foo:bar']}}qux{{/['foo:bar']}}");
equal(teardown(), 0);
});
QUnit.test("section iteration of property using bracket notation should warn about unexpected closing tag", function (){
- var teardown = testHelpers.dev.willWarn("1: unexpected closing tag {{/items}} expected {{/items['foo:bar']}}");
+ var teardown = testHelpers.dev.willWarn("1: unexpected closing tag {{/items['foo:bar']}} expected {{/items}}");
- stache("{{#items['foo:bar']}}{{this}}{{/items}}");
+ stache("{{#items['foo:bar']}}{{this}}{{/items['foo:bar']}}");
equal(teardown(), 1);
}); | Close bracket notation with the root, and don't warn when missing closing tag for warning | canjs_can-stache | train |
457018a0e4bf459b57d95daa40a6c3399d1bd90c | diff --git a/redisson/src/main/java/org/redisson/RedissonRateLimiter.java b/redisson/src/main/java/org/redisson/RedissonRateLimiter.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/RedissonRateLimiter.java
+++ b/redisson/src/main/java/org/redisson/RedissonRateLimiter.java
@@ -15,18 +15,7 @@
*/
package org.redisson;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.redisson.api.RFuture;
-import org.redisson.api.RRateLimiter;
-import org.redisson.api.RateIntervalUnit;
-import org.redisson.api.RateLimiterConfig;
-import org.redisson.api.RateType;
+import org.redisson.api.*;
import org.redisson.client.codec.LongCodec;
import org.redisson.client.codec.StringCodec;
import org.redisson.client.handler.State;
@@ -39,12 +28,15 @@ import org.redisson.command.CommandAsyncExecutor;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonPromise;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
/**
*
* @author Nikita Koksharov
*
*/
-public class RedissonRateLimiter extends RedissonObject implements RRateLimiter {
+public class RedissonRateLimiter extends RedissonExpirable implements RRateLimiter {
public RedissonRateLimiter(CommandAsyncExecutor commandExecutor, String name) {
super(commandExecutor, name);
diff --git a/redisson/src/main/java/org/redisson/api/RRateLimiter.java b/redisson/src/main/java/org/redisson/api/RRateLimiter.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/api/RRateLimiter.java
+++ b/redisson/src/main/java/org/redisson/api/RRateLimiter.java
@@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit;
* @author Nikita Koksharov
*
*/
-public interface RRateLimiter extends RRateLimiterAsync, RObject {
+public interface RRateLimiter extends RRateLimiterAsync, RExpirable {
/**
* Initializes RateLimiter's state and stores config to Redis server.
diff --git a/redisson/src/main/java/org/redisson/api/RRateLimiterAsync.java b/redisson/src/main/java/org/redisson/api/RRateLimiterAsync.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/api/RRateLimiterAsync.java
+++ b/redisson/src/main/java/org/redisson/api/RRateLimiterAsync.java
@@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit;
* @author Nikita Koksharov
*
*/
-public interface RRateLimiterAsync extends RObjectAsync {
+public interface RRateLimiterAsync extends RExpirableAsync {
/**
* Initializes RateLimiter's state and stores config to Redis server.
diff --git a/redisson/src/main/java/org/redisson/api/RRateLimiterReactive.java b/redisson/src/main/java/org/redisson/api/RRateLimiterReactive.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/api/RRateLimiterReactive.java
+++ b/redisson/src/main/java/org/redisson/api/RRateLimiterReactive.java
@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
* @author Nikita Koksharov
*
*/
-public interface RRateLimiterReactive extends RObjectReactive {
+public interface RRateLimiterReactive extends RExpirableReactive {
/**
* Initializes RateLimiter's state and stores config to Redis server.
diff --git a/redisson/src/main/java/org/redisson/api/RRateLimiterRx.java b/redisson/src/main/java/org/redisson/api/RRateLimiterRx.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/api/RRateLimiterRx.java
+++ b/redisson/src/main/java/org/redisson/api/RRateLimiterRx.java
@@ -26,7 +26,7 @@ import java.util.concurrent.TimeUnit;
* @author Nikita Koksharov
*
*/
-public interface RRateLimiterRx extends RObjectRx {
+public interface RRateLimiterRx extends RExpirableRx {
/**
* Initializes RateLimiter's state and stores config to Redis server.
diff --git a/redisson/src/test/java/org/redisson/RedissonTest.java b/redisson/src/test/java/org/redisson/RedissonTest.java
index <HASH>..<HASH> 100644
--- a/redisson/src/test/java/org/redisson/RedissonTest.java
+++ b/redisson/src/test/java/org/redisson/RedissonTest.java
@@ -729,8 +729,8 @@ public class RedissonTest {
process.shutdown();
assertThat(readonlyErrors).isZero();
- assertThat(errors).isLessThan(70);
- assertThat(success).isGreaterThan(600 - 70);
+ assertThat(errors).isLessThan(130);
+ assertThat(success).isGreaterThan(600 - 130);
} | Feature - RExpirable interface added to RRateLimiter. #<I> | redisson_redisson | train |
0c27aa500d46438eb542f042b476cbe623a4d57c | diff --git a/libraries/joomla/database/driver.php b/libraries/joomla/database/driver.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/database/driver.php
+++ b/libraries/joomla/database/driver.php
@@ -965,7 +965,7 @@ abstract class JDatabaseDriver extends JDatabase implements JDatabaseInterface
{
$this->connect();
- static $cursor;
+ static $cursor = null;
// Execute the query and get the result set cursor.
if (!($cursor = $this->execute()))
@@ -998,7 +998,7 @@ abstract class JDatabaseDriver extends JDatabase implements JDatabaseInterface
{
$this->connect();
- static $cursor;
+ static $cursor = null;
// Execute the query and get the result set cursor.
if (!($cursor = $this->execute()))
@@ -1246,9 +1246,9 @@ abstract class JDatabaseDriver extends JDatabase implements JDatabaseInterface
* risks and reserved word conflicts.
*
* @param mixed $name The identifier name to wrap in quotes, or an array of identifier names to wrap in quotes.
- * Each type supports dot-notation name.
+ * Each type supports dot-notation name.
* @param mixed $as The AS query part associated to $name. It can be string or array, in latter case it has to be
- * same length of $name; if is null there will not be any AS part for string or array element.
+ * same length of $name; if is null there will not be any AS part for string or array element.
*
* @return mixed The quote wrapped name, same type of $name.
*
diff --git a/libraries/joomla/database/query.php b/libraries/joomla/database/query.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/database/query.php
+++ b/libraries/joomla/database/query.php
@@ -289,7 +289,7 @@ abstract class JDatabaseQuery
break;
case 'qn':
- return $this->quoteName($args[0]);
+ return $this->quoteName($args[0], isset($args[1]) ? $args[1] : null);
break;
case 'e':
@@ -1249,21 +1249,24 @@ abstract class JDatabaseQuery
* $query->quoteName('#__a');
* $query->qn('#__a');
*
- * @param string $name The identifier name to wrap in quotes.
+ * @param mixed $name The identifier name to wrap in quotes, or an array of identifier names to wrap in quotes.
+ * Each type supports dot-notation name.
+ * @param mixed $as The AS query part associated to $name. It can be string or array, in latter case it has to be
+ * same length of $name; if is null there will not be any AS part for string or array element.
*
- * @return string The quote wrapped name.
+ * @return mixed The quote wrapped name, same type of $name.
*
* @since 11.1
* @throws RuntimeException if the internal db property is not a valid object.
*/
- public function quoteName($name)
+ public function quoteName($name, $as = null)
{
if (!($this->db instanceof JDatabaseDriver))
{
throw new RuntimeException('JLIB_DATABASE_ERROR_INVALID_DB_OBJECT');
}
- return $this->db->quoteName($name);
+ return $this->db->quoteName($name, $as);
}
/** | Fixed some inconsistency between JDatabaseQuery::quoteName and
JDatabaseDriver::quoteName and their aliases.
Also fixed two static variable initialization warnings. | joomla_joomla-framework | train |
a964095e10bf6b760ff7cc2a2392b6974cb6a097 | diff --git a/Kwf_js/EyeCandy/List.js b/Kwf_js/EyeCandy/List.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/EyeCandy/List.js
+++ b/Kwf_js/EyeCandy/List.js
@@ -36,7 +36,8 @@ Ext.extend(Kwf.EyeCandy.List, Ext.util.Observable, {
'childMouseLeave': true,
'childClick': true,
'childStateChanged': true,
- 'activeChanged': true
+ 'activeChanged': true,
+ 'nextPreviousClick': true // fired in ActiveChanger/NextPreviousLinks.js
});
Ext.applyIf(this, {
plugins: [],
diff --git a/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/NextPreviousLinks.js b/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/NextPreviousLinks.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/NextPreviousLinks.js
+++ b/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/NextPreviousLinks.js
@@ -41,7 +41,11 @@ Kwf.EyeCandy.List.Plugins.ActiveChanger.NextPreviousLinks = Ext.extend(Kwf.EyeCa
} else {
item = this.list.getItem(this.list.getActiveItem().listIndex-1);
}
- if (item) this.list.setActiveItem(item);
+
+ if (item) {
+ this.list.setActiveItem(item);
+ this.list.fireEvent('nextPreviousClick', item);
+ }
},
onNext: function() {
var item;
@@ -50,6 +54,10 @@ Kwf.EyeCandy.List.Plugins.ActiveChanger.NextPreviousLinks = Ext.extend(Kwf.EyeCa
} else {
item = this.list.getItem(this.list.getActiveItem().listIndex+1);
}
- if (item) this.list.setActiveItem(item);
+
+ if (item) {
+ this.list.setActiveItem(item);
+ this.list.fireEvent('nextPreviousClick', item);
+ }
}
});
diff --git a/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/PlayPauseLink.js b/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/PlayPauseLink.js
index <HASH>..<HASH> 100644
--- a/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/PlayPauseLink.js
+++ b/Kwf_js/EyeCandy/List/Plugins/ActiveChanger/PlayPauseLink.js
@@ -23,6 +23,15 @@ Kwf.EyeCandy.List.Plugins.ActiveChanger.PlayPauseLink = Ext.extend(Kwf.EyeCandy.
this._isPlaying = true;
this.play.defer(this.interval, this);
}
+
+ this.list.on('childClick', function(item, ev) {
+ ev.stopEvent();
+ this.pause();
+ }, this);
+
+ this.list.on('nextPreviousClick', function(item) {
+ this.pause();
+ }, this);
},
play: function() { | do pause if a manual click is made in an eye candy list | koala-framework_koala-framework | train |
6eafca83dbb3ee5a300e9972109cfcd060bcf2d2 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -23,7 +23,8 @@ module.exports = () => {
ParseError : _error.ParseError,
LazyParseError : _error.LazyParseError,
- uncons: _stream.uncons,
+ uncons : _stream.uncons,
+ ArrayStream: _stream.ArrayStream,
Config : _parser.Config,
State : _parser.State, | :wrench: Fix `ArrayStream` is not exported | susisu_loquat-core | train |
f925fba059de5c9ac834d9a5ef2f668634cc1646 | diff --git a/tests/ClientTest.php b/tests/ClientTest.php
index <HASH>..<HASH> 100644
--- a/tests/ClientTest.php
+++ b/tests/ClientTest.php
@@ -143,14 +143,17 @@ class ClientTest extends TestCase
'000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
);
- $json = $history[0]['request']->getBody()->getContents();
- $body = $this->requestBody(
- 'getblockheader',
- '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ $json = json_decode(
+ $history[0]['request']->getBody()->getContents(),
+ true
);
+ $this->assertEquals($this->requestBody(
+ 'getblockheader',
+ $json['id'],
+ '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ ), $json);
$this->assertEquals(self::$getBlockResponse, $response->get());
- $this->assertArraySubset($body, json_decode($json, true));
}
/**
@@ -233,13 +236,16 @@ class ClientTest extends TestCase
$promise->wait();
- $json = $history[0]['request']->getBody()->getContents();
- $body = $this->requestBody(
- 'getblockheader',
- '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ $json = json_decode(
+ $history[0]['request']->getBody()->getContents(),
+ true
);
- $this->assertArraySubset($body, json_decode($json, true));
+ $this->assertEquals($this->requestBody(
+ 'getblockheader',
+ $json['id'],
+ '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ ), $json);
}
/**
@@ -261,13 +267,16 @@ class ClientTest extends TestCase
'000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
);
- $json = $history[0]['request']->getBody()->getContents();
- $body = $this->requestBody(
- 'getblockheader',
- '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ $json = json_decode(
+ $history[0]['request']->getBody()->getContents(),
+ true
);
- $this->assertArraySubset($body, json_decode($json, true));
+ $this->assertEquals($this->requestBody(
+ 'getblockheader',
+ $json['id'],
+ '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ ), $json);
$this->assertEquals(self::$getBlockResponse, $response->get());
}
@@ -301,13 +310,16 @@ class ClientTest extends TestCase
$promise->wait();
- $json = $history[0]['request']->getBody()->getContents();
- $body = $this->requestBody(
- 'getblockheader',
- '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ $json = json_decode(
+ $history[0]['request']->getBody()->getContents(),
+ true
);
- $this->assertArraySubset($body, json_decode($json, true));
+ $this->assertEquals($this->requestBody(
+ 'getblockheader',
+ $json['id'],
+ '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
+ ), $json);
}
/**
diff --git a/tests/TestCase.php b/tests/TestCase.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase.php
+++ b/tests/TestCase.php
@@ -206,15 +206,16 @@ abstract class TestCase extends \PHPUnit\Framework\TestCase
* Get request body.
*
* @param string $method
- * @param mixed $params
+ * @param mixed $params
*
* @return array
*/
- protected function requestBody($method, ...$params)
+ protected function requestBody($method, $id, ...$params)
{
return [
'method' => $method,
- 'params' => $params,
+ 'params' => (array) $params,
+ 'id' => $id,
];
}
} | Added id to request body assertion. | denpamusic_php-bitcoinrpc | train |
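The test change above decodes the captured request, reuses the id the client generated when building the expected body, and then asserts on the whole payload. A sketch of the same idea, with a hand-written JSON string standing in for the captured request:

package main

import (
    "encoding/json"
    "fmt"
    "reflect"
)

func main() {
    // Pretend this is the JSON-RPC body captured from the client under test;
    // the id is generated per request, so it cannot be hard-coded up front.
    captured := []byte(`{"method":"getblockheader","params":["00000000..."],"id":"a1b2"}`)

    var got map[string]interface{}
    if err := json.Unmarshal(captured, &got); err != nil {
        panic(err)
    }

    // Build the expectation around the id the client actually chose,
    // then compare the whole body instead of a subset.
    want := map[string]interface{}{
        "method": "getblockheader",
        "params": []interface{}{"00000000..."},
        "id":     got["id"],
    }
    fmt.Println(reflect.DeepEqual(got, want)) // true
}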
7473386f214a0725c9059497e181f2fe84824328 | diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/editorCommands.js b/bundles/org.eclipse.orion.client.ui/web/orion/editorCommands.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/editorCommands.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/editorCommands.js
@@ -264,7 +264,7 @@ define([
commandRegistry.registerCommandContribution(this.saveToolbarId || this.toolbarId, "orion.openResource", 1, this.saveToolbarId ? "orion.menuBarFileGroup/orion.edit.saveGroup" : null, false, new mKeyBinding.KeyBinding('f', true, true)); //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-0$
commandRegistry.registerCommandContribution(this.saveToolbarId || this.toolbarId, "orion.edit.save", 2, this.saveToolbarId ? "orion.menuBarFileGroup/orion.edit.saveGroup" : null, false, new mKeyBinding.KeyBinding('s', true), null, this); //$NON-NLS-3$ //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-3$
commandRegistry.registerCommandContribution(this.saveToolbarId || this.toolbarId, "eclipse.file.refresh", 3, this.saveToolbarId ? "orion.menuBarFileGroup/orion.edit.saveGroup" : null, false, null, null, this);
- commandRegistry.registerCommandContribution(this.editToolbarId || this.pageNavId, "orion.edit.gotoLine", 3, this.editToolbarId ? "orion.menuBarEditGroup/orion.findGroup" : null, !this.editToolbarId, new mKeyBinding.KeyBinding('l', !util.isMac, false, false, util.isMac), new mCommandRegistry.URLBinding("gotoLine", "line"), this); //$NON-NLS-4$ //$NON-NLS-3$ //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-5$
+ commandRegistry.registerCommandContribution(this.editToolbarId || this.pageNavId, "orion.edit.gotoLine", 3, this.editToolbarId ? "orion.menuBarEditGroup/orion.findGroup" : null, !this.editToolbarId, new mKeyBinding.KeyBinding('g', !util.isMac, false, false, util.isMac), new mCommandRegistry.URLBinding("gotoLine", "line"), this); //$NON-NLS-4$ //$NON-NLS-3$ //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-5$
commandRegistry.registerCommandContribution(this.editToolbarId || this.pageNavId, "orion.edit.find", 0, this.editToolbarId ? "orion.menuBarEditGroup/orion.findGroup" : null, !this.editToolbarId, new mKeyBinding.KeyBinding('f', true), new mCommandRegistry.URLBinding("find", "find"), this); //$NON-NLS-4$ //$NON-NLS-3$ //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-5$
commandRegistry.registerCommandContribution(this.editToolbarId || this.pageNavId , "orion.edit.format", 2, this.editToolbarId ? "orion.menuBarEditGroup/orion.edit.formatGroup" : null, !this.editToolbarId, new mKeyBinding.KeyBinding('f', false, true, true), new mCommandRegistry.URLBinding("format", "format"), this); //$NON-NLS-4$ //$NON-NLS-3$ //$NON-NLS-2$ //$NON-NLS-1$ //$NON-NLS-5$
commandRegistry.registerCommandContribution(this.toolbarId, "orion.keyAssist", 0, "orion.menuBarToolsGroup", false, new mKeyBinding.KeyBinding(191, false, true, !util.isMac, util.isMac)); //$NON-NLS-1$ //$NON-NLS-0$ //$NON-NLS-2$
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/common-nav.js b/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/common-nav.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/common-nav.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/widgets/nav/common-nav.js
@@ -237,7 +237,7 @@ define([
var viewActionsScope = this.viewActionsScope;
var contextMenuActionsScope = this.contextMenuActionsScope;
- var renameBinding = new KeyBinding(113, true); // F2
+ var renameBinding = new KeyBinding(113); // F2
var delBinding = new KeyBinding(46); // Delete
var cutBinding = new KeyBinding('x', true); /* Ctrl+X */ //$NON-NLS-0$
var copySelections = new KeyBinding('c', true); /* Ctrl+C */ //$NON-NLS-0$ | Change keyboard shortcut for Rename and Goto Line commands (#<I>) | eclipse_orion.client | train |
0f9d6aa9c757e0408c2cbfa43dd66a53d750a7d4 | diff --git a/conn_linux_gteq_1.12_integration_test.go b/conn_linux_gteq_1.12_integration_test.go
index <HASH>..<HASH> 100644
--- a/conn_linux_gteq_1.12_integration_test.go
+++ b/conn_linux_gteq_1.12_integration_test.go
@@ -12,7 +12,7 @@ import (
"golang.org/x/sys/unix"
)
-func TestLinuxConnIntegrationTimeout(t *testing.T) {
+func TestIntegrationConnTimeout(t *testing.T) {
conn, err := netlink.Dial(unix.NETLINK_GENERIC, nil)
if err != nil {
t.Fatalf("failed to dial: %v", err)
@@ -38,7 +38,7 @@ func TestLinuxConnIntegrationTimeout(t *testing.T) {
}
}
-func TestLinuxConnIntegrationExecuteAfterReadDeadline(t *testing.T) {
+func TestIntegrationConnExecuteAfterReadDeadline(t *testing.T) {
conn, err := netlink.Dial(unix.NETLINK_GENERIC, nil)
if err != nil {
t.Fatalf("failed to dial: %v", err) | netlink: fix names of some integration tests | mdlayher_netlink | train |
a62e64d4b92b01475d140df884f91592e723b419 | diff --git a/turnstile/limits.py b/turnstile/limits.py
index <HASH>..<HASH> 100644
--- a/turnstile/limits.py
+++ b/turnstile/limits.py
@@ -25,6 +25,12 @@ import msgpack
from turnstile import utils
+class DeferLimit(Exception):
+ """Exception raised if limit should not be considered."""
+
+ pass
+
+
class BucketKey(object):
"""
Represent a bucket key. This class provides functionality to
@@ -190,12 +196,6 @@ def get_unit_name(value):
return _units_map.get(value, str(value))
-class DeferLimit(Exception):
- """Exception raised if limit should not be considered."""
-
- pass
-
-
class BucketLoader(object):
"""
Load a bucket from its list representation. | Move the DeferLimit exception to the top of the file. | klmitch_turnstile | train |
a8ff37da7c51455f86bff903d5fc5de30d2bed97 | diff --git a/cobald/utility/concurrent/thread_runner.py b/cobald/utility/concurrent/thread_runner.py
index <HASH>..<HASH> 100644
--- a/cobald/utility/concurrent/thread_runner.py
+++ b/cobald/utility/concurrent/thread_runner.py
@@ -63,10 +63,11 @@ class ThreadRunner(BaseRunner):
def _start_outstanding(self):
with self._lock:
- for subroutine in self._payloads:
- thread = CapturingThread(target=subroutine)
- thread.start()
- self._threads.add(thread)
- self._logger.debug('booted thread %s', thread)
+ payloads = self._payloads.copy()
self._payloads.clear()
+ for subroutine in payloads:
+ thread = CapturingThread(target=subroutine)
+ thread.start()
+ self._threads.add(thread)
+ self._logger.debug('booted thread %s', thread)
time.sleep(0) | reduced blocking of payload list for thread runner | MatterMiners_cobald | train |
6dc96dde14531b7d0af8fa3287e86c154f0b0a6c | diff --git a/elasticapm/contrib/asyncio/traces.py b/elasticapm/contrib/asyncio/traces.py
index <HASH>..<HASH> 100644
--- a/elasticapm/contrib/asyncio/traces.py
+++ b/elasticapm/contrib/asyncio/traces.py
@@ -29,9 +29,11 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import functools
+from types import TracebackType
+from typing import Optional, Type
from elasticapm.conf.constants import LABEL_RE
-from elasticapm.traces import DroppedSpan, capture_span, error_logger, execution_context
+from elasticapm.traces import SpanType, capture_span, execution_context
from elasticapm.utils import get_name_from_func
@@ -46,34 +48,13 @@ class async_capture_span(capture_span):
return decorated
- async def __aenter__(self):
- transaction = execution_context.get_transaction()
- if transaction and transaction.is_sampled:
- return transaction.begin_span(
- self.name,
- self.type,
- context=self.extra,
- leaf=self.leaf,
- labels=self.labels,
- span_subtype=self.subtype,
- span_action=self.action,
- sync=False,
- start=self.start,
- )
+ async def __aenter__(self) -> Optional[SpanType]:
+ return self.handle_enter(False)
- async def __aexit__(self, exc_type, exc_val, exc_tb):
- transaction = execution_context.get_transaction()
- if transaction and transaction.is_sampled:
- try:
- span = transaction.end_span(self.skip_frames)
- if exc_val and not isinstance(span, DroppedSpan):
- try:
- exc_val._elastic_apm_span_id = span.id
- except AttributeError:
- # could happen if the exception has __slots__
- pass
- except LookupError:
- error_logger.debug("ended non-existing span %s of type %s", self.name, self.type)
+ async def __aexit__(
+ self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+ ):
+ self.handle_exit(exc_type, exc_val, exc_tb)
async def set_context(data, key="custom"):
diff --git a/elasticapm/traces.py b/elasticapm/traces.py
index <HASH>..<HASH> 100644
--- a/elasticapm/traces.py
+++ b/elasticapm/traces.py
@@ -35,7 +35,8 @@ import threading
import time
import timeit
from collections import defaultdict
-from typing import Any, Callable, Dict, Optional, Tuple, Union
+from types import TracebackType
+from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
from elasticapm.conf import constants
from elasticapm.conf.constants import LABEL_RE, SPAN, TRANSACTION
@@ -867,7 +868,15 @@ class capture_span(object):
return decorated
- def __enter__(self) -> Union[Span, DroppedSpan, None]:
+ def __enter__(self) -> Optional[SpanType]:
+ return self.handle_enter(self.sync)
+
+ def __exit__(
+ self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+ ) -> None:
+ self.handle_exit(exc_type, exc_val, exc_tb)
+
+ def handle_enter(self, sync: bool) -> Optional[SpanType]:
transaction = execution_context.get_transaction()
if transaction and transaction.is_sampled:
return transaction.begin_span(
@@ -879,11 +888,13 @@ class capture_span(object):
span_subtype=self.subtype,
span_action=self.action,
start=self.start,
- sync=self.sync,
+ sync=sync,
)
return None
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def handle_exit(
+ self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+ ) -> None:
transaction = execution_context.get_transaction()
if transaction and transaction.is_sampled: | merge common code between capture_span and async_capture_span (#<I>)
this should avoid issues with changes only being implemented
in one of the two context managers | elastic_apm-agent-python | train |
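The refactor above moves the duplicated enter/exit logic into shared handle_enter/handle_exit methods so the sync and async context managers cannot drift apart. The same deduplication idea, loosely translated to Go: two thin adapters — one blocking, one goroutine-based standing in for the async path — delegating to a single shared core:

package main

import "fmt"

type span struct{ name string }

// handleEnter/handleExit hold the single shared implementation.
func (s *span) handleEnter(sync bool) { fmt.Println("enter", s.name, "sync:", sync) }
func (s *span) handleExit()           { fmt.Println("exit", s.name) }

// Run is the "synchronous" adapter.
func (s *span) Run(fn func()) {
    s.handleEnter(true)
    defer s.handleExit()
    fn()
}

// Go is the "asynchronous" adapter; both stay thin so a behaviour change
// only ever has to be made once, in the shared handlers.
func (s *span) Go(fn func()) <-chan struct{} {
    done := make(chan struct{})
    go func() {
        defer close(done)
        s.handleEnter(false)
        defer s.handleExit()
        fn()
    }()
    return done
}

func main() {
    s := &span{name: "demo"}
    s.Run(func() { fmt.Println("sync work") })
    <-s.Go(func() { fmt.Println("async work") })
}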
d93c3298b7f07869ea69ac400554d0e94c9d6906 | diff --git a/channeldb/invoice_test.go b/channeldb/invoice_test.go
index <HASH>..<HASH> 100644
--- a/channeldb/invoice_test.go
+++ b/channeldb/invoice_test.go
@@ -1294,9 +1294,9 @@ func TestHTLCSet(t *testing.T) {
expSet2 := make(map[CircuitKey]*InvoiceHTLC)
checkHTLCSets := func() {
- require.Equal(t, expSetNil, inv.HTLCSet(nil))
- require.Equal(t, expSet1, inv.HTLCSet(setID1))
- require.Equal(t, expSet2, inv.HTLCSet(setID2))
+ require.Equal(t, expSetNil, inv.HTLCSet(nil, HtlcStateAccepted))
+ require.Equal(t, expSet1, inv.HTLCSet(setID1, HtlcStateAccepted))
+ require.Equal(t, expSet2, inv.HTLCSet(setID2, HtlcStateAccepted))
}
// All HTLC sets should be empty initially.
diff --git a/channeldb/invoices.go b/channeldb/invoices.go
index <HASH>..<HASH> 100644
--- a/channeldb/invoices.go
+++ b/channeldb/invoices.go
@@ -471,17 +471,16 @@ type Invoice struct {
HodlInvoice bool
}
-// HTLCSet returns the set of accepted HTLCs belonging to an invoice. Passing a
-// nil setID will return all accepted HTLCs in the case of legacy or MPP, and no
-// HTLCs in the case of AMP. Otherwise, the returned set will be filtered by
-// the populated setID which is used to retrieve AMP HTLC sets.
-func (i *Invoice) HTLCSet(setID *[32]byte) map[CircuitKey]*InvoiceHTLC {
+// HTLCSet returns the set of HTLCs belonging to setID and in the provided
+// state. Passing a nil setID will return all HTLCs in the provided state in the
+// case of legacy or MPP, and no HTLCs in the case of AMP. Otherwise, the
+// returned set will be filtered by the populated setID which is used to
+// retrieve AMP HTLC sets.
+func (i *Invoice) HTLCSet(setID *[32]byte, state HtlcState) map[CircuitKey]*InvoiceHTLC {
htlcSet := make(map[CircuitKey]*InvoiceHTLC)
for key, htlc := range i.Htlcs {
- // Only consider accepted mpp htlcs. It is possible that there
- // are htlcs registered in the invoice database that previously
- // timed out and are in the canceled state now.
- if htlc.State != HtlcStateAccepted {
+ // Only add HTLCs that are in the requested HtlcState.
+ if htlc.State != state {
continue
}
@@ -2039,7 +2038,7 @@ func updateInvoiceState(invoice *Invoice, hash *lntypes.Hash,
// Sanity check that the user isn't trying to settle or accept a
// non-existent HTLC set.
- if len(invoice.HTLCSet(update.SetID)) == 0 {
+ if len(invoice.HTLCSet(update.SetID, HtlcStateAccepted)) == 0 {
return ErrEmptyHTLCSet
}
@@ -2329,8 +2328,8 @@ func (d *DB) DeleteInvoice(invoicesToDelete []InvoiceDeleteRef) error {
// invoice key.
key := invoiceAddIndex.Get(addIndexKey[:])
if !bytes.Equal(key, invoiceKey) {
- return fmt.Errorf("unknown invoice in " +
- "add index")
+ return fmt.Errorf("unknown invoice " +
+ "in add index")
}
// Remove from the add index.
diff --git a/invoices/update.go b/invoices/update.go
index <HASH>..<HASH> 100644
--- a/invoices/update.go
+++ b/invoices/update.go
@@ -168,7 +168,7 @@ func updateMpp(ctx *invoiceUpdateCtx,
return nil, ctx.failRes(ResultHtlcSetTotalTooLow), nil
}
- htlcSet := inv.HTLCSet(setID)
+ htlcSet := inv.HTLCSet(setID, channeldb.HtlcStateAccepted)
// Check whether total amt matches other htlcs in the set.
var newSetTotal lnwire.MilliSatoshi
@@ -373,7 +373,7 @@ func updateLegacy(ctx *invoiceUpdateCtx,
// Don't allow settling the invoice with an old style
// htlc if we are already in the process of gathering an
// mpp set.
- for _, htlc := range inv.HTLCSet(nil) {
+ for _, htlc := range inv.HTLCSet(nil, channeldb.HtlcStateAccepted) {
if htlc.MppTotalAmt > 0 {
return nil, ctx.failRes(ResultMppInProgress), nil
} | channeldb+invoice: add state filter to HTLCSet | lightningnetwork_lnd | train |
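HTLCSet now takes the desired HtlcState instead of hard-coding "accepted". A generic sketch of that shape — filtering a map by a caller-supplied state — using made-up types rather than the lnd ones:

package main

import "fmt"

type state int

const (
    accepted state = iota
    settled
    canceled
)

type htlc struct {
    amt   int
    state state
}

// filterByState mirrors the new HTLCSet signature: the caller says which
// state it wants instead of the function assuming "accepted".
func filterByState(all map[string]htlc, want state) map[string]htlc {
    out := make(map[string]htlc)
    for key, h := range all {
        if h.state == want {
            out[key] = h
        }
    }
    return out
}

func main() {
    all := map[string]htlc{
        "a": {amt: 10, state: accepted},
        "b": {amt: 20, state: canceled},
        "c": {amt: 30, state: accepted},
    }
    fmt.Println(len(filterByState(all, accepted))) // 2
    fmt.Println(len(filterByState(all, canceled))) // 1
}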
15bbbae3685dcd8a57b008b6a610e37c0e1d4a8c | diff --git a/src/hoist.js b/src/hoist.js
index <HASH>..<HASH> 100644
--- a/src/hoist.js
+++ b/src/hoist.js
@@ -10,6 +10,13 @@
for (var x in from) into[x] = from[x];
return into;
}
+
+ function extendAliases(into, from) {
+ for (var x in from) {
+ var xs = x.split(' ');
+ for (var i = 0; i < xs.length; i++) into[xs[i]] = from[x];
+ }
+ }
function get(obj, key, nothing) {
if (key.indexOf('.') == -1) {
@@ -423,18 +430,28 @@
this.key = key;
}
+ extendAliases(PartialQueryManager.prototype, {
+ "eq is equals":
+ function (value) { this.qm = this.qm._where(this.key, value); return this; },
+ "gt greaterThan":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$gt", value); return this; },
+ "gte ge":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$gte", value); return this; },
+ "elem in":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$in", value); return this; },
+ "lt lessThan":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$lt", value); return this; },
+ "lte le":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$lte", value); return this; },
+ "neq ne isnt notEquals":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$ne", value); return this; },
+ "nelem nin notIn notElem":
+ function (value) { this.qm = this.qm._whereAnd(this.key, "$nin", value); return this; },
+ "exists":
+ function () { this.qm = this.qm._whereAnd(this.key, "$exists", true); return this; }
+ });
+
extend(PartialQueryManager.prototype, {
- eq: function (value) { this.qm = this.qm._where(this.key, value); return this; },
- gt: function (value) { this.qm = this.qm._whereAnd(this.key, "$gt", value); return this; },
- gte: function (value) { this.qm = this.qm._whereAnd(this.key, "$gte", value); return this; },
- gt: function (value) { this.qm = this.qm._whereAnd(this.key, "$gt", value); return this; },
- isIn: function (value) { this.qm = this.qm._whereAnd(this.key, "$in", value); return this; },
- lt: function (value) { this.qm = this.qm._whereAnd(this.key, "$lt", value); return this; },
- lte: function (value) { this.qm = this.qm._whereAnd(this.key, "$lte", value); return this; },
- ne: function (value) { this.qm = this.qm._whereAnd(this.key, "$ne", value); return this; },
- nin: function (value) { this.qm = this.qm._whereAnd(this.key, "$nin", value); return this; },
- exists: function () { this.qm = this.qm._whereAnd(this.key, "$exists", true); return this; },
-
where: function (key) { return this.qm.where(key); },
limit: function (limit) { return this.qm.limit(limit); },
skip: function (skip) { return this.qm.skip(skip); }, | some aliases for query terms for better niceness | hoist_hoist-js | train |
0316c98e0a064231cc532cb728eee8dbc40dc074 | diff --git a/src/Carbon/Lang/lt.php b/src/Carbon/Lang/lt.php
index <HASH>..<HASH> 100644
--- a/src/Carbon/Lang/lt.php
+++ b/src/Carbon/Lang/lt.php
@@ -35,42 +35,56 @@
* - Justinas (Gamesh)
*/
return [
- 'year' => ':count metus|:count metus|:count metų',
+ 'year' => ':count metai|:count metai|:count metais',
'y' => ':count m.',
- 'month' => ':count mėnesį|:count mėnesius|:count mėnesių',
+ 'month' => ':count mėnuo|:count mėnuo|:count mėnesį',
'm' => ':count mėn.',
- 'week' => ':count savaitę|:count savaites|:count savaičių',
+ 'week' => ':count savaitė|:count savaitė|:count savaitę',
'w' => ':count sav.',
- 'day' => ':count dieną|:count dienas|:count dienų',
+ 'day' => ':count diena|:count dienos|:count dienų',
'd' => ':count d.',
- 'hour' => ':count valandą|:count valandas|:count valandų',
+ 'hour' => ':count valanda|:count valandą|:count valandą',
'h' => ':count val.',
- 'minute' => ':count minutę|:count minutes|:count minučių',
+ 'minute' => ':count minutė|:count minutė|:count minutę',
'min' => ':count min.',
- 'second' => ':count sekundę|:count sekundes|:count sekundžių',
+ 'second' => ':count sekundė|:count sekundes|:count sekundžių',
's' => ':count sek.',
- 'second_from_now' => ':count sekundės|:count sekundžių|:count sekundžių',
- 'minute_from_now' => ':count minutės|:count minučių|:count minučių',
- 'hour_from_now' => ':count valandos|:count valandų|:count valandų',
- 'day_from_now' => ':count dienos|:count dienų|:count dienų',
- 'week_from_now' => ':count savaitės|:count savaičių|:count savaičių',
- 'month_from_now' => ':count mėnesio|:count mėnesių|:count mėnesių',
+
+ 'year_ago' => ':count metus|:count metus|:count metų',
+ 'month_ago' => ':count mėnesį|:count mėnesius|:count mėnesių',
+ 'week_ago' => ':count savaitę|:count savaites|:count savaičių',
+ 'day_ago' => ':count dieną|:count dienas|:count dienų',
+ 'hour_ago' => ':count valandą|:count valandas|:count valandų',
+ 'minute_ago' => ':count minutę|:count minutes|:count minučių',
+ 'second_ago' => ':count sekundę|:count sekundes|:count sekundžių',
+
'year_from_now' => ':count metų',
+ 'month_from_now' => ':count mėnesio|:count mėnesių|:count mėnesių',
+ 'week_from_now' => ':count savaitės|:count savaičių|:count savaičių',
+ 'day_from_now' => ':count dienos|:count dienų|:count dienų',
+ 'hour_from_now' => ':count valandos|:count valandų|:count valandų',
+ 'minute_from_now' => ':count minutės|:count minučių|:count minučių',
+ 'second_from_now' => ':count sekundės|:count sekundžių|:count sekundžių',
+
'ago' => 'prieš :time',
'from_now' => 'už :time',
'after' => 'po :time',
'before' => ':time nuo dabar',
+
'first_day_of_week' => 1,
'day_of_first_week_of_year' => 4,
+
'diff_now' => 'ką tik',
'diff_yesterday' => 'vakar',
'diff_tomorrow' => 'rytoj',
'diff_before_yesterday' => 'užvakar',
'diff_after_tomorrow' => 'poryt',
+
'period_recurrences' => 'kartą|:count kartų',
'period_interval' => 'kiekvieną :interval',
'period_start_date' => 'nuo :date',
'period_end_date' => 'iki :date',
+
'months' => ['sausis', 'vasaris', 'kovas', 'balandis', 'gegužė', 'birželis', 'liepa', 'rugpjūtis', 'rugsėjis', 'spalis', 'lapkritis', 'gruodis'],
'months_short' => ['sau', 'vas', 'kov', 'bal', 'geg', 'bir', 'lie', 'rgp', 'rgs', 'spa', 'lap', 'gru'],
'weekdays' => ['sekmadienis', 'pirmadienis', 'antradienis', 'trečiadienis', 'ketvirtadienis', 'penktadienis', 'šeštadienis'], | #<I> Fix Lithuanian declensions | briannesbitt_Carbon | train |
b9bc07d8f71c5de913bfb8f1bacd86caf733c477 | diff --git a/lib/node_modules/@stdlib/math/stats/incr/prod/lib/incrprod.js b/lib/node_modules/@stdlib/math/stats/incr/prod/lib/incrprod.js
index <HASH>..<HASH> 100644
--- a/lib/node_modules/@stdlib/math/stats/incr/prod/lib/incrprod.js
+++ b/lib/node_modules/@stdlib/math/stats/incr/prod/lib/incrprod.js
@@ -6,6 +6,12 @@ var frexp = require( '@stdlib/math/base/special/frexp' );
var ldexp = require( '@stdlib/math/base/special/ldexp' );
+// VARIABLES //
+
+// `frexp` workspace:
+var PARTS = [ 0.0, 0 ];
+
+
// MAIN //
/**
@@ -59,24 +65,23 @@ function incrprod() {
* // returns -10.0
*/
function accumulator( x ) {
- var parts;
if ( arguments.length === 0 ) {
return prod;
}
// Splitting the incoming value into a normalized fraction and exponent:
- parts = frexp( x );
+ frexp( PARTS, x );
// Update the accumulated fraction:
- frac *= parts[ 0 ];
+ frac *= PARTS[ 0 ];
// Update the accumulated exponent:
- exp += parts[ 1 ];
+ exp += PARTS[ 1 ];
// Ensure fraction remains normalized to avoid overflow/underflow...
if ( frac > -0.5 && frac < 0.5 ) {
- parts = frexp( frac );
- frac = parts[ 0 ];
- exp += parts[ 1 ];
+ frexp( PARTS, frac );
+ frac = PARTS[ 0 ];
+ exp += PARTS[ 1 ];
}
prod = ldexp( frac, exp );
return prod; | Avoid temporary array creation by reusing allocated memory | stdlib-js_stdlib | train |
0e9a935797f472cebb0ae02493830abce12b5469 | diff --git a/pygmsh/built_in/geometry.py b/pygmsh/built_in/geometry.py
index <HASH>..<HASH> 100644
--- a/pygmsh/built_in/geometry.py
+++ b/pygmsh/built_in/geometry.py
@@ -458,9 +458,6 @@ class Geometry:
if holes:
assert with_volume
- print(x0)
- print(radii)
-
# Add points.
p = [
self.add_point(x0, lcar=lcar),
@@ -503,32 +500,13 @@ class Geometry:
self.add_curve_loop([c[1], c[6], c[11]]),
]
# Create a surface for each line loop.
- print()
- for pp in p:
- print(pp)
- print()
- for cc in c:
- print(cc)
- print()
- for l in ll:
- print(l)
- self.add_surface(l)
-
- exit(1)
s = [self.add_surface(l) for l in ll]
# Combine the surfaces to avoid seams
- if self._gmsh_major() == 3:
- s = [self.add_compound_surface(s[:4]), self.add_compound_surface(s[4:])]
- else:
- assert self._gmsh_major() == 4
- # <https://gitlab.onelab.info/gmsh/gmsh/issues/507>
- self.add_raw_code(
- "Compound Surface{{{}}};".format(",".join([surf.id for surf in s[:4]]))
- )
- self.add_raw_code(
- "Compound Surface{{{}}};".format(",".join([surf.id for surf in s[4:]]))
- )
+ # <https://gitlab.onelab.info/gmsh/gmsh/issues/507>
+ # Cannot enable those yet, <https://gitlab.onelab.info/gmsh/gmsh/-/issues/995>
+ # self._COMPOUND_ENTITIES.append((2, [surf._ID for surf in s[:4]]))
+ # self._COMPOUND_ENTITIES.append((2, [surf._ID for surf in s[4:]]))
# Create the surface loop.
surface_loop = self.add_surface_loop(s)
diff --git a/test/test_ellipsoid.py b/test/test_ellipsoid.py
index <HASH>..<HASH> 100644
--- a/test/test_ellipsoid.py
+++ b/test/test_ellipsoid.py
@@ -10,8 +10,8 @@ def test():
geom = pygmsh.built_in.Geometry()
geom.add_ellipsoid([0.0, 0.0, 0.0], [1.0, 0.5, 0.75], 0.05)
- ref = 1.5676038497587947
mesh = pygmsh.generate_mesh(geom)
+ ref = 1.5676038497587947
assert abs(compute_volume(mesh) - ref) < 1.0e-2 * ref
return mesh
diff --git a/test/test_extrusion_entities.py b/test/test_extrusion_entities.py
index <HASH>..<HASH> 100644
--- a/test/test_extrusion_entities.py
+++ b/test/test_extrusion_entities.py
@@ -47,7 +47,7 @@ def test():
poly = geom.add_polygon(
[[5.0, 0.0, 0.0], [6.0, 0.0, 0.0], [5.0, 1.0, 0.0]], lcar=1e20
)
- _, _, poly_lat = geom.extrude(poly, [0.0, 0.0, 1.0], num_layers=1)
+ _, _, poly_lat = geom.extrude(poly.surface, [0.0, 0.0, 1.0], num_layers=1)
mesh = pygmsh.generate_mesh(geom)
assert len(mesh.points) == 8 + 6
assert len(poly_lat) == 3 | get ellipsoid to work | nschloe_pygmsh | train |
2047445a1b4fcc8d8ea83f69422555df5be774b1 | diff --git a/src/org/opencms/jsp/CmsJspNavBuilder.java b/src/org/opencms/jsp/CmsJspNavBuilder.java
index <HASH>..<HASH> 100644
--- a/src/org/opencms/jsp/CmsJspNavBuilder.java
+++ b/src/org/opencms/jsp/CmsJspNavBuilder.java
@@ -592,8 +592,8 @@ public class CmsJspNavBuilder {
for (CmsJspNavElement ne : curnav) {
// add the navigation entry to the result list
list.add(ne);
- // check if navigation entry is a folder and below the max level -> if so, get the navigation from this folder as well
- if (ne.isFolderLink() && (noLimit || (ne.getNavTreeLevel() < endLevel))) {
+ // check if navigation entry is a folder or navigation level and below the max level -> if so, get the navigation from this folder as well
+ if ((ne.isFolderLink() || ne.isNavigationLevel()) && (noLimit || (ne.getNavTreeLevel() < endLevel))) {
List<CmsJspNavElement> subnav = getSiteNavigation(m_cms.getSitePath(ne.getResource()), endLevel);
// copy the result of the subfolder to the result list
list.addAll(subnav); | Fixed issue in site navigation with navigation levels containing only
file sub elements like redirects. | alkacon_opencms-core | train |
d8f8c42cec662c3b9e7a6804c1a40f50626d729b | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -123,7 +123,7 @@ let driver = {
if (!input || !input.pin)
return cb({"code": 403, "msg": driverConfig.errors[403]});
let modelUserObj = new SSOT.user(soajs);
- BL.user.find(soajs, {"pin": input.pin}, modelUserObj, (error, record) => {
+ BL.user.find(soajs, {"pin": input.pin, "tId": soajs.tenant.id}, modelUserObj, (error, record) => {
if (error) {
modelUserObj.closeConnection();
return cb(error);
diff --git a/lib/user.js b/lib/user.js
index <HASH>..<HASH> 100644
--- a/lib/user.js
+++ b/lib/user.js
@@ -172,6 +172,7 @@ let bl = {
});
} else if (inputmaskData.pin) {
data.pin = inputmaskData.pin;
+ data.tId = inputmaskData.tId;
modelObj.getUserByPin(data, (err, record) => {
if (err) {
soajs.log.error(err);
diff --git a/model/mongo/user.js b/model/mongo/user.js
index <HASH>..<HASH> 100644
--- a/model/mongo/user.js
+++ b/model/mongo/user.js
@@ -290,16 +290,17 @@ User.prototype.getUserByUsernameOrId = function (data, cb) {
*
* @param cb
*/
+
User.prototype.getUserByPin = function (data, cb) {
let __self = this;
- if (!data || !data.pin) {
- let error = new Error("pin is required.");
+ if (!data || (!data.pin && !data.tId)) {
+ let error = new Error("pin and tId are required.");
return cb(error, null);
}
let condition = {
$or: [
- {'tenant.pin.code': data.pin},
- {'config.allowedTenants.tenant.pin.code': data.pin}
+ {$and: [{'tenant.pin.code': data.pin}, {'tenant.id': data.tId}]},
+ {"config.allowedTenants": {"$elemMatch": {$and: [{'tenant.pin.code': data.pin}, {'tenant.id': data.tId}]}}}
]
};
__self.mongoCore.findOne(colName, condition, null, null, (err, record) => {
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -30,7 +30,6 @@
"mocha": "3.2.0",
"nock": "*",
"shelljs": "0.8.3",
-
"soajs.mongodb.data": "2.1.*"
},
"dependencies": {
@@ -43,7 +42,6 @@
"passport-local": "1.0.0",
"passport-twitter": "1.0.4",
"request": "2.84.0",
-
"soajs.core.modules": "2.5.13"
}
}
diff --git a/test/unit/lib/user.js b/test/unit/lib/user.js
index <HASH>..<HASH> 100644
--- a/test/unit/lib/user.js
+++ b/test/unit/lib/user.js
@@ -136,7 +136,8 @@ describe("Unit test for: lib - user", function () {
});
it("test - find - pin", function (done) {
let data = {
- "pin": "1235"
+ "pin": "1235",
+ "tId": "5c0e74ba9acc3c5a84a51259"
};
BL.find(soajs, data, modelUserObj, (error, record) => {
assert.equal(record.username, "owner");
diff --git a/test/unit/model/mongo/user.js b/test/unit/model/mongo/user.js
index <HASH>..<HASH> 100644
--- a/test/unit/model/mongo/user.js
+++ b/test/unit/model/mongo/user.js
@@ -147,7 +147,7 @@ describe("Unit test for: model - user", function () {
});
});
it("test - getUserByPin", function (done) {
- modelObj.getUserByPin({"pin": "1235"}, (error, record) => {
+ modelObj.getUserByPin({"pin": "1235", "tId": "5c0e74ba9acc3c5a84a51259"}, (error, record) => {
assert.equal(record.username, "owner");
done();
}); | assured that pin login is matching code to tId within the same document of allowedTenant array | soajs_soajs.urac.driver | train |
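The fix requires the pin code and tenant id to match inside the same element of config.allowedTenants, which is what $elemMatch expresses; two independent dotted conditions can be satisfied by different array elements. A sketch of the two filter shapes as plain nested maps — no driver call is made here; with a real MongoDB client these documents would be passed to a find unchanged in shape:

package main

import "fmt"

// Query documents are modelled as plain nested maps in this sketch.
type doc = map[string]interface{}

func main() {
    // Loose form: the two conditions may be satisfied by *different*
    // elements of config.allowedTenants, which is the bug being fixed.
    loose := doc{
        "config.allowedTenants.tenant.pin.code": "1235",
        "config.allowedTenants.tenant.id":       "5c0e74ba9acc3c5a84a51259",
    }

    // $elemMatch form: both conditions must hold inside the same array
    // element, so a pin is only accepted together with its own tenant id.
    strict := doc{
        "config.allowedTenants": doc{
            "$elemMatch": doc{
                "tenant.pin.code": "1235",
                "tenant.id":       "5c0e74ba9acc3c5a84a51259",
            },
        },
    }

    fmt.Println(loose, strict)
}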
de1965f3b20a3a04b61e899e218c33d6e5d7f85f | diff --git a/src/plugins/google_analytics/google_analytics.js b/src/plugins/google_analytics/google_analytics.js
index <HASH>..<HASH> 100644
--- a/src/plugins/google_analytics/google_analytics.js
+++ b/src/plugins/google_analytics/google_analytics.js
@@ -12,6 +12,7 @@ class GoogleAnalytics extends ContainerPlugin {
if (options.gaAccount) {
this.account = options.gaAccount
this.trackerName = (options.gaTrackerName) ? options.gaTrackerName + "." : 'Clappr.'
+ this.domainName = options.gaDomainName
this.currentHDState = undefined
this.embedScript()
}
@@ -46,6 +47,8 @@ class GoogleAnalytics extends ContainerPlugin {
this.listenTo(this.container, Events.CONTAINER_HIGHDEFINITIONUPDATE, this.onHD)
this.listenTo(this.container, Events.CONTAINER_PLAYBACKDVRSTATECHANGED, this.onDVR)
_gaq.push([this.trackerName + '_setAccount', this.account]);
+ if (!!this.domainName)
+ _gaq.push([this.trackerName + '_setDomainName', this.domainName]);
}
onPlay() { | google analytics: allow setting domain name externally (closes #<I>) | clappr_clappr | train |
090deec1075d4847504e122c037b4aef505d97af | diff --git a/tests/test_statsd.py b/tests/test_statsd.py
index <HASH>..<HASH> 100644
--- a/tests/test_statsd.py
+++ b/tests/test_statsd.py
@@ -5,7 +5,7 @@ import uuid
import mock
import random
-from tornado import gen, iostream, tcpserver, testing
+from tornado import gen, locks, tcpserver, testing
from rejected import statsd
@@ -103,6 +103,7 @@ class StatsdServer(tcpserver.TCPServer):
def __init__(self, ssl_options=None, max_buffer_size=None,
read_chunk_size=None):
+ self.event = locks.Event()
self.packets = []
self.reconnect_receive = False
super(StatsdServer, self).__init__(
@@ -111,14 +112,18 @@ class StatsdServer(tcpserver.TCPServer):
def handle_stream(self, stream, address):
def read_callback(future):
+ self.event.clear()
result = future.result()
+ print(b'Received', result)
self.packets.append(result)
if b'reconnect' in result:
self.reconnect_receive = True
stream.close()
+ self.event.set()
return
inner_future = stream.read_until_regex(self.PATTERN)
self.io_loop.add_future(inner_future, read_callback)
+ self.event.set()
future = stream.read_until_regex(self.PATTERN)
self.io_loop.add_future(future, read_callback)
@@ -149,29 +154,35 @@ class TCPTestCase(testing.AsyncTestCase):
@testing.gen_test
def test_add_timing(self):
self.statsd.add_timing('foo', 2.5)
- yield gen.sleep(0.1)
+ yield self.server.event.wait()
self.assertIn(self.payload_format('foo', 2500.0, 'ms'), self.server.packets)
@testing.gen_test
def test_incr(self):
self.statsd.incr('bar', 2)
- yield gen.sleep(0.1)
+ yield self.server.event.wait()
self.assertIn(self.payload_format('bar', 2, 'c'), self.server.packets)
@testing.gen_test
def test_set_gauge(self):
self.statsd.set_gauge('baz', 98.5)
- yield gen.sleep(0.1)
+ yield self.server.event.wait()
self.assertIn(self.payload_format('baz', 98.5, 'g'), self.server.packets)
@testing.gen_test
def test_reconnect(self):
self.statsd.set_gauge('baz', 98.5)
- yield gen.sleep(0.1)
+ yield gen.sleep(1)
self.statsd.set_gauge('reconnect', 100)
- yield gen.sleep(0.1)
+ yield gen.sleep(1)
+ self.assertTrue(self.server.reconnect_receive)
self.statsd.set_gauge('bar', 10)
- yield gen.sleep(0.1)
+
+ while len(self.server.packets) < 3:
+ yield gen.moment
+
+ self.assertTrue(self.server.reconnect_receive)
+
self.assertIn(self.payload_format('baz', 98.5, 'g'), self.server.packets)
self.assertIn(self.payload_format('reconnect', 100, 'g'), self.server.packets)
self.assertIn(self.payload_format('bar', 10, 'g'), self.server.packets) | Perhaps travis needs more sleepy time | gmr_rejected | train |
4ddbe4cccf1017685edfbaf701f8248d743597e8 | diff --git a/encode_builder.go b/encode_builder.go
index <HASH>..<HASH> 100644
--- a/encode_builder.go
+++ b/encode_builder.go
@@ -1,7 +1,6 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-
package gojay
// grow grows b's capacity, if necessary, to guarantee space for
@@ -35,3 +34,26 @@ func (enc *Encoder) writeByte(c byte) {
func (enc *Encoder) writeString(s string) {
enc.buf = append(enc.buf, s...)
}
+
+func (enc *Encoder) writeStringEscape(s string) {
+ l := len(s)
+ for i := 0; i < l; i++ {
+ switch s[i] {
+ case '\\', '"':
+ enc.writeByte('\\')
+ enc.writeByte(s[i])
+ case '\n':
+ enc.writeByte('\\')
+ enc.writeByte('n')
+ case '\r':
+ enc.writeByte('\\')
+ enc.writeByte('r')
+ case '\t':
+ enc.writeByte('\\')
+ enc.writeByte('t')
+ default:
+ enc.writeByte(s[i])
+ }
+ }
+
+}
diff --git a/encode_string.go b/encode_string.go
index <HASH>..<HASH> 100644
--- a/encode_string.go
+++ b/encode_string.go
@@ -17,7 +17,7 @@ func (enc *Encoder) EncodeString(s string) error {
// encodeString encodes a string to
func (enc *Encoder) encodeString(v string) ([]byte, error) {
enc.writeByte('"')
- enc.writeString(v)
+ enc.writeStringEscape(v)
enc.writeByte('"')
return enc.buf, nil
}
@@ -29,7 +29,7 @@ func (enc *Encoder) AddString(v string) {
enc.writeByte(',')
}
enc.writeByte('"')
- enc.writeString(v)
+ enc.writeStringEscape(v)
enc.writeByte('"')
}
@@ -44,7 +44,7 @@ func (enc *Encoder) AddStringOmitEmpty(v string) {
enc.writeByte(',')
}
enc.writeByte('"')
- enc.writeString(v)
+ enc.writeStringEscape(v)
enc.writeByte('"')
}
@@ -55,9 +55,9 @@ func (enc *Encoder) AddStringKey(key, v string) {
enc.writeByte(',')
}
enc.writeByte('"')
- enc.writeString(key)
+ enc.writeStringEscape(key)
enc.writeBytes(objKeyStr)
- enc.writeString(v)
+ enc.writeStringEscape(v)
enc.writeByte('"')
}
@@ -72,8 +72,8 @@ func (enc *Encoder) AddStringKeyOmitEmpty(key, v string) {
enc.writeByte(',')
}
enc.writeByte('"')
- enc.writeString(key)
+ enc.writeStringEscape(key)
enc.writeBytes(objKeyStr)
- enc.writeString(v)
+ enc.writeStringEscape(v)
enc.writeByte('"')
}
diff --git a/encode_string_test.go b/encode_string_test.go
index <HASH>..<HASH> 100644
--- a/encode_string_test.go
+++ b/encode_string_test.go
@@ -30,6 +30,56 @@ func TestEncoderStringEncodeAPI(t *testing.T) {
builder.String(),
"Result of marshalling is different as the one expected")
})
+ t.Run("utf8-multibyte", func(t *testing.T) {
+ str := "テュールスト マーティン ヤコブ 😁"
+ builder := &strings.Builder{}
+ enc := NewEncoder(builder)
+ err := enc.EncodeString(str)
+ assert.Nil(t, err, "Error should be nil")
+ assert.Equal(
+ t,
+ `"テュールスト マーティン ヤコブ 😁"`,
+ builder.String(),
+ "Result of marshalling is different as the one expected")
+ })
+ t.Run("escaped-sequence1", func(t *testing.T) {
+ str := `テュールスト マ\ーテ
+ィン ヤコブ 😁`
+ builder := &strings.Builder{}
+ enc := NewEncoder(builder)
+ err := enc.EncodeString(str)
+ assert.Nil(t, err, "Error should be nil")
+ assert.Equal(
+ t,
+ `"テュールスト マ\\ーテ\nィン ヤコブ 😁"`,
+ builder.String(),
+ "Result of marshalling is different as the one expected")
+ })
+ t.Run("escaped-sequence2", func(t *testing.T) {
+ str := `テュールスト マ\ーテ
+ィン ヤコブ 😁 `
+ builder := &strings.Builder{}
+ enc := NewEncoder(builder)
+ err := enc.EncodeString(str)
+ assert.Nil(t, err, "Error should be nil")
+ assert.Equal(
+ t,
+ `"テュールスト マ\\ーテ\nィン ヤコブ 😁\t"`,
+ builder.String(),
+ "Result of marshalling is different as the one expected")
+ })
+ t.Run("escaped-sequence3", func(t *testing.T) {
+ str := "hello \r world"
+ builder := &strings.Builder{}
+ enc := NewEncoder(builder)
+ err := enc.EncodeString(str)
+ assert.Nil(t, err, "Error should be nil")
+ assert.Equal(
+ t,
+ `"hello \r world"`,
+ builder.String(),
+ "Result of marshalling is different as the one expected")
+ })
}
func TestEncoderStringEncodeAPIErrors(t *testing.T) { | add escaping sequence for encoding | francoispqt_gojay | train |
7a5a945d70df55b309efee442bf71ad92aef8c37 | diff --git a/src/frontend/org/voltdb/SnapshotSiteProcessor.java b/src/frontend/org/voltdb/SnapshotSiteProcessor.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/SnapshotSiteProcessor.java
+++ b/src/frontend/org/voltdb/SnapshotSiteProcessor.java
@@ -38,7 +38,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
-import com.google_voltpatches.common.collect.Lists;
import org.apache.zookeeper_voltpatches.KeeperException;
import org.apache.zookeeper_voltpatches.KeeperException.NoNodeException;
import org.apache.zookeeper_voltpatches.ZooKeeper;
@@ -59,6 +58,7 @@ import org.voltdb.utils.CompressionService;
import org.voltdb.utils.MiscUtils;
import com.google_voltpatches.common.collect.ListMultimap;
+import com.google_voltpatches.common.collect.Lists;
import com.google_voltpatches.common.collect.Maps;
import com.google_voltpatches.common.util.concurrent.ListenableFuture;
import com.google_voltpatches.common.util.concurrent.MoreExecutors;
@@ -415,6 +415,12 @@ public class SnapshotSiteProcessor {
*/
public void startSnapshotWithTargets(Collection<SnapshotDataTarget> targets, long now)
{
+ //Basically asserts that there are no tasks with null targets at this point
+ //getTarget checks and crashes
+ for (SnapshotTableTask t : m_snapshotTableTasks.values()) {
+ t.getTarget();
+ }
+
ArrayList<SnapshotDataTarget> targetsToClose = Lists.newArrayList();
for (final SnapshotDataTarget target : targets) {
if (target.needsFinalClose()) {
diff --git a/src/frontend/org/voltdb/SnapshotTableTask.java b/src/frontend/org/voltdb/SnapshotTableTask.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/SnapshotTableTask.java
+++ b/src/frontend/org/voltdb/SnapshotTableTask.java
@@ -47,10 +47,16 @@ public class SnapshotTableTask
public void setTarget(SnapshotDataTarget target)
{
+ if (target == null) {
+ VoltDB.crashLocalVoltDB("Attempted to set null target on snapshot table task");
+ }
m_target = target;
}
public SnapshotDataTarget getTarget()
{
+ if (m_target == null) {
+ VoltDB.crashLocalVoltDB("Attempted to operate on snapshot table task with a null target");
+ }
return m_target;
} | For ENG-<I>, add some fail fast checks to snapshot initiation to aid debugging | VoltDB_voltdb | train |
c60fb068e1138423690dd9fe500e31761d40efec | diff --git a/moto/cognitoidp/responses.py b/moto/cognitoidp/responses.py
index <HASH>..<HASH> 100644
--- a/moto/cognitoidp/responses.py
+++ b/moto/cognitoidp/responses.py
@@ -2,6 +2,7 @@ from __future__ import unicode_literals
import json
import os
+import re
from moto.core.responses import BaseResponse
from .models import cognitoidp_backends, find_region_by_value, UserStatus
@@ -332,18 +333,25 @@ class CognitoIdpResponse(BaseResponse):
"status": lambda u: "Enabled" if u.enabled else "Disabled",
"username": lambda u: u.username,
}
- name, value = filt.replace('"', "").replace(" ", "").split("=")
+ comparisons = {"=": lambda x, y: x == y, "^=": lambda x, y: x.startswith(y)}
+
+ match = re.match(r"([\w:]+)\s*(=|\^=)\s*\"(.*)\"", filt)
+ if match:
+ name, op, value = match.groups()
+ else:
+ raise InvalidParameterException("Error while parsing filter")
+ compare = comparisons[op]
users = [
user
for user in users
if [
attr
for attr in user.attributes
- if attr["Name"] == name and attr["Value"] == value
+ if attr["Name"] == name and compare(attr["Value"], value)
]
or (
name in inherent_attributes
- and inherent_attributes[name](user) == value
+ and compare(inherent_attributes[name](user), value)
)
]
response = {"Users": [user.to_json(extended=True) for user in users]}
diff --git a/tests/test_cognitoidp/test_cognitoidp.py b/tests/test_cognitoidp/test_cognitoidp.py
index <HASH>..<HASH> 100644
--- a/tests/test_cognitoidp/test_cognitoidp.py
+++ b/tests/test_cognitoidp/test_cognitoidp.py
@@ -1209,18 +1209,55 @@ def test_list_users():
UserAttributes=[{"Name": "phone_number", "Value": "+33666666666"}],
)
result = conn.list_users(
- UserPoolId=user_pool_id, Filter='phone_number="+33666666666'
+ UserPoolId=user_pool_id, Filter='phone_number="+33666666666"'
)
result["Users"].should.have.length_of(1)
result["Users"][0]["Username"].should.equal(username_bis)
# checking Filter with space
result = conn.list_users(
- UserPoolId=user_pool_id, Filter='phone_number = "+33666666666'
+ UserPoolId=user_pool_id, Filter='phone_number = "+33666666666"'
)
result["Users"].should.have.length_of(1)
result["Users"][0]["Username"].should.equal(username_bis)
+ user0_username = "[email protected]"
+ conn.admin_create_user(
+ UserPoolId=user_pool_id,
+ Username=user0_username,
+ UserAttributes=[{"Name": "phone_number", "Value": "+48555555555"}],
+ )
+
+ # checking Filter with prefix operator
+ result = conn.list_users(UserPoolId=user_pool_id, Filter='phone_number ^= "+48"')
+ result["Users"].should.have.length_of(1)
+ result["Users"][0]["Username"].should.equal(user0_username)
+
+ # empty value Filter should also be supported
+ result = conn.list_users(UserPoolId=user_pool_id, Filter='family_name=""')
+ result["Users"].should.have.length_of(0)
+
+
+@mock_cognitoidp
+def test_list_users_incorrect_filter():
+ conn = boto3.client("cognito-idp", "us-west-2")
+
+ user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
+
+ with pytest.raises(conn.exceptions.InvalidParameterException) as exc:
+ conn.list_users(UserPoolId=user_pool_id, Filter="username = foo")
+ _assert_filter_parsing_error(exc)
+
+ with pytest.raises(conn.exceptions.InvalidParameterException) as exc:
+ conn.list_users(UserPoolId=user_pool_id, Filter="username=")
+ _assert_filter_parsing_error(exc)
+
+
+def _assert_filter_parsing_error(exc):
+ err = exc.value.response["Error"]
+ assert err["Code"].should.equal("InvalidParameterException")
+ assert err["Message"].should.equal("Error while parsing filter")
+
@mock_cognitoidp
def test_list_users_inherent_attributes(): | cognito-idp – Added format validation and implemented prefix operator for Filter param of list_users (#<I>) | spulec_moto | train |
ac051fc2bb157ca38ff7614124421b2bee5be553 | diff --git a/actions/class.Runner.php b/actions/class.Runner.php
index <HASH>..<HASH> 100644
--- a/actions/class.Runner.php
+++ b/actions/class.Runner.php
@@ -971,6 +971,10 @@ class taoQtiTest_actions_Runner extends tao_actions_ServiceModule
/**
* Manage the bidirectional communication
+ * @throws common_Exception
+ * @throws common_exception_Error
+ * @throws common_exception_Unauthorized
+ * @throws common_ext_ExtensionException
*/
public function messages()
{
@@ -978,10 +982,10 @@ class taoQtiTest_actions_Runner extends tao_actions_ServiceModule
$this->checkSecurityToken(); // will return 500 on error
- try {
- // close the PHP session to prevent session overwriting and loss of security token for secured queries
- session_write_close();
+ // close the PHP session to prevent session overwriting and loss of security token for secured queries
+ session_write_close();
+ try {
$input = taoQtiCommon_helpers_Utils::readJsonPayload();
if (!$input) {
$input = [];
diff --git a/models/classes/runner/synchronisation/synchronisationService/ResponseGenerator.php b/models/classes/runner/synchronisation/synchronisationService/ResponseGenerator.php
index <HASH>..<HASH> 100644
--- a/models/classes/runner/synchronisation/synchronisationService/ResponseGenerator.php
+++ b/models/classes/runner/synchronisation/synchronisationService/ResponseGenerator.php
@@ -27,16 +27,13 @@ namespace oat\taoQtiTest\models\runner\synchronisation\synchronisationService;
use common_Exception;
use common_exception_InconsistentData;
use common_Logger;
-use oat\oatbox\service\ServiceManagerAwareTrait;
+use oat\oatbox\service\ConfigurableService;
use oat\taoQtiTest\models\runner\QtiRunnerServiceContext;
use oat\taoQtiTest\models\runner\synchronisation\TestRunnerAction;
use ResolverException;
-use Zend\ServiceManager\ServiceLocatorAwareInterface;
-class ResponseGenerator implements ServiceLocatorAwareInterface
+class ResponseGenerator extends ConfigurableService
{
- use ServiceManagerAwareTrait;
-
/**
* Typical amount of time added on TimePoints to avoid timestamp collisions.
* This value will be used to adjust intervals between moves in the synced time line.
diff --git a/models/classes/runner/synchronisation/synchronisationService/TestRunnerActionResolver.php b/models/classes/runner/synchronisation/synchronisationService/TestRunnerActionResolver.php
index <HASH>..<HASH> 100644
--- a/models/classes/runner/synchronisation/synchronisationService/TestRunnerActionResolver.php
+++ b/models/classes/runner/synchronisation/synchronisationService/TestRunnerActionResolver.php
@@ -25,15 +25,12 @@ declare(strict_types=1);
namespace oat\taoQtiTest\models\runner\synchronisation\synchronisationService;
use common_exception_InconsistentData;
-use oat\oatbox\service\ServiceManagerAwareTrait;
+use oat\oatbox\service\ConfigurableService;
use oat\taoQtiTest\models\runner\synchronisation\TestRunnerAction;
use ResolverException;
-use Zend\ServiceManager\ServiceLocatorAwareInterface;
-class TestRunnerActionResolver implements ServiceLocatorAwareInterface
+class TestRunnerActionResolver extends ConfigurableService
{
- use ServiceManagerAwareTrait;
-
/**
* @param array $data
* @param array $availableActions | extended new services from ConfigurableService, because of service autoloading | oat-sa_extension-tao-testqti | train |
b5573b70809bed76a0397bb5bb196b6b2144d3d4 | diff --git a/src/main/java/org/junit/experimental/ParallelComputer.java b/src/main/java/org/junit/experimental/ParallelComputer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/junit/experimental/ParallelComputer.java
+++ b/src/main/java/org/junit/experimental/ParallelComputer.java
@@ -9,9 +9,7 @@ import java.util.concurrent.Future;
import org.junit.runner.Computer;
import org.junit.runner.Runner;
-import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.ParentRunner;
-import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.RunnerBuilder;
import org.junit.runners.model.RunnerInterceptor;
@@ -34,47 +32,47 @@ public class ParallelComputer extends Computer {
return new ParallelComputer(false, true);
}
- private static <T> Runner parallelize(ParentRunner<T> runner) {
- runner.setRunnerInterceptor(new RunnerInterceptor() {
- private final List<Future<Object>> fResults= new ArrayList<Future<Object>>();
+ private static <T> Runner parallelize(Runner runner) {
+ if (runner instanceof ParentRunner<?>) {
+ ((ParentRunner<?>) runner).setRunnerInterceptor(new RunnerInterceptor() {
+ private final List<Future<Object>> fResults= new ArrayList<Future<Object>>();
- private final ExecutorService fService= Executors
- .newCachedThreadPool();
+ private final ExecutorService fService= Executors
+ .newCachedThreadPool();
- public void runChild(final Runnable childStatement) {
- fResults.add(fService.submit(new Callable<Object>() {
- public Object call() throws Exception {
- childStatement.run();
- return null;
- }
- }));
- }
+ public void runChild(final Runnable childStatement) {
+ fResults.add(fService.submit(new Callable<Object>() {
+ public Object call() throws Exception {
+ childStatement.run();
+ return null;
+ }
+ }));
+ }
- public void finished() {
- for (Future<Object> each : fResults)
- try {
- each.get();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- });
+ public void finished() {
+ for (Future<Object> each : fResults)
+ try {
+ each.get();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ });
+ }
return runner;
}
@Override
public Runner getSuite(RunnerBuilder builder, java.lang.Class<?>[] classes)
throws InitializationError {
- Suite suite= (Suite) super.getSuite(builder, classes);
+ Runner suite= super.getSuite(builder, classes);
return fClasses ? parallelize(suite) : suite;
}
@Override
protected Runner getRunner(RunnerBuilder builder, Class<?> testClass)
throws Throwable {
- // TODO (May 4, 2009 4:09:16 PM): no guarantees here
- BlockJUnit4ClassRunner runner= (BlockJUnit4ClassRunner) super
- .getRunner(builder, testClass);
+ Runner runner= super.getRunner(builder, testClass);
return fMethods ? parallelize(runner) : runner;
}
} | ParallelComputer doesn't have to assume BlockJUnit4Runner anymore | junit-team_junit4 | train |
12f9c51ba4d7fa9ad6e916bed6e39ab67f6d2687 | diff --git a/c7n/resources/sqs.py b/c7n/resources/sqs.py
index <HASH>..<HASH> 100644
--- a/c7n/resources/sqs.py
+++ b/c7n/resources/sqs.py
@@ -15,7 +15,7 @@ from __future__ import absolute_import, division, print_function, unicode_litera
from botocore.exceptions import ClientError
-from c7n.filters import CrossAccountAccessFilter
+from c7n.filters import CrossAccountAccessFilter, MetricsFilter
from c7n.manager import resources
from c7n.utils import local_session
from c7n.query import QueryResourceManager
@@ -71,6 +71,15 @@ class SQS(QueryResourceManager):
return list(filter(None, w.map(_augment, resources)))
[email protected]_registry.register('metrics')
+class MetricsFilter(MetricsFilter):
+
+ def get_dimensions(self, resource):
+ return [
+ {'Name': 'QueueName',
+ 'Value': resource['QueueUrl'].rsplit('/', 1)[-1]}]
+
+
@SQS.filter_registry.register('cross-account')
class SQSCrossAccount(CrossAccountAccessFilter):
permissions = ('sqs:GetQueueAttributes',) | sqs metrics - dimension fix for queue name (#<I>) | cloud-custodian_cloud-custodian | train |
7196943ab96e759e31ee6cdf2ec526cd7a844c0a | diff --git a/src/main/org/openscience/cdk/fingerprint/Fingerprinter.java b/src/main/org/openscience/cdk/fingerprint/Fingerprinter.java
index <HASH>..<HASH> 100644
--- a/src/main/org/openscience/cdk/fingerprint/Fingerprinter.java
+++ b/src/main/org/openscience/cdk/fingerprint/Fingerprinter.java
@@ -154,12 +154,11 @@ public class Fingerprinter implements IFingerprinter {
long after = System.currentTimeMillis();
logger.debug("time for aromaticity calculation: " + (after - before) + " milliseconds");
logger.debug("Finished Aromaticity Detection");
- Map<String,String> paths = findPathes(container, searchDepth);
BitSet bitSet = new BitSet(size);
- for (String path : paths.values()) {
- position = new java.util.Random(path.hashCode()).nextInt(size);
- logger.debug("Setting bit " + position + " for " + path);
+ int[] hashes = findPathes(container, searchDepth);
+ for (int hash : hashes) {
+ position = new java.util.Random(hash).nextInt(size);
bitSet.set(position);
}
@@ -187,8 +186,7 @@ public class Fingerprinter implements IFingerprinter {
* @param searchDepth The maximum path length desired
* @return A Map of path strings, keyed on themselves
*/
- protected Map<String,String> findPathes(IAtomContainer container, int searchDepth) {
- Map<String,String> paths = new HashMap<String,String>();
+ protected int[] findPathes(IAtomContainer container, int searchDepth) {
List<StringBuffer> allPaths = new ArrayList<StringBuffer>();
@@ -237,8 +235,13 @@ public class Fingerprinter implements IFingerprinter {
if (cleanPath.contains(s2)) continue;
cleanPath.add(s2);
}
- for (String s : cleanPath) paths.put(s, s);
- return paths;
+
+ // convert paths to hashes
+ int[] hashes = new int[cleanPath.size()];
+ int i= 0;
+ for (String s: cleanPath) hashes[i++] = s.hashCode();
+
+ return hashes;
}
private String convertSymbol(String symbol) { | Updated findPathes so that it directly returns the hash values of the path strings, rather than creating a new HashMap to hold the paths and then evaluate the hashes in the caller. This saves a bit of memory in my tests and does not seem to degrade performance
git-svn-id: <URL> | cdk_cdk | train |
5857d63460c5ce965372fb0b7955e30d55975f64 | diff --git a/src/proxy.js b/src/proxy.js
index <HASH>..<HASH> 100644
--- a/src/proxy.js
+++ b/src/proxy.js
@@ -361,25 +361,27 @@ function peek(draft, prop) {
}
function markChanged(state) {
- let resetDrafts = true
- let assignFn = assign
- if (isMap(state.base)) {
- assignFn = assignMap
- } else if (isSet(state.base)) {
- assignFn = assignSet
- // We need to keep track of how non-proxied objects are related to proxied ones.
- // For other data structures that support keys we can use those keys to access the item, notwithstanding it being a proxy or not.
- // Sets, however, do not have keys.
- // We use original objects as keys and keep proxified values as values.
- resetDrafts = false
- }
if (!state.modified) {
state.modified = true
- state.copy = assignFn(shallowCopy(state.base), state.drafts)
- if (resetDrafts) {
+
+ const {base, drafts, parent} = state
+ const copy = shallowCopy(base)
+
+ if (isSet(base)) {
+ // Note: The `drafts` property is preserved for Set objects, since
+ // we need to keep track of which values are drafted.
+ assignSet(copy, drafts)
+ } else {
+ // Merge nested drafts into the copy.
+ if (isMap(base)) assignMap(copy, drafts)
+ else assign(copy, drafts)
state.drafts = null
}
- if (state.parent) markChanged(state.parent)
+
+ state.copy = copy
+ if (parent) {
+ markChanged(parent)
+ }
}
} | refactor: markChanged function | immerjs_immer | train |
8a97b67885b4251eadefb5ef40358b1afcbe294b | diff --git a/src-modules/org/opencms/workplace/tools/sites/CmsSitesList.java b/src-modules/org/opencms/workplace/tools/sites/CmsSitesList.java
index <HASH>..<HASH> 100644
--- a/src-modules/org/opencms/workplace/tools/sites/CmsSitesList.java
+++ b/src-modules/org/opencms/workplace/tools/sites/CmsSitesList.java
@@ -73,6 +73,9 @@ public class CmsSitesList extends A_CmsListDialog {
/** The path of the fav icon. */
protected static final String LIST_ICON_FAVICON = "tools/sites/icons/small/default-favicon.png";
+ /** Holds - keys: site roots and values: favicon links. */
+ protected static Map<String, String> m_icons = new HashMap<String, String>();
+
/** A parameter name for the title of the site. */
protected static final String PARAM_SITE_TITLE = "sitetitle";
@@ -136,9 +139,6 @@ public class CmsSitesList extends A_CmsListDialog {
/** Path to the module reports. */
private static final String PATH_REPORTS = "/system/workplace/admin/sites/reports/";
- /** Holds - keys: site roots and values: favicon links. */
- protected Map<String, String> m_icons = new HashMap<String, String>();
-
/**
* Public constructor.<p>
*
@@ -262,6 +262,9 @@ public class CmsSitesList extends A_CmsListDialog {
// noop
}
+ // clear the icons
+ m_icons.clear();
+
for (CmsSite site : sites) {
if (site.getSiteMatcher() != null) {
CmsListItem item = getList().newItem(site.getSiteRoot()); | Improved favicons refresh for site management wp tool. | alkacon_opencms-core | train |
8d525cabedbeb7665fbcea36540b043c1f3aeae0 | diff --git a/pixiedust/display/chart/mpld3ChartDisplay.py b/pixiedust/display/chart/mpld3ChartDisplay.py
index <HASH>..<HASH> 100644
--- a/pixiedust/display/chart/mpld3ChartDisplay.py
+++ b/pixiedust/display/chart/mpld3ChartDisplay.py
@@ -182,7 +182,8 @@ class Mpld3ChartDisplay(ChartDisplay):
if self.supportsLegend(handlerId):
showLegend = self.options.get("showLegend", "true")
if showLegend == "true":
- l = ax.legend(title='')
+ l = ax.legend(title='',)
+ l.get_frame().set_alpha(0)
numColumns = len(keyFieldValues)
for i, text in enumerate(l.get_texts()):
text.set_color(colormap(1.*i/numColumns)) | Hack to fix legend bug in mpld3 | pixiedust_pixiedust | train |
44005c1344b4732409896791583d0f937d4f59ec | diff --git a/src/sap.m/src/sap/m/SegmentedButton.js b/src/sap.m/src/sap/m/SegmentedButton.js
index <HASH>..<HASH> 100644
--- a/src/sap.m/src/sap/m/SegmentedButton.js
+++ b/src/sap.m/src/sap/m/SegmentedButton.js
@@ -930,6 +930,7 @@ function(
this.addStyleClass("sapMSegBSelectWrapper");
this._lazyLoadSelectForm();
this._syncSelect();
+ this._syncAriaAssociations();
};
/**
@@ -941,6 +942,24 @@ function(
this.removeStyleClass("sapMSegBSelectWrapper");
};
+ SegmentedButton.prototype._syncAriaAssociations = function () {
+ var oSelect = this.getAggregation("_select");
+ this.getAriaLabelledBy().forEach(function (oLabel) {
+ if (oSelect.getAriaLabelledBy().indexOf(oLabel) === -1) {
+ oSelect.addAriaLabelledBy(oLabel);
+ }
+ });
+
+ // sap.m.Select doesn't have an ariaDescribedBy association, so we copy
+ // the ariaDescribedBy association elements from the sap.m.SegmentedButton instance
+ // into the ariaLabelledBy association in the sap.m.Select instance
+ this.getAriaDescribedBy().forEach(function (oDesc) {
+ if (oSelect.getAriaLabelledBy().indexOf(oDesc) === -1) {
+ oSelect.addAriaLabelledBy(oDesc);
+ }
+ });
+ };
+
/**
* Image does not have an onload event but we need to recalculate the button sizes - after the image is loaded
* we override the onload method once and call the calculation method after the original method is called.
diff --git a/src/sap.m/test/sap/m/qunit/SegmentedButton.qunit.js b/src/sap.m/test/sap/m/qunit/SegmentedButton.qunit.js
index <HASH>..<HASH> 100755
--- a/src/sap.m/test/sap/m/qunit/SegmentedButton.qunit.js
+++ b/src/sap.m/test/sap/m/qunit/SegmentedButton.qunit.js
@@ -162,6 +162,30 @@ sap.ui.define([
oSegmentedButton.destroy();
});
+ QUnit.test("Accessibility state is written when SegmentedButton is rendered as a sap.m.Select", function(assert) {
+ // prepare
+ var oSegmentedButton = new SegmentedButton({
+ ariaLabelledBy: [
+ new Label("labelledBy_test", {text: "labelledBy_test"})
+ ],
+ ariaDescribedBy: [
+ new Label("describedBy_test", {text: "describedBy_test"})
+ ]
+ }),
+ oSelect;
+
+ // act
+ oSegmentedButton._toSelectMode();
+ oSelect = oSegmentedButton.getAggregation("_select");
+
+ // assert
+ assert.equal(oSelect.getAriaLabelledBy()[0], "labelledBy_test", "select control has corret ariaLabelledBy values from sap.m.SegmentedButton");
+ assert.equal(oSelect.getAriaLabelledBy()[1], "describedBy_test", "select control has corret ariaDescribedBy values from sap.m.SegmentedButton");
+
+ // clean
+ oSegmentedButton.destroy();
+ });
+
QUnit.test("SegmentedButton selection before and after rendering", function(assert) {
// Arrange
var oButton1 = new Button(), | [FIX] sap.m.SegmentedButton: accessibility state is now maintained corretly
Accessibility attributes are added to the DOM representation of the SegmentedButton,
when a sap.m.Select control is rendered inside.
Change-Id: I8a2f<I>f<I>a<I>beb<I>c4d6b0f<I>fc<I>dc3e1c8
BCP: <I> | SAP_openui5 | train |
c1d54cd30b5eea7d48d8188e9bde16974a4c2cab | diff --git a/oscrypto/_tls.py b/oscrypto/_tls.py
index <HASH>..<HASH> 100644
--- a/oscrypto/_tls.py
+++ b/oscrypto/_tls.py
@@ -123,6 +123,28 @@ def get_dh_params_length(server_handshake_bytes):
return output
+def parse_alert(server_handshake_bytes):
+ """
+ Parses the handshake for protocol alerts
+
+ :param server_handshake_bytes:
+ A byte string of the handshake data received from the server
+
+ :return:
+ None or an 2-element tuple of integers:
+ 0: 1 (warning) or 2 (fatal)
+ 1: The alert description (see https://tools.ietf.org/html/rfc5246#section-7.2)
+ """
+
+ for record_type, _, record_data in _parse_tls_records(server_handshake_bytes):
+ if record_type != b'\x15':
+ continue
+ if len(record_data) != 2:
+ return None
+ return (int_from_bytes(record_data[0:1]), int_from_bytes(record_data[1:2]))
+ return None
+
+
def parse_session_info(server_handshake_bytes, client_handshake_bytes):
"""
Parse the TLS handshake from the client to the server to extract information
diff --git a/oscrypto/_win/tls.py b/oscrypto/_win/tls.py
index <HASH>..<HASH> 100644
--- a/oscrypto/_win/tls.py
+++ b/oscrypto/_win/tls.py
@@ -30,12 +30,13 @@ from ._secur32 import secur32, Secur32Const, handle_error
from ._crypt32 import crypt32, Crypt32Const, handle_error as handle_crypt32_error
from ._kernel32 import kernel32
from .._types import type_name, str_cls, byte_cls, int_types
-from ..errors import TLSError, TLSVerificationError
+from ..errors import TLSError, TLSVerificationError, TLSDisconnectError, TLSGracefulDisconnectError
from .._tls import (
detect_client_auth_request,
detect_other_protocol,
extract_chain,
get_dh_params_length,
+ parse_alert,
parse_session_info,
raise_client_auth,
raise_dh_params,
@@ -45,6 +46,7 @@ from .._tls import (
raise_hostname,
raise_no_issuer,
raise_protocol_error,
+ raise_protocol_version,
raise_revoked,
raise_self_signed,
raise_verification,
@@ -806,6 +808,9 @@ class TLSSocket(object):
if result == Secur32Const.SEC_E_ILLEGAL_MESSAGE:
if detect_client_auth_request(handshake_server_bytes):
raise_client_auth()
+ alert_info = parse_alert(handshake_server_bytes)
+ if alert_info and alert_info == (2, 70):
+ raise_protocol_version()
raise_handshake()
if result == Secur32Const.SEC_E_WRONG_PRINCIPAL:
@@ -1452,10 +1457,9 @@ class TLSSocket(object):
"""
if self._remote_closed:
- message = 'The remote end closed the connection'
+ raise TLSGracefulDisconnectError('The remote end closed the connection')
else:
- message = 'The connection was already closed'
- raise TLSError(message)
+ raise TLSDisconnectError('The connection was already closed')
@property
def certificate(self):
diff --git a/tests/test_tls.py b/tests/test_tls.py
index <HASH>..<HASH> 100644
--- a/tests/test_tls.py
+++ b/tests/test_tls.py
@@ -67,10 +67,11 @@ def connection_timeout(timeout=30):
t = threading.Timer(timeout, lambda: thread.interrupt_main())
t.start()
f(*args)
- if not osx_pypy_bug:
- t.cancel()
except (KeyboardInterrupt):
raise_with(AssertionError("Timed out"), sys.exc_info()[2])
+ finally:
+ if not osx_pypy_bug:
+ t.cancel()
return wrapped
return timeout_decorator
@@ -352,3 +353,9 @@ class TLSTests(unittest.TestCase):
# there aren't buffer overlfow issues in TLSSocket()
c = HttpsClient()
c.download('https://packagecontrol.io/channel_v3.json', 15)
+
+ @connection_timeout()
+ def test_tls_protocol_version(self):
+ session = tls.TLSSession(set(['TLSv1', 'TLSv1.1']))
+ with assert_exception(self, errors.TLSError, 'TLS handshake failed - protocol version error'):
+ s = tls.TLSSocket('github.com', 443, session=session) | Handle graceful disconnects of tls.TLSSocket() on Windows | wbond_oscrypto | train |
e3d98862afff536b7056a53c35d386c0a036ce7e | diff --git a/lib/migrant/schema.rb b/lib/migrant/schema.rb
index <HASH>..<HASH> 100644
--- a/lib/migrant/schema.rb
+++ b/lib/migrant/schema.rb
@@ -106,6 +106,10 @@ module Migrant
def requires_migration?
false # All added to base table
end
+
+ def add_association(association)
+ parent_schema.add_association(association)
+ end
end
# Why does this class exist? Excellent question.
diff --git a/test/rails_app/app/models/customer.rb b/test/rails_app/app/models/customer.rb
index <HASH>..<HASH> 100644
--- a/test/rails_app/app/models/customer.rb
+++ b/test/rails_app/app/models/customer.rb
@@ -1,4 +1,6 @@
class Customer < User
+ belongs_to :category
+
structure do
money_spent "$5.00"
money_gifted "NOK 550.00"
diff --git a/test/test_data_schema.rb b/test/test_data_schema.rb
index <HASH>..<HASH> 100644
--- a/test/test_data_schema.rb
+++ b/test/test_data_schema.rb
@@ -13,7 +13,8 @@ class TestDataSchema < Test::Unit::TestCase
should "generate a foreign key field for a belongs_to association" do
assert_schema(Business, :user_id, :type => :integer)
assert_schema(BusinessCategory, :business_id, :type => :integer)
- assert_schema(BusinessCategory, :category_id, :type => :integer)
+ assert_schema(BusinessCategory, :category_id, :type => :integer)
+ assert_schema(User, :category_id, :type => :integer)
end
should "generate foreign key fields for a *polymorphic* belongs_to association" do
diff --git a/test/test_migration_generator.rb b/test/test_migration_generator.rb
index <HASH>..<HASH> 100644
--- a/test/test_migration_generator.rb
+++ b/test/test_migration_generator.rb
@@ -236,8 +236,6 @@ class TestMigrationGenerator < Test::Unit::TestCase
verified true, default: true
end
- #rake_migrate
- #generate_migrations
run_against_template('modified_verified')
end
end
diff --git a/test/verified_output/migrations/create_users.rb b/test/verified_output/migrations/create_users.rb
index <HASH>..<HASH> 100644
--- a/test/verified_output/migrations/create_users.rb
+++ b/test/verified_output/migrations/create_users.rb
@@ -3,6 +3,7 @@ class CreateUsers < ActiveRecord::Migration
create_table :users do |t|
t.string :name
t.string :email
+ t.integer :category_id
t.string :encrypted_password, :limit=>48
t.string :password_salt, :limit=>42
t.decimal :money_spent, :precision=>10, :scale=>2 | Fixed associations failing to be detected on inherited AR models | pascalh1011_migrant | train |
de0ebd7eb893a1745ff6045db0abd8e46a838e6e | diff --git a/nyawc/Crawler.py b/nyawc/Crawler.py
index <HASH>..<HASH> 100644
--- a/nyawc/Crawler.py
+++ b/nyawc/Crawler.py
@@ -202,6 +202,7 @@ class Crawler(object):
if action == CrawlerActions.DO_SKIP_TO_NEXT:
self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
+ self.__spawn_new_requests()
if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
self.queue.move(queue_item, QueueItem.STATUS_IN_PROGRESS) | Continue spawning new requests (if there are any) when skipping a
request. #4 | tijme_not-your-average-web-crawler | train |
645bd3ad5312bcb74a78796c391ae05dbb86394d | diff --git a/test/test_git.rb b/test/test_git.rb
index <HASH>..<HASH> 100644
--- a/test/test_git.rb
+++ b/test/test_git.rb
@@ -55,7 +55,7 @@ class TestGit < Test::Unit::TestCase
end
def test_raises_on_slow_shell
- Grit::Git.git_timeout = 0.001
+ Grit::Git.git_timeout = 0.0000001
assert_raises Grit::Git::GitTimeout do
@git.version
end | set an even lower timeout in the test_raises_on_slow_shell test, to make it
pass on machines faster than an Atari | mojombo_grit | train |
57bd1d6ff5a45dff2972e2fd4474007976393554 | diff --git a/python/ray/worker.py b/python/ray/worker.py
index <HASH>..<HASH> 100644
--- a/python/ray/worker.py
+++ b/python/ray/worker.py
@@ -1053,6 +1053,28 @@ def _initialize_serialization(worker=global_worker):
custom_serializer=array_custom_serializer,
custom_deserializer=array_custom_deserializer)
+ def ordered_dict_custom_serializer(obj):
+ return list(obj.keys()), list(obj.values())
+
+ def ordered_dict_custom_deserializer(obj):
+ return collections.OrderedDict(zip(obj[0], obj[1]))
+
+ worker.serialization_context.register_type(
+ collections.OrderedDict, 20 * b"\x02", pickle=False,
+ custom_serializer=ordered_dict_custom_serializer,
+ custom_deserializer=ordered_dict_custom_deserializer)
+
+ def default_dict_custom_serializer(obj):
+ return list(obj.keys()), list(obj.values()), obj.default_factory
+
+ def default_dict_custom_deserializer(obj):
+ return collections.defaultdict(obj[2], zip(obj[0], obj[1]))
+
+ worker.serialization_context.register_type(
+ collections.defaultdict, 20 * b"\x03", pickle=False,
+ custom_serializer=default_dict_custom_serializer,
+ custom_deserializer=default_dict_custom_deserializer)
+
if worker.mode in [SCRIPT_MODE, SILENT_MODE]:
# These should only be called on the driver because _register_class
# will export the class to all of the workers.
diff --git a/src/thirdparty/download_thirdparty.sh b/src/thirdparty/download_thirdparty.sh
index <HASH>..<HASH> 100755
--- a/src/thirdparty/download_thirdparty.sh
+++ b/src/thirdparty/download_thirdparty.sh
@@ -13,4 +13,4 @@ fi
cd $TP_DIR/arrow
git fetch origin master
-git checkout 84e5e02fbf412c979387b0a53b0ad0c6d5c5e790
+git checkout 49e02d27227332b06528816bbf73e434a4e1ebcb
diff --git a/test/runtest.py b/test/runtest.py
index <HASH>..<HASH> 100644
--- a/test/runtest.py
+++ b/test/runtest.py
@@ -7,7 +7,7 @@ import string
import sys
import time
import unittest
-from collections import defaultdict, namedtuple
+from collections import defaultdict, namedtuple, OrderedDict
import numpy as np
@@ -354,10 +354,18 @@ class APITest(unittest.TestCase):
ray.get(ray.put(TempClass()))
- # Note that the below actually returns a dictionary and not a
- # defaultdict. This is a bug
- # (https://github.com/ray-project/ray/issues/512).
- ray.get(ray.put(defaultdict(lambda: 0)))
+ # Test subtypes of dictionaries.
+ value_before = OrderedDict([("hello", 1), ("world", 2)])
+ object_id = ray.put(value_before)
+ self.assertEqual(value_before, ray.get(object_id))
+
+ value_before = defaultdict(lambda: 0, [("hello", 1), ("world", 2)])
+ object_id = ray.put(value_before)
+ self.assertEqual(value_before, ray.get(object_id))
+
+ value_before = defaultdict(lambda: [], [("hello", 1), ("world", 2)])
+ object_id = ray.put(value_before)
+ self.assertEqual(value_before, ray.get(object_id))
# Test passing custom classes into remote functions from the driver.
@ray.remote | Specialize Serialization for OrderedDict (#<I>)
Specialize Serialization for OrderedDict and defaultdict | ray-project_ray | train |
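
The custom serializers boil down to "a dict subtype becomes a tuple of plain lists (plus the default_factory for defaultdict) and is rebuilt on the other side". A plain-Python round trip of those same functions, without Ray's serialization context, just to show the intermediate representation:

    import collections

    def ordered_dict_to_obj(d):
        return list(d.keys()), list(d.values())

    def ordered_dict_from_obj(obj):
        return collections.OrderedDict(zip(obj[0], obj[1]))

    def default_dict_to_obj(d):
        return list(d.keys()), list(d.values()), d.default_factory

    def default_dict_from_obj(obj):
        return collections.defaultdict(obj[2], zip(obj[0], obj[1]))

    od = collections.OrderedDict([("hello", 1), ("world", 2)])
    assert ordered_dict_from_obj(ordered_dict_to_obj(od)) == od

    dd = collections.defaultdict(list, [("hello", [1]), ("world", [2])])
    restored = default_dict_from_obj(default_dict_to_obj(dd))
    assert restored == dd and restored["missing"] == []   # factory survives
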
a43647605f1169b0c9515ba2ef02e24a5fc22319 | diff --git a/force/force.go b/force/force.go
index <HASH>..<HASH> 100644
--- a/force/force.go
+++ b/force/force.go
@@ -14,7 +14,7 @@ const (
testClientSecret = "4165772184959202901"
testUserName = "[email protected]"
testPassword = "golangrocks2"
- testSecurityToken = "JcQ8eqU5MawUq4z0vSbGKbqXy"
+ testSecurityToken = "ZvjruzWBRGSlsXY7zTESjFaLM"
testEnvironment = "production"
) | Password Reset caused Security Token Reset | nimajalali_go-force | train |
49938ff48fe74dfe64dfc0a183a9caa3034ca85d | diff --git a/plenum/test/cli/test_basic_client_commands.py b/plenum/test/cli/test_basic_client_commands.py
index <HASH>..<HASH> 100644
--- a/plenum/test/cli/test_basic_client_commands.py
+++ b/plenum/test/cli/test_basic_client_commands.py
@@ -10,9 +10,9 @@ def testClientNames(cli, validNodeNames, createAllNodes):
"""
cName = "Joe"
- def checkClientNotAddedWithNodeName(name):
+ def checkClientNotAddedWithNodeName(curClientCount, name):
# Count of cli.clients should still be 1
- assert len(cli.clients) == 1
+ assert len(cli.clients) == curClientCount
# nm should not be in cli.client
assert name not in cli.clients
@@ -21,9 +21,11 @@ def testClientNames(cli, validNodeNames, createAllNodes):
assert msg == "Client name cannot start with node names, which are {}." \
"".format(', '.join(validNodeNames))
+ cliCountBefore = len(cli.clients)
cli.enterCmd("new client {}".format(cName))
# Count of cli.clients should be 1
- assert len(cli.clients) == 1
+ curClientCount = len(cli.clients)
+ assert curClientCount == cliCountBefore + 1
# Client name should be in cli.client
assert cName in cli.clients
@@ -34,15 +36,15 @@ def testClientNames(cli, validNodeNames, createAllNodes):
for i, nm in enumerate(validNodeNames):
# Adding client with name same as that of a node
cli.enterCmd("new client {}".format(nm))
- checkClientNotAddedWithNodeName(nm)
+ checkClientNotAddedWithNodeName(curClientCount, nm)
# Adding client with name prefixed with that of a node
cli.enterCmd("new client {}{}".format(nm, randomString(3)))
- checkClientNotAddedWithNodeName(nm)
+ checkClientNotAddedWithNodeName(curClientCount, nm)
cli.enterCmd("new client {}".format(cName))
# Count of cli.clients should be 1
- assert len(cli.clients) == 1
+ assert len(cli.clients) == curClientCount
# Client name should be in cli.client
assert cName in cli.clients
diff --git a/plenum/test/cli/test_command_reg_ex.py b/plenum/test/cli/test_command_reg_ex.py
index <HASH>..<HASH> 100644
--- a/plenum/test/cli/test_command_reg_ex.py
+++ b/plenum/test/cli/test_command_reg_ex.py
@@ -1,17 +1,21 @@
+import pytest
from prompt_toolkit.contrib.regular_languages.compiler import compile
-
from plenum.cli.cli_helper import getUtilGrams, getNodeGrams, getClientGrams, getAllGrams
-def test_command_reg_ex(cmd):
[email protected]("module")
+def grammar():
utilGrams = getUtilGrams()
nodeGrams = getNodeGrams()
clientGrams = getClientGrams()
grams = getAllGrams(utilGrams, nodeGrams, clientGrams)
- grammar = compile("".join(grams))
- res = grammar.match(cmd)
- assert res
+ return compile("".join(grams))
+
+
[email protected]("module")
+def checkIfMatched(grammar, cmd):
+ assert grammar.match(cmd)
-def test_new_keypair_command_reg_ex():
- test_command_reg_ex("new keypair")
\ No newline at end of file
+def test_new_keypair_command_reg_ex(grammar):
+ checkIfMatched(grammar, "new keypair")
\ No newline at end of file | fixed testClientNames and testCommandRegEx tests | hyperledger_indy-plenum | train |
3db4039ca4304749d373912a62c64c3713cc0900 | diff --git a/benchexec/tools/skink.py b/benchexec/tools/skink.py
index <HASH>..<HASH> 100644
--- a/benchexec/tools/skink.py
+++ b/benchexec/tools/skink.py
@@ -23,7 +23,8 @@ class Tool(benchexec.tools.template.BaseTool):
"include",
"logback-test.xml",
"skink.sh",
- "skink.jar"
+ "skink.jar",
+ "skink_exp.jar"
]
def executable(self): | added missing path that broke the last run
Had to update REQUIRED_PATHS to pick up our exp jar. The last run was broken because of it. | sosy-lab_benchexec | train |
4747b0faa6b99b24dd92d717914d18154ffe85ec | diff --git a/bootstrap.py b/bootstrap.py
index <HASH>..<HASH> 100755
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -27,6 +27,8 @@ import time
from logging import Formatter, StreamHandler, getLogger
from pathlib import Path
+from install_dev_repos import REPOS, install_repo
+
# ---- Setup logger
fmt = Formatter('%(asctime)s [%(levelname)s] [%(name)s] -> %(message)s')
h = StreamHandler()
@@ -67,6 +69,8 @@ parser.add_argument('--filter-log', default='',
help="Comma-separated module name hierarchies whose log "
"messages should be shown. e.g., "
"spyder.plugins.completion,spyder.plugins.editor")
+parser.add_argument('--no-subrepos', action='store_true', default=False,
+ help="Do not install subrepos")
parser.add_argument('spyder_options', nargs='*')
args = parser.parse_args()
@@ -114,14 +118,25 @@ else:
logger.info("Skipping GUI toolkit detection")
os.environ['QT_API'] = args.gui
+# ---- Install sub repos
+
+if not args.no_subrepos:
+ for name in REPOS.keys():
+ if name == 'spyder':
+ continue
+ if not REPOS[name]['editable']:
+ install_repo(name)
+ else:
+ logger.info("%s already installed in editable mode", name)
+
# ---- Check versions
# Checking versions (among other things, this has the effect of setting the
# QT_API environment variable if this has not yet been done just above)
from spyder import get_versions
versions = get_versions(reporev=True)
-logger.info("Imported Spyder %s - Revision %s, Branch: %s\n"
- " [Python %s %dbits, Qt %s, %s %s on %s]",
+logger.info("Imported Spyder %s - Revision %s, Branch: %s; "
+ "[Python %s %dbits, Qt %s, %s %s on %s]",
versions['spyder'], versions['revision'], versions['branch'],
versions['python'], versions['bitness'], versions['qt'],
versions['qt_api'], versions['qt_api_ver'], versions['system'])
diff --git a/install_dev_repos.py b/install_dev_repos.py
index <HASH>..<HASH> 100755
--- a/install_dev_repos.py
+++ b/install_dev_repos.py
@@ -15,6 +15,7 @@ from logging import Formatter, StreamHandler, getLogger
from pathlib import Path
from subprocess import check_output
+from importlib_metadata import PackageNotFoundError, distribution
from packaging.requirements import Requirement
DEVPATH = Path(__file__).resolve().parent
@@ -26,8 +27,15 @@ for p in [DEVPATH] + list(DEPS_PATH.iterdir()):
if p.name.startswith('.') or not p.is_dir() and not (
(p / 'setup.py').exists() or (p / 'pyproject.toml').exists()):
continue
+ try:
+ dist = distribution(p.name)._path
+ except PackageNotFoundError:
+ dist = None
+ editable = None
+ else:
+ editable = (p == dist or p in dist.parents)
- REPOS[p.name] = p
+ REPOS[p.name] = {'repo': p, 'dist': dist, 'editable': editable}
# ---- Setup logger
fmt = Formatter('%(asctime)s [%(levelname)s] [%(name)s] -> %(message)s')
@@ -69,7 +77,7 @@ def install_repo(name, not_editable=False):
"""
try:
- repo_path = REPOS[name]
+ repo_path = REPOS[name]['repo']
except KeyError:
logger.warning('Distribution %r not valid. Must be one of %s',
name, set(REPOS.keys())) | Bootstrap will install subrepos if not already installed in editable mode.
Flag provided to prevent this installation. | spyder-ide_spyder | train |
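
The decision "does this subrepo still need installing?" rests on importlib metadata: if distribution() raises PackageNotFoundError the package is absent, and if it is present the diff additionally compares the distribution's location (via the private _path attribute) to the repo path to recognise an editable install. A minimal sketch of just the presence check, using the stdlib module instead of the importlib_metadata backport and a placeholder package name:

    from importlib.metadata import PackageNotFoundError, distribution

    def is_installed(name: str) -> bool:
        try:
            distribution(name)
        except PackageNotFoundError:
            return False
        return True

    print(is_installed("pip"))            # True in most environments
    print(is_installed("no-such-pkg"))    # False
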
1c748907985042d2b8659505cd508e7ef7a84e9c | diff --git a/bundles/org.eclipse.orion.client.editor/web/orion/editor/tooltip.js b/bundles/org.eclipse.orion.client.editor/web/orion/editor/tooltip.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.editor/web/orion/editor/tooltip.js
+++ b/bundles/org.eclipse.orion.client.editor/web/orion/editor/tooltip.js
@@ -676,20 +676,35 @@ function Tooltip (view, editor) {
var curLine = tv.getLineAtOffset(start);
var endLine = tv.getLineAtOffset(end);
- // Adjust start / end to be on the current line if necessary
+ var height, viewRect;
+
if (curLine !== endLine) {
- // 'getLineEnd' isn't API in textView but is in textModel...
- end = tv.getModel().getLineEnd(curLine);
+ var y = tv.getLocationAtOffset(start).y;
+ height = 0;
+ var maxX = 0;
+ while (curLine <= endLine){
+ height += tv.getLineHeight(curLine);
+ var lineEnd = tv.getModel().getLineEnd(curLine);
+ var possibleEnd = tv.getLocationAtOffset(lineEnd).x;
+ if (possibleEnd > end){
+ maxX = possibleEnd;
+ }
+ curLine++;
+ }
+ var lineStart = tv.getModel().getLineStart(endLine);
+ var x = tv.getLocationAtOffset(lineStart).x;
+
+ viewRect = { x: x, y: y, width: maxX - x, height: height};
+
+ } else {
+ var startPos = tv.getLocationAtOffset(start);
+ var endPos = tv.getLocationAtOffset(end);
+ height = tv.getLineHeight(curLine);
+ viewRect = { x: startPos.x, y: startPos.y,
+ width: endPos.x - startPos.x, height: height};
}
-
- var height = tv.getLineHeight(curLine);
- var startPos = tv.getLocationAtOffset(start);
- var endPos = tv.getLocationAtOffset(end);
-
- var viewRect = { x: startPos.x, y: startPos.y,
- width: endPos.x - startPos.x, height: height};
- viewRect = this._view.convert(viewRect, "document", "page"); //$NON-NLS-0$ //$NON-NLS-1$
+ viewRect = this._view.convert(viewRect, "document", "page"); //$NON-NLS-1$ //$NON-NLS-2$
return {left: viewRect.x, top: viewRect.y, width: viewRect.width, height: viewRect.height};
},
/* | Bug <I> - [Hover] Hover tooltip obscures code with multi line annotation | eclipse_orion.client | train |
a8d3851956389561865ffabcc5ee7ec09c8538d2 | diff --git a/instaloader/instaloader.py b/instaloader/instaloader.py
index <HASH>..<HASH> 100644
--- a/instaloader/instaloader.py
+++ b/instaloader/instaloader.py
@@ -758,17 +758,16 @@ class Instaloader:
),
check_bbd=self.check_resume_bbd,
enabled=self.resume_prefix is not None
- ) as resume_info:
- is_resuming, start_index = resume_info
- for number, post in enumerate(posts):
- if max_count is not None and number + start_index >= max_count:
+ ) as (is_resuming, start_index):
+ for number, post in enumerate(posts, start=start_index + 1):
+ if max_count is not None and number > max_count:
break
if displayed_count is not None:
- self.context.log("[{0:{w}d}/{1:{w}d}] ".format(number + start_index + 1, displayed_count,
+ self.context.log("[{0:{w}d}/{1:{w}d}] ".format(number, displayed_count,
w=len(str(displayed_count))),
end="", flush=True)
else:
- self.context.log("[{:3d}] ".format(number + start_index + 1), end="", flush=True)
+ self.context.log("[{:3d}] ".format(number), end="", flush=True)
if post_filter is not None:
try:
if not post_filter(post):
diff --git a/instaloader/nodeiterator.py b/instaloader/nodeiterator.py
index <HASH>..<HASH> 100644
--- a/instaloader/nodeiterator.py
+++ b/instaloader/nodeiterator.py
@@ -219,8 +219,7 @@ def resumable_iteration(context: InstaloaderContext,
load=lambda _, path: FrozenNodeIterator(**json.load(open(path))),
save=lambda fni, path: json.dump(fni._asdict(), open(path, 'w')),
format_path=lambda magic: "resume_info_{}.json".format(magic)
- ) as resume_info:
- is_resuming, start_index = resume_info
+ ) as (is_resuming, start_index):
for post in post_iterator:
do_something_with(post) | Nicer syntax for resumable_iteration usage | instaloader_instaloader | train |
a3d4f898c5901a08c49dd59664b6e4a7457bcdd0 | diff --git a/server/conn.go b/server/conn.go
index <HASH>..<HASH> 100644
--- a/server/conn.go
+++ b/server/conn.go
@@ -180,6 +180,7 @@ func (cc *clientConn) handshake(ctx context.Context) error {
return err
}
data := cc.alloc.AllocWithLen(4, 32)
+ data = append(data, mysql.OKHeader)
data = append(data, 0, 0)
if cc.capability&mysql.ClientProtocol41 > 0 {
data = dumpUint16(data, mysql.ServerStatusAutocommit) | server: fix wrong deletion introduced by #<I> (#<I>) | pingcap_tidb | train |
203d31f5e91c2f47fe33c8af9dfcc9c7bf9a912d | diff --git a/lib/styler/styles.js b/lib/styler/styles.js
index <HASH>..<HASH> 100644
--- a/lib/styler/styles.js
+++ b/lib/styler/styles.js
@@ -16,6 +16,7 @@ exports.stops = {
cx: 0,
cy: 0,
fill: function (display, data) {
+ if (data.stop.isEndPoint) return '#fff';
if (data.stop.isBranchPoint) return '#dbdcdd';
return '#fff';
},
@@ -32,7 +33,7 @@ exports.stops = {
if (data.stop.isBranchPoint) {
return '#fff';
}
- return '#333';
+ return 'gray';
},
'stroke-width': function (display, data) {
if (data.stop.isEndPoint) {
@@ -56,8 +57,10 @@ exports.labels = {
return pixels(display.zoom.scale(), 1, 1.2, 1.4) + 'em';
},
visibility: function (display, data) {
- if (display.zoom.scale() < 0.75) return 'hidden';
- return 'visible';
+ if (display.zoom.scale() >= 1) return 'visible';
+ if (display.zoom.scale() >= 0.75 && data.stop.isBranchPoint) return 'visible';
+ if (display.zoom.scale() >= 0.5 && data.stop.isEndPoint) return 'visible';
+ return 'hidden';
}
}; | Different stops display labels at different zooms | conveyal_transitive.js | train |
2ea73a90e01f05dee83344797027b71d6baf20b4 | diff --git a/lib/cancan/model_adapters/active_record_adapter.rb b/lib/cancan/model_adapters/active_record_adapter.rb
index <HASH>..<HASH> 100644
--- a/lib/cancan/model_adapters/active_record_adapter.rb
+++ b/lib/cancan/model_adapters/active_record_adapter.rb
@@ -73,8 +73,8 @@ module CanCan
value.delete(k)
nested[k] = v
else
- name = model_class.reflect_on_association(name).table_name.to_sym
- result_hash[name] = value
+ table_name = model_class.reflect_on_association(name).table_name.to_sym
+ result_hash[table_name] = value
end
nested
end
diff --git a/spec/cancan/model_adapters/active_record_adapter_spec.rb b/spec/cancan/model_adapters/active_record_adapter_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cancan/model_adapters/active_record_adapter_spec.rb
+++ b/spec/cancan/model_adapters/active_record_adapter_spec.rb
@@ -6,6 +6,7 @@ if ENV["MODEL_ADAPTER"].nil? || ENV["MODEL_ADAPTER"] == "active_record"
describe CanCan::ModelAdapters::ActiveRecordAdapter do
with_model :category do
table do |t|
+ t.string "name"
t.boolean "visible"
end
model do
@@ -159,6 +160,15 @@ if ENV["MODEL_ADAPTER"].nil? || ENV["MODEL_ADAPTER"] == "active_record"
lambda { Article.accessible_by(@ability) }.should raise_error(CanCan::Error)
end
+ it "should support more than one deeply nested conditions" do
+ @ability.can :read, Comment, :article => {
+ :category => {
+ :name => 'foo', :visible => true
+ }
+ }
+ expect { Comment.accessible_by(@ability) }.to_not raise_error
+ end
+
it "should not allow to check ability on object against SQL conditions without block" do
@ability.can :read, Article, ["secret=?", true]
lambda { @ability.can? :read, Article.new }.should raise_error(CanCan::Error)
@@ -317,4 +327,4 @@ if ENV["MODEL_ADAPTER"].nil? || ENV["MODEL_ADAPTER"] == "active_record"
# adapter.matches_condition?(article1, :name.nlike, "%ello worl%").should be_false
end
end
-end
+end
\ No newline at end of file | Fixes nested ability conditions in issue #<I> | CanCanCommunity_cancancan | train |
a40ee15e5a109aab63e45e8b4d64f412a4767d27 | diff --git a/src/serverError.js b/src/serverError.js
index <HASH>..<HASH> 100644
--- a/src/serverError.js
+++ b/src/serverError.js
@@ -1,9 +1,9 @@
class ServerError extends Error {
- constructor ({ statusCode, reason, error }, query) {
- super(`Cubic-client encountered an error while requesting ${query}: ${statusCode} - ${reason}`)
+ constructor ({ statusCode, body }, query) {
+ super(`Cubic-client encountered an error while requesting ${query}: ${statusCode} - ${body.error} (${body.reason})`)
this.statusCode = statusCode
- this.reason = reason
- this.error = error
+ this.reason = body.reason
+ this.error = body.error
}
} | fix: Fix ServerErrors returning `undefined` as reason. | cubic-js_cubic | train |
d3ddb4af8d26a04aacbd20eeb4b68217894231bd | diff --git a/src/textlint-rule-no-nfd.js b/src/textlint-rule-no-nfd.js
index <HASH>..<HASH> 100644
--- a/src/textlint-rule-no-nfd.js
+++ b/src/textlint-rule-no-nfd.js
@@ -12,9 +12,11 @@ function reporter(context) {
return;
}
const text = getSource(node);
- matchCaptureGroupAll(text, /([\u309a\u3099])/g).forEach(({index}) => {
+ matchCaptureGroupAll(text, /([\u309b\u309c\u309a\u3099])/g).forEach(({index}) => {
+ // \u309b\u309c => \u309a\u3099
const dakutenChars = text.slice(index - 1, index + 1);
- const expectedText = unorm.nfc(dakutenChars);
+ const nfdlized = dakutenChars.replace("\u309B", "\u3099").replace("\u309C", "\u309A")
+ const expectedText = unorm.nfc(nfdlized);
const ruleError = new RuleError(`Disallow to use NFD(well-known as Mac濁点): "${dakutenChars}" => "${expectedText}"`, {
index,
fix: fixer.replaceTextRange([index - 1, index + 1], expectedText)
diff --git a/test/textlint-rule-no-nfd-test.js b/test/textlint-rule-no-nfd-test.js
index <HASH>..<HASH> 100644
--- a/test/textlint-rule-no-nfd-test.js
+++ b/test/textlint-rule-no-nfd-test.js
@@ -11,7 +11,6 @@ tester.run("no-todo", rule, {
"エンジン"
],
invalid: [
- // single match
{
text: "ホ\u309aケット",
output: "ポケット",
@@ -24,6 +23,17 @@ tester.run("no-todo", rule, {
]
},
{
+ text: "ホ゜ケット",
+ output: "ポケット",
+ errors: [
+ {
+ message: `Disallow to use NFD(well-known as Mac濁点): "ホ\u309c" => "ポ"`,
+ line: 1,
+ column: 2
+ }
+ ]
+ },
+ {
text: "エンシ\u3099ン",
output:"エンジン",
errors: [
@@ -33,6 +43,18 @@ tester.run("no-todo", rule, {
column: 4
}
]
+ },
+
+ {
+ text: "エンシ゛ン",
+ output:"エンジン",
+ errors: [
+ {
+ message: `Disallow to use NFD(well-known as Mac濁点): "シ\u309b" => "ジ"`,
+ line: 1,
+ column: 4
+ }
+ ]
}
]
});
\ No newline at end of file | feat(rule): VOICED SOUND MARK \u<I>b \u<I>c support | azu_textlint-rule-no-nfd | train |
28c2050b05230505ecc17af84aadf3ae482ace8d | diff --git a/config/server.js b/config/server.js
index <HASH>..<HASH> 100644
--- a/config/server.js
+++ b/config/server.js
@@ -509,7 +509,7 @@ module.exports = function (server) {
* match those in the certificate.
*/
- server.set('jwks_uri', undefined);
+ server.set('jwks_uri', issuer + '/jwks');
/**
* registration_endpoint | added jwks_uri to discovery endpoint | anvilresearch_connect | train |
5efe38401aa23ffed686cf2651faed3ed2592898 | diff --git a/lib/cucumber/salad/table.rb b/lib/cucumber/salad/table.rb
index <HASH>..<HASH> 100644
--- a/lib/cucumber/salad/table.rb
+++ b/lib/cucumber/salad/table.rb
@@ -126,6 +126,8 @@ module Cucumber
@rows ||= table.hashes.map { |h| new_row(h) }
end
+ alias_method :to_a, :rows
+
private
attr_accessor :table | Alias Table#to_a -> #rows. | mojotech_capybara-ui | train |
8cc5fd82e60278b5fc5076a1f39197f30b665123 | diff --git a/src/widgets/dialog/dialog_helper.js b/src/widgets/dialog/dialog_helper.js
index <HASH>..<HASH> 100644
--- a/src/widgets/dialog/dialog_helper.js
+++ b/src/widgets/dialog/dialog_helper.js
@@ -15,9 +15,9 @@
return new Promise ( resolve => {
- $modals.one ( 'modal:close dialog:close', () => resolve ( $.dialog ( options ) ) );
+ $modals.one ( 'modal:close dialog:close', _.once ( () => _.defer ( () => resolve ( $.dialog ( options ) ) ) ) );
- $modals.dialog ( 'close' );
+ $modals.modal ( 'close' );
}); | Dialog: ensuring modals and dialogs are being closed via the common “Modal” constructor | svelto_svelto | train |
18c3c1759af65a82937b16a9a7e3393dccf9f906 | diff --git a/lib/Doctrine/DBAL/DBALException.php b/lib/Doctrine/DBAL/DBALException.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/DBAL/DBALException.php
+++ b/lib/Doctrine/DBAL/DBALException.php
@@ -24,6 +24,7 @@ class DBALException extends \Exception
const ERROR_DUPLICATE_KEY = 1;
const ERROR_UNKNOWN_TABLE = 2;
const ERROR_TABLE_ALREADY_EXISTS = 3;
+ const ERROR_FOREIGN_KEY_CONSTRAINT = 4;
/**
* @param string $method
diff --git a/lib/Doctrine/DBAL/Driver/PDOMySql/Driver.php b/lib/Doctrine/DBAL/Driver/PDOMySql/Driver.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/DBAL/Driver/PDOMySql/Driver.php
+++ b/lib/Doctrine/DBAL/Driver/PDOMySql/Driver.php
@@ -117,6 +117,10 @@ class Driver implements \Doctrine\DBAL\Driver
{
switch ($exception->getCode()) {
case 23000:
+ if (strpos($exception->getMessage(), 'Cannot delete or update a parent row: a foreign key constraint fails') !== false) {
+ return DBALException::ERROR_FOREIGN_KEY_CONSTRAINT;
+ }
+
return DBALException::ERROR_DUPLICATE_KEY;
case '42S02':
return DBALException::ERROR_UNKNOWN_TABLE;
diff --git a/tests/Doctrine/Tests/DBAL/Functional/ExceptionTest.php b/tests/Doctrine/Tests/DBAL/Functional/ExceptionTest.php
index <HASH>..<HASH> 100644
--- a/tests/Doctrine/Tests/DBAL/Functional/ExceptionTest.php
+++ b/tests/Doctrine/Tests/DBAL/Functional/ExceptionTest.php
@@ -34,7 +34,7 @@ class ExceptionTest extends \Doctrine\Tests\DbalFunctionalTestCase
public function testTableAlreadyExists()
{
- $table = new \Doctrine\DBAL\Schema\Table("duplicatekey_table");
+ $table = new \Doctrine\DBAL\Schema\Table("alreadyexist_table");
$table->addColumn('id', 'integer', array());
$table->setPrimaryKey(array('id'));
@@ -46,5 +46,34 @@ class ExceptionTest extends \Doctrine\Tests\DbalFunctionalTestCase
$this->_conn->executeQuery($sql);
}
}
+
+ public function testForeignKeyContraintException()
+ {
+ if ( ! $this->_conn->getDatabasePlatform()->supportsForeignKeyConstraints()) {
+ $this->markTestSkipped("Only fails on platforms with foreign key constraints.");
+ }
+
+ $schema = new \Doctrine\DBAL\Schema\Schema();
+ $table = $schema->createTable("constraint_error_table");
+ $table->addColumn('id', 'integer', array());
+ $table->setPrimaryKey(array('id'));
+
+ $owningTable = $schema->createTable("owning_table");
+ $owningTable->addColumn('id', 'integer', array());
+ $owningTable->addColumn('constraint_id', 'integer', array());
+ $owningTable->setPrimaryKey(array('id'));
+ $owningTable->addForeignKeyConstraint($table, array('constraint_id'), array('id'));
+
+ foreach ($schema->toSql($this->_conn->getDatabasePlatform()) AS $sql) {
+ $this->_conn->executeQuery($sql);
+ }
+
+ $this->_conn->insert("constraint_error_table", array('id' => 1));
+ $this->_conn->insert("owning_table", array('id' => 1, 'constraint_id' => 1));
+
+ $this->setExpectedException('\Doctrine\DBAL\DBALException', null, DBALException::ERROR_FOREIGN_KEY_CONSTRAINT);
+ $this->_conn->delete('constraint_error_table', array('id' => 1));
+
+ }
}
\ No newline at end of file | [DBAL-<I>] Implement error detection on foreign key constraint exception for mysql and sqlite | doctrine_dbal | train |
f6d4323b62c4ff201a6a9ee0bee8579f6a877759 | diff --git a/kerncraft/models/benchmark.py b/kerncraft/models/benchmark.py
index <HASH>..<HASH> 100644
--- a/kerncraft/models/benchmark.py
+++ b/kerncraft/models/benchmark.py
@@ -7,8 +7,11 @@ import subprocess
from functools import reduce
import operator
import sys
-import six
from distutils.spawn import find_executable
+from pprint import pprint
+import re
+
+import six
from kerncraft.kernel import KernelCode
@@ -45,6 +48,9 @@ class Benchmark(object):
def perfctr(self, cmd, group='MEM', cpu='S0:0', code_markers=True, pin=True):
'''
runs *cmd* with likwid-perfctr and returns result as dict
+
+ *group* may be a performance group known to likwid-perfctr or an event string.
+ Only works with single core!
'''
# Making sure iaca.sh is available:
@@ -52,9 +58,9 @@ class Benchmark(object):
print("likwid-perfctr was not found. Make sure likwid is installed and found in PATH.",
file=sys.stderr)
sys.exit(1)
-
+
# FIXME currently only single core measurements support!
- perf_cmd = ['likwid-perfctr', '-O', '-g', group]
+ perf_cmd = ['likwid-perfctr', '-f', '-O', '-g', group]
if pin:
perf_cmd += ['-C', cpu]
@@ -76,13 +82,20 @@ class Benchmark(object):
results = {}
ignore = True
for l in output:
- if ignore and (l.startswith('Event,core 0') or l.startswith('Metric,Core 0')):
- ignore = False
- elif ignore or not l:
- continue
-
l = l.split(',')
- results[l[0]] = l[1:]
+ try:
+ # Metrics
+ results[l[0]] = float(l[1])
+ except:
+ pass
+ try:
+ # Event counters
+ counter_value = int(l[2])
+ if re.fullmatch(r'[A-Z_]+', l[0]) and re.fullmatch(r'[A-Z0-9]+', l[1]):
+ results.setdefault(l[0], {})
+ results[l[0]][l[1]] = counter_value
+ except (IndexError, ValueError):
+ pass
return results
@@ -106,7 +119,7 @@ class Benchmark(object):
repetitions *= 10
result = self.perfctr(args+[six.text_type(repetitions)])
- runtime = float(result['Runtime (RDTSC) [s]'][0])
+ runtime = result['Runtime (RDTSC) [s]']
time_per_repetition = runtime/float(repetitions)
self.results = {'raw output': result}
@@ -125,13 +138,13 @@ class Benchmark(object):
self.results['Runtime (per cacheline update) [cy/CL]'] = \
(cys_per_repetition/iterations_per_repetition)*iterations_per_cacheline
self.results['MEM volume (per repetition) [B]'] = \
- float(result['Memory data volume [GBytes]'][0])*1e9/repetitions
+ result['Memory data volume [GBytes]']*1e9/repetitions
self.results['Performance [MFLOP/s]'] = \
sum(self.kernel._flops.values())/(time_per_repetition/iterations_per_repetition)/1e6
if 'Memory bandwidth [MBytes/s]' in result:
- self.results['MEM BW [MByte/s]'] = float(result['Memory bandwidth [MBytes/s]'][0])
+ self.results['MEM BW [MByte/s]'] = result['Memory bandwidth [MBytes/s]']
else:
- self.results['MEM BW [MByte/s]'] = float(result['Memory BW [MBytes/s]'][0])
+ self.results['MEM BW [MByte/s]'] = result['Memory BW [MBytes/s]']
self.results['Performance [MLUP/s]'] = (iterations_per_repetition/time_per_repetition)/1e6
self.results['Performance [MIt/s]'] = (iterations_per_repetition/time_per_repetition)/1e6 | more generic likwid-perfctr output parsing | RRZE-HPC_kerncraft | train |
eecc5410dbfcdc59e2cba3f4255f03b58783ae27 | diff --git a/src/core/services/gesture/gesture.js b/src/core/services/gesture/gesture.js
index <HASH>..<HASH> 100644
--- a/src/core/services/gesture/gesture.js
+++ b/src/core/services/gesture/gesture.js
@@ -552,8 +552,10 @@ function attachToDocument( $mdGesture, $$MdGestureHandler ) {
function clickHijacker(ev) {
var isKeyClick = ev.clientX === 0 && ev.clientY === 0;
+ var isSubmitEvent = ev.target && ev.target.type === 'submit';
if (!isKeyClick && !ev.$material && !ev.isIonicTap
- && !isInputEventFromLabelClick(ev)) {
+ && !isInputEventFromLabelClick(ev)
+ && !isSubmitEvent) {
ev.preventDefault();
ev.stopPropagation();
lastLabelClickPos = null; | fix(mdGesture): fix form submit via enter/go button on iOS (#<I>) (#<I>) | angular_material | train |
a77576cf74ab6c42f8d56e67ea47f04589382ae1 | diff --git a/config/datadog-helper.php b/config/datadog-helper.php
index <HASH>..<HASH> 100644
--- a/config/datadog-helper.php
+++ b/config/datadog-helper.php
@@ -46,6 +46,6 @@ return [
| Since the UDP method uses the a local dogstatsd instance you don't need to setup
| any additional application/api access.
*/
- 'transport' => 'TCP'
+ 'transport' => 'UDP'
]; | Default transport should match underlying library (UDP). | chaseconey_laravel-datadog-helper | train |
42bf9fac3217f359adcbd2aa405dc23394bce913 | diff --git a/lib/kamerling/value.rb b/lib/kamerling/value.rb
index <HASH>..<HASH> 100644
--- a/lib/kamerling/value.rb
+++ b/lib/kamerling/value.rb
@@ -15,7 +15,7 @@ module Kamerling
end
def to_h
- attributes.map { |(key, val)| { key => serialise(val) } }.reduce(:merge)
+ attributes.map { |key, val| { key => serialise(val) } }.reduce(:merge)
end
private | Value#to_h: no need to destructure in Hash#map | chastell_kamerling | train |
423d55d4831ada8f8093dc78c5f241183b5aba3c | diff --git a/components/Router/helpers/parsed-link/actions.js b/components/Router/helpers/parsed-link/actions.js
index <HASH>..<HASH> 100644
--- a/components/Router/helpers/parsed-link/actions.js
+++ b/components/Router/helpers/parsed-link/actions.js
@@ -11,7 +11,6 @@ import popTabToRoot from '@shopgate/pwa-core/commands/popTabToRoot';
import showTab from '@shopgate/pwa-core/commands/showTab';
import { getPageContext } from '../../../../helpers/legacy';
import { isFunction } from '../../../../helpers/validation';
-import { INDEX_PATH } from '../../../../constants/RoutePaths';
/**
* Native link handler, simply changes current location.href to open email, tel, etc..
@@ -101,10 +100,6 @@ const reactRouter = (options, historyHandler) => {
targetTab: pageContext.tab,
});
- if (options.url === INDEX_PATH) {
- return;
- }
-
if (isFunction(historyHandler)) {
historyHandler(options);
return;
diff --git a/helpers/redux/index.js b/helpers/redux/index.js
index <HASH>..<HASH> 100644
--- a/helpers/redux/index.js
+++ b/helpers/redux/index.js
@@ -123,7 +123,7 @@ export const generateResultHash = (params, includeSort = true) => {
filters: {},
};
- let mergedParams = {
+ const mergedParams = {
...defaultParams,
...params,
}; | CON-<I> Removed router check against index route path | shopgate_pwa | train |
4f364bcc670ca9510ec834ec28621e54e223fae4 | diff --git a/io/rtp/src/main/java/org/mobicents/media/server/impl/rtp/RtpChannel.java b/io/rtp/src/main/java/org/mobicents/media/server/impl/rtp/RtpChannel.java
index <HASH>..<HASH> 100644
--- a/io/rtp/src/main/java/org/mobicents/media/server/impl/rtp/RtpChannel.java
+++ b/io/rtp/src/main/java/org/mobicents/media/server/impl/rtp/RtpChannel.java
@@ -325,7 +325,7 @@ public class RtpChannel extends MultiplexedChannel implements DtlsListener, IceE
// bind data channel
this.udpManager.bind(this.dataChannel, PORT_ANY, isLocal);
- this.rtcpMux = true;
+ this.rtcpMux = rtcpMux;
this.bound = true;
// activate media elements
@@ -526,6 +526,7 @@ public class RtpChannel extends MultiplexedChannel implements DtlsListener, IceE
if (this.rtcpMux) {
this.handlers.removeHandler(this.rtcpHandler);
this.rtcpHandler.reset();
+ this.rtcpMux = false;
}
if(this.ice) { | #<I> RTP connection now sets properly the rtcp-mux flag. | RestComm_media-core | train |
dd00a6edd658bc9d42abe6d4c352e1a7a01f4722 | diff --git a/holoviews/plotting/plotly/chart3d.py b/holoviews/plotting/plotly/chart3d.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/plotly/chart3d.py
+++ b/holoviews/plotting/plotly/chart3d.py
@@ -22,7 +22,7 @@ class Chart3DPlot(ElementPlot):
def init_layout(self, key, element, ranges):
l, b, zmin, r, t, zmax = self.get_extents(element, ranges)
- xd, yd = (element.get_dimension(i) for i in range(2))
+ xd, yd, zd = (element.get_dimension(i) for i in range(3))
xaxis = dict(range=[l, r], title=str(xd))
if self.logx:
xaxis['type'] = 'log'
@@ -31,7 +31,7 @@ class Chart3DPlot(ElementPlot):
if self.logy:
yaxis['type'] = 'log'
- zaxis = dict(range=[zmin, zmax], title=str(yd))
+ zaxis = dict(range=[zmin, zmax], title=str(zd))
if self.logz:
zaxis['type'] = 'log' | Fixed z-axis label in Plotly | pyviz_holoviews | train |