hash | diff | message | project | split
---|---|---|---|---|
294688c0dce917128dc679cc96c69f5c4bb79ebe | diff --git a/closure/bin/build/closurebuilder.py b/closure/bin/build/closurebuilder.py
index <HASH>..<HASH> 100755
--- a/closure/bin/build/closurebuilder.py
+++ b/closure/bin/build/closurebuilder.py
@@ -259,7 +259,7 @@ is prefererred over using this script for performing JavaScript compilation.
Please migrate your codebase.
See:
-https://github.com/google/closure-compiler/wiki/Manage-Closure-Dependencies
+https://github.com/google/closure-compiler/wiki/Managing-Dependencies
""")
# Make sure a .jar is specified. | Update broken hyperlink in closurebuilder.py
RELNOTES: n/a
-------------
Created by MOE: <URL> | google_closure-library | train |
d7273c50cc436c59a2e058f937577ab8f258f5dc | diff --git a/common/oatbox/service/ServiceManager.php b/common/oatbox/service/ServiceManager.php
index <HASH>..<HASH> 100644
--- a/common/oatbox/service/ServiceManager.php
+++ b/common/oatbox/service/ServiceManager.php
@@ -21,12 +21,8 @@
namespace oat\oatbox\service;
use oat\oatbox\Configurable;
-use oat\oatbox\log\LoggerService;
-use oat\oatbox\log\TaoLoggerAwareInterface;
-use Psr\Log\LoggerAwareInterface;
use Zend\ServiceManager\ServiceLocatorInterface;
use Zend\ServiceManager\ServiceLocatorAwareInterface;
-use oat\oatbox\config\ConfigurationService;
/**
* The simple placeholder ServiceManager
@@ -137,18 +133,12 @@ class ServiceManager implements ServiceLocatorInterface
* Propagate service dependencies
*
* @param $service
- * @param null $originalService The class where the propagate method is call
* @return mixed
*/
- public function propagate($service, $originalService = null)
+ public function propagate($service)
{
- if (is_object($service)) {
-
- // Propagate ServiceLocator
- if ($service instanceof ServiceLocatorAwareInterface) {
- $service->setServiceLocator($this);
- }
-
+ if (is_object($service) && ($service instanceof ServiceLocatorAwareInterface)) {
+ $service->setServiceLocator($this);
}
return $service;
} | Remove the propagation from service manager | oat-sa_generis | train |
80e4e34994f67fb617809dd23b3729474c6aca91 | diff --git a/generators/app/templates/gulpfile.js b/generators/app/templates/gulpfile.js
index <HASH>..<HASH> 100644
--- a/generators/app/templates/gulpfile.js
+++ b/generators/app/templates/gulpfile.js
@@ -32,6 +32,9 @@ function watch(done) {
<% } -%>
<% if (framework !== 'react') { -%>
+<% if (modules !== 'systemjs') { -%>
+ gulp.watch(path.join(conf.paths.src, 'app/**/*.html'), browserSync.reload);
+<% } else { -%>
gulp.watch(path.join(conf.paths.src, '**/*.html'), browserSync.reload);
<% } -%>
<% } else if (modules === 'webpack') {-%> | Don't watch index.html when webpack or inject | FountainJS_generator-fountain-gulp | train |
b1a5bfce53e64858f78c0c6611b7da5550505ef7 | diff --git a/src/Comely/IO/Session/Storage/Disk.php b/src/Comely/IO/Session/Storage/Disk.php
index <HASH>..<HASH> 100644
--- a/src/Comely/IO/Session/Storage/Disk.php
+++ b/src/Comely/IO/Session/Storage/Disk.php
@@ -14,6 +14,7 @@ declare(strict_types=1);
namespace Comely\IO\Session\Storage;
+use Comely\IO\FileSystem\Disk\Directory;
use Comely\IO\FileSystem\Exception\DiskException;
use Comely\IO\Session\Exception\StorageException;
@@ -28,15 +29,15 @@ class Disk implements SessionStorageInterface
/**
* Disk constructor.
- * @param \Comely\IO\FileSystem\Disk $disk
+ * @param Directory $disk
* @throws StorageException
*/
- public function __construct(\Comely\IO\FileSystem\Disk $disk)
+ public function __construct(Directory $disk)
{
// Check permission
- if (!$disk->privileges()->read) {
+ if (!$disk->permissions()->read) {
throw new StorageException('Disk instance passed for Session storage does not have READ privilege');
- } elseif (!$disk->privileges()->write) {
+ } elseif (!$disk->permissions()->write) {
throw new StorageException('Disk instance passed for Session storage does not have WRITE privilege');
}
@@ -79,15 +80,7 @@ class Disk implements SessionStorageInterface
*/
public function has(string $id): bool
{
- try {
- $file = $this->disk->file($id . ".sess");
- if ($file->is() === \Comely\IO\FileSystem\Disk::IS_FILE) {
- return $file->permissions()->read;
- }
- } catch (DiskException $e) {
- }
-
- return false;
+ return is_readable($this->disk->suffixed($id . ".sess"));
}
/**
@@ -97,7 +90,7 @@ class Disk implements SessionStorageInterface
public function delete(string $id): void
{
try {
- $this->disk->file($id . ".sess")->delete();
+ $this->disk->delete($id . ".sess");
} catch (DiskException $e) {
throw new StorageException($e->getMessage());
}
@@ -111,7 +104,7 @@ class Disk implements SessionStorageInterface
$files = $this->list();
foreach ($files as $file) {
try {
- $this->disk->file($file)->delete();
+ $this->disk->delete($file);
} catch (DiskException $e) {
trigger_error($e->getMessage(), E_USER_WARNING);
}
@@ -125,7 +118,7 @@ class Disk implements SessionStorageInterface
public function list(): array
{
try {
- return $this->disk->find("*.sess");
+ return $this->disk->glob("*.sess");
} catch (DiskException $e) {
throw new StorageException($e->getMessage());
}
@@ -139,7 +132,7 @@ class Disk implements SessionStorageInterface
public function lastModified(string $id): int
{
try {
- return $this->disk->file($id . ".sess")->lastModified();
+ return $this->disk->lastModified($id . ".sess");
} catch (DiskException $e) {
throw new StorageException($e->getMessage());
} | updates according to changes in Disk API | comelyio_comely | train |
7e2a8072abc9f7af96f27218d7ab5bdd8e551e58 | diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py
index <HASH>..<HASH> 100644
--- a/airflow/operators/docker_operator.py
+++ b/airflow/operators/docker_operator.py
@@ -97,6 +97,7 @@ class DockerOperator(BaseOperator):
:type user: int or str
:param volumes: List of volumes to mount into the container, e.g.
``['/host/path:/container/path', '/host/path2:/container/path2:ro']``.
+ :type volumes: list
:param working_dir: Working directory to
set on the container (equivalent to the -w switch the docker client)
:type working_dir: str | [AIRFLOW-<I>] - Add missing type in DockerOperator doc string (#<I>)
Add missing type in DockerOperator docstring for the volumes param | apache_airflow | train |
fddc1c1ca0e8a737d5d23b82d3c6d2fd10e28ae7 | diff --git a/src/web/primitives/grid/Main.js b/src/web/primitives/grid/Main.js
index <HASH>..<HASH> 100644
--- a/src/web/primitives/grid/Main.js
+++ b/src/web/primitives/grid/Main.js
@@ -38,21 +38,6 @@ const Main = injectStyles(styled.main`
margin: 30px auto;
padding: 0;
}
-
- ${props =>
- props.result
- ? `
- @media print {
- > h1,
- > h2,
- > p {
- display: none;
- }
- }
- `
- : `
-
- `};
`);
export default Main; | Show result text on print again. Each block should have a screenOnly prop like the printOnly prop | DirektoratetForByggkvalitet_losen | train |
2f9e454326d23593244430990e7befa9d096caa3 | diff --git a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/test/AbstractActivitiTestCase.java b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/test/AbstractActivitiTestCase.java
index <HASH>..<HASH> 100644
--- a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/test/AbstractActivitiTestCase.java
+++ b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/test/AbstractActivitiTestCase.java
@@ -22,6 +22,11 @@ import java.util.concurrent.Callable;
import junit.framework.AssertionFailedError;
+import org.activiti.bpmn.model.BpmnModel;
+import org.activiti.bpmn.model.EndEvent;
+import org.activiti.bpmn.model.SequenceFlow;
+import org.activiti.bpmn.model.StartEvent;
+import org.activiti.bpmn.model.UserTask;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.FormService;
import org.activiti.engine.HistoryService;
@@ -40,6 +45,8 @@ import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.interceptor.CommandExecutor;
import org.activiti.engine.impl.jobexecutor.JobExecutor;
import org.activiti.engine.impl.util.ClockUtil;
+import org.activiti.engine.repository.Deployment;
+import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.runtime.ProcessInstance;
import org.junit.Assert;
@@ -254,6 +261,55 @@ public abstract class AbstractActivitiTestCase extends PvmTestCase {
.list()
.isEmpty();
}
+
+ /**
+ * Since the 'one task process' is used everywhere the actual process content
+ * doesn't matter, instead of copying around the BPMN 2.0 xml one could use
+ * this method which gives a {@link BpmnModel} version of the same process back.
+ */
+ public BpmnModel createOneTaskProcess() {
+ BpmnModel model = new BpmnModel();
+ org.activiti.bpmn.model.Process process = new org.activiti.bpmn.model.Process();
+ model.addProcess(process);
+ process.setId("oneTaskProcess");
+ process.setName("The one task process");
+
+ StartEvent startEvent = new StartEvent();
+ startEvent.setId("start");
+ process.addFlowElement(startEvent);
+
+ UserTask userTask = new UserTask();
+ userTask.setName("The Task");
+ userTask.setId("theTask");
+ userTask.setAssignee("kermit");
+ process.addFlowElement(userTask);
+
+ EndEvent endEvent = new EndEvent();
+ endEvent.setId("theEnd");
+ process.addFlowElement(endEvent);;
+
+ process.addFlowElement(new SequenceFlow("start", "theTask"));
+ process.addFlowElement(new SequenceFlow("theTask", "theEnd"));
+
+ return model;
+ }
+
+ /**
+ * Creates and deploys the one task process. See {@link #createOneTaskProcess()}.
+ *
+ * @return The process definition id (NOT the process definition key) of deployed one task process.
+ */
+ public String deployOneTaskProcess() {
+ BpmnModel bpmnModel = createOneTaskProcess();
+ Deployment deployment = repositoryService.createDeployment()
+ .addBpmnModel("oneTaskProcess.bpmn20.xml", bpmnModel).deploy();
+
+ this.deploymentId = deployment.getId(); // For auto-cleanup
+
+ ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery()
+ .deploymentId(deployment.getId()).singleResult();
+ return processDefinition.getId();
+ }
private static class InteruptTask extends TimerTask {
protected boolean timeLimitExceeded = false; | Added convenience method to test super class for deploying the one task process | Activiti_Activiti | train |
913c60e00be6fc24fdcec657b2334c61ae32dd87 | diff --git a/lib/Gregwar/Image.php b/lib/Gregwar/Image.php
index <HASH>..<HASH> 100644
--- a/lib/Gregwar/Image.php
+++ b/lib/Gregwar/Image.php
@@ -375,7 +375,7 @@ class Image
*/
public function cacheFile($type = 'jpg', $quality = 80)
{
- if (!count($this->operations))
+ if (!count($this->operations) && $type == $this->guessType())
return $this->file;
$datas = array( | [Fix] When nothing changes except the format | Gregwar_Image | train |
944ef510ba07569fa06336cd6e40b994740b5f1b | diff --git a/content/base/extractor/goose.go b/content/base/extractor/goose.go
index <HASH>..<HASH> 100644
--- a/content/base/extractor/goose.go
+++ b/content/base/extractor/goose.go
@@ -6,7 +6,7 @@ import (
"html/template"
"strings"
- "github.com/advancedlogic/GoOse"
+ goose "github.com/advancedlogic/GoOse"
"github.com/urandom/readeef/content"
"github.com/urandom/readeef/content/data"
"github.com/urandom/webfw/fs" | prefix the goose import, to appease vim-go | urandom_readeef | train |
9afa82be6657b2a07e24d986bb1748d71a45ea63 | diff --git a/lib/resources/h2o/js/graphs.js b/lib/resources/h2o/js/graphs.js
index <HASH>..<HASH> 100644
--- a/lib/resources/h2o/js/graphs.js
+++ b/lib/resources/h2o/js/graphs.js
@@ -10,7 +10,7 @@ function g_varimp(divid, names, varimp) {
// Setup size and axis
var margin = {top: 30, right: 10, bottom: 10, left: 10},
width = 640 - margin.left - margin.right,
- height = 450 - margin.top - margin.bottom;
+ height = names.length*16 - margin.top - margin.bottom;
var xScale = d3.scale.linear()
.range([0, width]) | Disable static height of varimp graph.
Now graph is scaled by number of variables. Each variable
bar has a static height. | h2oai_h2o-2 | train |
86b579a96cc335e58f0920c1b9994bb7486b4309 | diff --git a/lib/Cake/Model/Datasource/Database/Query.php b/lib/Cake/Model/Datasource/Database/Query.php
index <HASH>..<HASH> 100644
--- a/lib/Cake/Model/Datasource/Database/Query.php
+++ b/lib/Cake/Model/Datasource/Database/Query.php
@@ -116,10 +116,7 @@ class Query implements Expression, IteratorAggregate {
return $sql .= $this->{'_build' . ucFirst($name) . 'Part'}($parts, $sql);
};
- // TODO: Should Query actually get the driver or just let the connection decide where
- // to get the query translator?
- $translator = $this->connection()->driver()->queryTranslator($this->_type);
- $query = $translator($this);
+ $query = $this->_transformQuery();
$query->build($builder->bindTo($query));
return $sql;
}
@@ -473,6 +470,19 @@ class Query implements Expression, IteratorAggregate {
}
/**
+ * Returns a query object as returned by the connection object as a result of
+ * transforming this query instance to conform to any dialect specifics
+ *
+ * @return void
+ **/
+ protected function _transformQuery() {
+ // TODO: Should Query actually get the driver or just let the connection decide where
+ // to get the query translator?
+ $translator = $this->connection()->driver()->queryTranslator($this->_type);
+ return $translator($this);
+ }
+
+/**
* Returns string representation of this query (complete SQL statement)
*
* @return string | A bit of refactoring for clarity... Did I mention that all tests pass
for all implemented drivers? :) | cakephp_cakephp | train |
3b5cc7c1f600ed0b8b59b877a734ca8f8f34eb72 | diff --git a/lib/get/onValue.js b/lib/get/onValue.js
index <HASH>..<HASH> 100644
--- a/lib/get/onValue.js
+++ b/lib/get/onValue.js
@@ -36,7 +36,7 @@ module.exports = function onValue(model, node, seedOrFunction, outerResults, per
}
else if (outputFormat === "JSONG") {
- if (typeof node.value === "object") {
+ if (node.value && typeof node.value === "object") {
valueNode = clone(node);
} else {
valueNode = node.value; | made it so null is not output as an atom. | Netflix_falcor | train |
5b8e8375c6a9cbf62e66a9578fbff0e9490a5e60 | diff --git a/pkg/kubernetes/generic.go b/pkg/kubernetes/generic.go
index <HASH>..<HASH> 100644
--- a/pkg/kubernetes/generic.go
+++ b/pkg/kubernetes/generic.go
@@ -180,6 +180,21 @@ func (g *Generic) SetInitTokenStore(role string, token string) error {
return fmt.Errorf("Error reading init token path: %v", s)
}
+ dat, ok := s.Data["init_token"]
+ if !ok {
+ return fmt.Errorf("Error finding current init token data: %v", s)
+ }
+ oldToken, ok := dat.(string)
+ if !ok {
+ return fmt.Errorf("Error converting init_token data to string: %v", s)
+ }
+ logrus.Infof("Revoked Token '%s': '%s'", role, oldToken)
+
+ err = g.kubernetes.vaultClient.Auth().Token().RevokeOrphan(oldToken)
+ if err != nil {
+ return fmt.Errorf("Error revoking init token at path: %v", s)
+ }
+
s.Data["init_token"] = token
_, err = g.kubernetes.vaultClient.Logical().Write(path, s.Data)
if err != nil {
diff --git a/pkg/kubernetes/init_token_test.go b/pkg/kubernetes/init_token_test.go
index <HASH>..<HASH> 100644
--- a/pkg/kubernetes/init_token_test.go
+++ b/pkg/kubernetes/init_token_test.go
@@ -13,9 +13,10 @@ func TestInitToken_Ensure_NoExpectedToken_NotExisting(t *testing.T) {
fv := NewFakeVault(t)
i := &InitToken{
- Role: "etcd",
- Policies: []string{"etcd"},
- kubernetes: fv.Kubernetes(),
+ Role: "etcd",
+ Policies: []string{"etcd"},
+ kubernetes: fv.Kubernetes(),
+ ExpectedToken: "",
}
// expect a read and vault says secret is not existing | Old token is revoked when init token set by user | jetstack_vault-helper | train |
9c5613e2df6d2a3d362993b62c10f8f0efbafabb | diff --git a/test/test-creation.js b/test/test-creation.js
index <HASH>..<HASH> 100644
--- a/test/test-creation.js
+++ b/test/test-creation.js
@@ -373,7 +373,6 @@ const expectedFiles = {
CLIENT_MAIN_SRC_DIR + 'app/app.constants.ts',
CLIENT_MAIN_SRC_DIR + 'app/blocks/config/http.config.ts',
CLIENT_MAIN_SRC_DIR + 'app/blocks/config/localstorage.config.ts',
- CLIENT_MAIN_SRC_DIR + 'app/blocks/config/alert.config.ts',
CLIENT_MAIN_SRC_DIR + 'app/blocks/config/translation.config.ts',
CLIENT_MAIN_SRC_DIR + 'app/blocks/config/translation-storage.provider.ts',
CLIENT_MAIN_SRC_DIR + 'app/blocks/config/compile.config.ts',
@@ -409,6 +408,7 @@ const expectedFiles = {
CLIENT_MAIN_SRC_DIR + 'app/shared/login/login.html',
CLIENT_MAIN_SRC_DIR + 'app/shared/login/login.service.ts',
CLIENT_MAIN_SRC_DIR + 'app/shared/login/login.component.ts',
+ CLIENT_MAIN_SRC_DIR + 'app/shared/alert/alert.provider.ts',
CLIENT_MAIN_SRC_DIR + 'app/admin/admin.module.ts',
CLIENT_MAIN_SRC_DIR + 'app/admin/index.ts', | ng2: (shared) fix tests | jhipster_generator-jhipster | train |
25497a553d42ad9fb5c5a78dbb90bd700198ed86 | diff --git a/grr/client/grr_response_client/client_utils_common.py b/grr/client/grr_response_client/client_utils_common.py
index <HASH>..<HASH> 100644
--- a/grr/client/grr_response_client/client_utils_common.py
+++ b/grr/client/grr_response_client/client_utils_common.py
@@ -148,7 +148,7 @@ def IsExecutionWhitelisted(cmd, args):
("arp.exe", ["-a"]),
("driverquery.exe", ["/v"]),
("ipconfig.exe", ["/all"]),
- ("netsh.exe", ["advfirewall"], ["firewall"], ["show"], ["rule"], ["name=all"]),
+ ("netsh.exe", ["advfirewall", "firewall", "show", "rule", "name=all"]),
("tasklist.exe", ["/SVC"]),
("tasklist.exe", ["/v"]),
] | fix arg list (#<I>)
fix the arg list for netsh.exe | google_grr | train |
ba88ecc4a3a4c07d41232a533a283e5d195853b5 | diff --git a/example/server.py b/example/server.py
index <HASH>..<HASH> 100755
--- a/example/server.py
+++ b/example/server.py
@@ -84,7 +84,7 @@ class Handler(BaseHTTPRequestHandler):
redirect_uri="http://localhost:8080/login/google",
client_id=config["google.client_id"],
client_secret=config["google.client_secret"])
- c.auth_received(data)
+ c.request_token(data=data)
self.wfile.write("Access token: %s<br>" % c.access_token)
@@ -117,7 +117,7 @@ class Handler(BaseHTTPRequestHandler):
redirect_uri="http://localhost:8080/login/facebook",
client_id=config["facebook.client_id"],
client_secret=config["facebook.client_secret"])
- c.auth_received(data,
+ c.request_token(data=data,
parser = lambda data: dict(parse_qsl(data)))
d = c.request("/me")
@@ -161,7 +161,7 @@ class Handler(BaseHTTPRequestHandler):
client_secret=config["foursquare.client_secret"],
)
c.access_token_key = "oauth_token"
- c.auth_received(data)
+ c.request_token(data=data)
d = c.request("/users/24700343")
diff --git a/sanction/client.py b/sanction/client.py
index <HASH>..<HASH> 100644
--- a/sanction/client.py
+++ b/sanction/client.py
@@ -14,6 +14,7 @@ class Client(object):
self.redirect_uri = redirect_uri
self.client_id = client_id
self.client_secret = client_secret
+ self.access_token = None
self.access_token_key = "access_token"
@@ -34,12 +35,16 @@ class Client(object):
return "%s?%s" % (self.auth_endpoint, urlencode(o))
- def auth_received(self, qs, parser=None):
- if qs.has_key("error"):
- raise IOError(qs["error"])
+ def request_token(self, data=None, parser=None, grant_type=None):
+ data = data and data or {}
+ if data.has_key("error"):
+ raise IOError(data["error"])
else:
- self.__get_access_token(qs["code"], self.client_id,
- self.client_secret, parser=parser)
+ kwargs = {"grant_type": grant_type}
+ if "code" in data: kwargs["code"] = data["code"]
+
+ self.__get_access_token(self.client_id,
+ self.client_secret, parser=parser, **kwargs)
def request(self, path, qs=None, data=None):
@@ -53,15 +58,15 @@ class Client(object):
return loads(h.read())
- def __get_access_token(self, code, client_id, client_secret,
+ def __get_access_token(self, client_id, client_secret, code=None,
grant_type=None, parser=None):
parser = parser and parser or loads
o = {
- "code": code,
"client_id": client_id,
"client_secret": client_secret,
"grant_type": grant_type and grant_type or "authorization_code"
}
+ if code is not None: o["code"] = code
if self.redirect_uri is not None:
o["redirect_uri"] = self.redirect_uri
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ requires = []
setup(name="sanction",
keywords="python,oauth2",
- version="0.1-alpha-2",
+ version="0.1-alpha-3",
description="A simple, lightweight OAuth2 client",
author="Demian Brecht",
author_email="[email protected]",
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -45,16 +45,28 @@ class SanctionTests(TestCase):
self.assertEquals(d["client_id"], client_id)
- def test_auth_received(self):
+ def test_request_token(self):
# i don't want to bother mocking an oauth2 server, so i'm just going
# to test failure cases and rely on manual testing for correct ones
c = Client()
try:
- c.auth_received({ "error": "something bad happened" })
+ c.request_token({ "error": "something bad happened" })
self.fail("shouldn't hit here")
except IOError:
pass
-
+
+
+ def test_facebook_client_credentials(self):
+ c = Client(
+ token_endpoint="https://graph.facebook.com/oauth/access_token",
+ client_id="285809954824916",
+ client_secret="d985f6a3ecaffd11d61b3cd026b8753a")
+
+ self.assertEquals(c.access_token, None)
+ c.request_token(parser=lambda data: dict(parse_qsl(data)),
+ grant_type="client_credentials")
+ self.assertIsNotNone(c.access_token)
+ | updated api for token request to make more sense with other oauth2 client flows, added unit test for facebook api | demianbrecht_sanction | train |
3d6745f5098f57a58ba06e25bcd16b43c8c076de | diff --git a/ontquery/__init__.py b/ontquery/__init__.py
index <HASH>..<HASH> 100644
--- a/ontquery/__init__.py
+++ b/ontquery/__init__.py
@@ -418,11 +418,14 @@ class OntTerm(OntId):
i = None
for i, result in enumerate(results_gen):
if i > 0:
- if i == 1:
- pass
- print(repr(TermRepr(**old_result)), '\n')
- print(repr(TermRepr(**result)), '\n')
- continue
+ if result.curie == old_result.curie:
+ i = 0 # if we get the same record from multiple places it is ok
+ else:
+ if i == 1:
+ print(repr(TermRepr(**old_result)), '\n')
+ print(repr(TermRepr(**result)), '\n')
+ continue
+
if i == 0:
old_result = result
@@ -644,7 +647,8 @@ class OntQuery:
#print(red.format('AAAAAAAAAA'), result)
if result:
yield result
- return # FIXME order services based on which you want first for now, will work on merging later
+ if result.label:
+ return # FIXME order services based on which you want first for now, will work on merging later
class OntQueryCli(OntQuery):
@@ -1045,7 +1049,7 @@ class InterLexRemote(OntService): # note to self
rdll = rdflibLocal(graph)
# TODO cases where ilx is preferred will be troublesome
- maybe_out = list(rdll.query(curie=curie, label=label, predicates=predicates))
+ maybe_out = [r for r in rdll.query(curie=curie, label=label, predicates=predicates)]
if maybe_out:
out = maybe_out
else:
@@ -1101,10 +1105,14 @@ class rdflibLocal(OntService): # reccomended for local default implementation
#self.NIFRID.definingCitation:'definingCitation',
}
o = None
+ owlClass = None
for p, o in gen:
pn = translate.get(p, None)
if isinstance(o, self.rdflib.Literal):
o = o.toPython()
+ elif p == self.rdflib.RDF.type and o == self.rdflib.OWL.Class:
+ owlClass = True
+
if pn is None:
# TODO translation and support for query result structure
# FIXME lists instead of klobbering results with mulitple predicates
@@ -1116,7 +1124,7 @@ class rdflibLocal(OntService): # reccomended for local default implementation
else:
out[pn] = o
- if o is not None:
+ if o is not None and owlClass is not None:
yield QueryResult(kwargs, **out, _graph=self.graph, source=self) # if you yield here you have to yield from below
else:
for keyword, object in kwargs.items():
diff --git a/test/test_services.py b/test/test_services.py
index <HASH>..<HASH> 100644
--- a/test/test_services.py
+++ b/test/test_services.py
@@ -49,8 +49,15 @@ class TestIlx(ServiceBase, unittest.TestCase):
remote = oq.InterLexRemote(host='localhost', port='8505')
def test_problem(self):
- t = self.OntTerm('ILX:0101431')
+ curie = 'ILX:0101431'
+ t = self.OntTerm(curie)
# FIXME UBERON:0000955 is lost in predicates
+ assert t.curie == curie, t
+
+ def test_no_label(self):
+ t = self.OntTerm('NLX:60355')
+ ser = t._graph.serialize(format='nifttl').decode()
+ assert t.label, ser
class TestSciGraph(ServiceBase, unittest.TestCase):
remote = oq.SciGraphRemote() | if we get a result but no label keep going, fix for ilx records with no class, which is another issue in and of itself | tgbugs_ontquery | train |
09530911efad14baa0a8721ed6ca8e3f410afa32 | diff --git a/cmd/panic/main.go b/cmd/panic/main.go
index <HASH>..<HASH> 100644
--- a/cmd/panic/main.go
+++ b/cmd/panic/main.go
@@ -179,12 +179,13 @@ var types = map[string]struct {
"asleep": {
"panics with 'all goroutines are asleep - deadlock'",
func() {
- // When built with the race detector, this hangs!? Without, it's panics as expected.
+ // When built with the race detector, this hangs. I suspect this is due
+ // because the race detector starts a separate goroutine which causes
+ // checkdead() to not trigger. See checkdead() in src/runtime/proc.go.
+ // https://github.com/golang/go/issues/20588
+ //
// Repro:
// go install -race github.com/maruel/panicparse/cmd/panic; panic asleep
- //
- // TODO(maruel): File a bug.
- // See checkdead() in src/runtime/proc.go.
var mu sync.Mutex
mu.Lock()
mu.Lock() | panic: update asleep reference about hanging with -race
Tested on go<I>.
This is a known issue. <URL> | maruel_panicparse | train |
88c2353e2e9b0e06fe0c8ddc867b593a42d132b6 | diff --git a/gns3server/compute/qemu/qemu_vm.py b/gns3server/compute/qemu/qemu_vm.py
index <HASH>..<HASH> 100644
--- a/gns3server/compute/qemu/qemu_vm.py
+++ b/gns3server/compute/qemu/qemu_vm.py
@@ -1304,6 +1304,14 @@ class QemuVM(BaseNode):
else:
return []
+ def _spice_options(self):
+
+ if self._console:
+ return ["-spice",
+ "addr={},port={},disable-ticketing".format(self._manager.port_manager.console_host, self._console)]
+ else:
+ return []
+
def _monitor_options(self):
if self._monitor:
@@ -1563,6 +1571,8 @@ class QemuVM(BaseNode):
command.extend(self._serial_options())
elif self._console_type == "vnc":
command.extend(self._vnc_options())
+ elif self._console_type == "spice":
+ command.extend(self._spice_options())
else:
raise QemuError("Console type {} is unknown".format(self._console_type))
command.extend(self._monitor_options())
diff --git a/gns3server/schemas/qemu.py b/gns3server/schemas/qemu.py
index <HASH>..<HASH> 100644
--- a/gns3server/schemas/qemu.py
+++ b/gns3server/schemas/qemu.py
@@ -63,7 +63,7 @@ QEMU_CREATE_SCHEMA = {
},
"console_type": {
"description": "Console type",
- "enum": ["telnet", "vnc"]
+ "enum": ["telnet", "vnc", "spice"]
},
"hda_disk_image": {
"description": "QEMU hda disk image path",
@@ -244,7 +244,7 @@ QEMU_UPDATE_SCHEMA = {
},
"console_type": {
"description": "Console type",
- "enum": ["telnet", "vnc"]
+ "enum": ["telnet", "vnc", "spice"]
},
"linked_clone": {
"description": "Whether the VM is a linked clone or not",
@@ -541,7 +541,7 @@ QEMU_OBJECT_SCHEMA = {
},
"console_type": {
"description": "Console type",
- "enum": ["telnet", "vnc"]
+ "enum": ["telnet", "vnc", "spice"]
},
"initrd": {
"description": "QEMU initrd path",
diff --git a/tests/compute/qemu/test_qemu_vm.py b/tests/compute/qemu/test_qemu_vm.py
index <HASH>..<HASH> 100644
--- a/tests/compute/qemu/test_qemu_vm.py
+++ b/tests/compute/qemu/test_qemu_vm.py
@@ -366,6 +366,13 @@ def test_vnc_option(vm, tmpdir, loop, fake_qemu_img_binary):
assert '-vnc 127.0.0.1:5' in ' '.join(options)
+def test_spice_option(vm, tmpdir, loop, fake_qemu_img_binary):
+ vm._console_type = 'spice'
+ vm._console = 5905
+ options = loop.run_until_complete(asyncio.async(vm._build_command()))
+ assert '-spice addr=127.0.0.1,port=5905,disable-ticketing' in ' '.join(options)
+
+
def test_disk_options_multiple_disk(vm, tmpdir, loop, fake_qemu_img_binary):
vm._hda_disk_image = str(tmpdir / "test0.qcow2") | QEMU - SPICE support in build options and schemas | GNS3_gns3-server | train |
78c31358becdfaabf1bdf1d383faa3e45a34e561 | diff --git a/PySimpleGUI.py b/PySimpleGUI.py
index <HASH>..<HASH> 100644
--- a/PySimpleGUI.py
+++ b/PySimpleGUI.py
@@ -2013,7 +2013,7 @@ class Window:
'''
Display a user defined for and return the filled in data
'''
- def __init__(self, title, default_element_size=DEFAULT_ELEMENT_SIZE, default_button_element_size = (None, None), auto_size_text=None, auto_size_buttons=None, location=(None, None), button_color=None, font=None, progress_bar_color=(None, None), background_color=None, is_tabbed_form=False, border_depth=None, auto_close=False, auto_close_duration=DEFAULT_AUTOCLOSE_TIME, icon=DEFAULT_WINDOW_ICON, return_keyboard_events=False, use_default_focus=True, text_justification=None, no_titlebar=False, grab_anywhere=False, keep_on_top=False):
+ def __init__(self, title, default_element_size=DEFAULT_ELEMENT_SIZE, default_button_element_size = (None, None), auto_size_text=None, auto_size_buttons=None, location=(None, None), button_color=None, font=None, progress_bar_color=(None, None), background_color=None, is_tabbed_form=False, border_depth=None, auto_close=False, auto_close_duration=DEFAULT_AUTOCLOSE_TIME, icon=DEFAULT_WINDOW_ICON, force_toplevel = False, return_keyboard_events=False, use_default_focus=True, text_justification=None, no_titlebar=False, grab_anywhere=False, keep_on_top=False):
self.AutoSizeText = auto_size_text if auto_size_text is not None else DEFAULT_AUTOSIZE_TEXT
self.AutoSizeButtons = auto_size_buttons if auto_size_buttons is not None else DEFAULT_AUTOSIZE_BUTTONS
self.Title = title
@@ -2054,6 +2054,7 @@ class Window:
self.NoTitleBar = no_titlebar
self.GrabAnywhere = grab_anywhere
self.KeepOnTop = keep_on_top
+ self.ForceTopLevel = force_toplevel
# ------------------------- Add ONE Row to Form ------------------------- #
def AddRow(self, *args):
@@ -3471,8 +3472,13 @@ def StartupTK(my_flex_form):
global _my_windows
ow = _my_windows.NumOpenWindows
+
# print('Starting TK open Windows = {}'.format(ow))
- root = tk.Tk() if not ow else tk.Toplevel()
+ if not ow and not my_flex_form.ForceTopLevel:
+ root = tk.Tk()
+ else:
+ root = tk.Toplevel()
+
try:
root.attributes('-alpha', 0) # hide window while building it. makes for smoother 'paint'
except: | New Window option - Force top level - forces the window to be a Toplevel tkinter window | PySimpleGUI_PySimpleGUI | train |
66a6f69b6599bd149ab24000291683cd3674c8c4 | diff --git a/tests/test_types.py b/tests/test_types.py
index <HASH>..<HASH> 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -73,6 +73,13 @@ def test_multiple():
assert hug.types.multiple(['value1', 'value2']) == ['value1', 'value2']
+def test_delimited_list():
+ '''Test to ensure Hug's custom delimited list type function works as expected'''
+ assert hug.types.delimited_list(',')('value1,value2') == ['value1', 'value2']
+ assert hug.types.delimited_list(',')(['value1', 'value2']) == ['value1', 'value2']
+ assert hug.types.delimited_list('|-|')('value1|-|value2|-|value3,value4') == ['value1', 'value2', 'value3,value4']
+
+
def test_comma_separated_list():
'''Tests that Hug's comma separated type correctly converts into a Python list'''
assert hug.types.comma_separated_list('value') == ['value'] | Add tests for desired support of custom delimited lists | hugapi_hug | train |
a0ccd949c2dd793814a835c063cefee961fee941 | diff --git a/resource.go b/resource.go
index <HASH>..<HASH> 100644
--- a/resource.go
+++ b/resource.go
@@ -58,9 +58,7 @@ type Resource struct {
func (r *Resource) InitializeObject(obj interface{}) {
// initialize model
- if model, ok := obj.(Model); ok {
- Init(model)
- }
+ Init(obj.(Model))
}
func (r *Resource) FindAll(req api2go.Request) (api2go.Responder, error) {
@@ -121,9 +119,7 @@ func (r *Resource) FindAll(req api2go.Request) (api2go.Responder, error) {
// initialize each model
s := reflect.ValueOf(slice)
for i := 0; i < s.Len(); i++ {
- if model, ok := s.Index(i).Interface().(Model); ok {
- Init(model)
- }
+ Init(s.Index(i).Interface().(Model))
}
return &response{Data: slice}, nil
@@ -157,10 +153,7 @@ func (r *Resource) FindOne(id string, req api2go.Request) (api2go.Responder, err
}
// get model
- model, ok := obj.(Model)
- if !ok {
- return nil, api2go.NewHTTPError(nil, "invalid input given", http.StatusBadRequest)
- }
+ model := obj.(Model)
// initialize model
Init(model)
@@ -181,10 +174,7 @@ func (r *Resource) Create(obj interface{}, req api2go.Request) (api2go.Responder
}
// get model
- model, ok := obj.(Model)
- if !ok {
- return nil, api2go.NewHTTPError(nil, "invalid input given", http.StatusBadRequest)
- }
+ model := obj.(Model)
// validate model
err := model.Validate(true)
@@ -224,10 +214,7 @@ func (r *Resource) Update(obj interface{}, req api2go.Request) (api2go.Responder
}
// get model
- model, ok := obj.(Model)
- if !ok {
- return nil, api2go.NewHTTPError(nil, "invalid input given", http.StatusBadRequest)
- }
+ model := obj.(Model)
// validate model
err := model.Validate(false) | objects have to implement Model unless something is badly wrong | 256dpi_fire | train |
e882fdd63be9e8e73f970bae72281b7711623ac9 | diff --git a/docs.js b/docs.js
index <HASH>..<HASH> 100644
--- a/docs.js
+++ b/docs.js
@@ -59,10 +59,15 @@ function makeAppDocs(name, env, cb) {
if (!util.exists(source)) {
cb(util.errorWithUsage(5, 'Not an application directory'), usage);
+ return;
+ }
- } else {
- makeDocs(name, source, env, cb);
+ if (!name) {
+ name = env.app.name;
+ log.info('No name specified, using "%s" from package.json', name);
}
+
+ makeDocs(name, source, env, cb);
}
function makeMojitDocs(name, env, cb) {
@@ -70,7 +75,7 @@ function makeMojitDocs(name, env, cb) {
err;
if (!name) {
- err = 'Please specify mojit name';
+ err = 'Please specify mojit name or path.';
} else if (!source) {
err = 'Cannot find mojit ' + name;
@@ -80,6 +85,7 @@ function makeMojitDocs(name, env, cb) {
cb(util.errorWithUsage(5, err, usage));
} else {
+ env.opts.directory = resolve(env.opts.directory, source);
makeDocs(name, source, env, cb);
}
}
@@ -91,12 +97,13 @@ function makeMojitoDocs(name, env, cb) {
cb(util.error(7, 'Cannot find the Mojito library'));
} else {
+ env.opts.directory = resolve(env.opts.directory, 'mojito');
makeDocs(name, env.mojito.path, env, cb);
}
}
function main(env, cb) {
- var type = (env.args.shift() || '').toLowerCase(),
+ var type = (env.args.shift() || 'app').toLowerCase(),
name = env.args.shift() || '',
exclude = env.opts.exclude || [];
@@ -137,7 +144,7 @@ module.exports = main;
module.exports.makeDocs = makeDocs;
module.exports.usage = usage = [
- 'Usage: mojito docs <type> [name] [--directory] [--server] [--port]',
+ 'Usage: mojito doc <type> [name] [--directory] [--server] [--port]',
' <type> "mojito", "app" or "mojit", required',
' [name] name for docs, required for type "mojit"',
'',
@@ -145,7 +152,10 @@ module.exports.usage = usage = [
' mojito docs app foo',
' (creates directory "artifacts/docs/app/foo" containing that apps\'s docs)',
'',
- ' mojito docs mojit Bar --directory ~/mydocs',
+ ' mojito doc',
+ ' (same as above, uses name in package.json for app name)',
+ '',
+ ' mojito doc mojit Bar --directory ~/mydocs',
' (creates directory ~/mydocs/mojits/Bar containing docs for mojit Bar)',
'',
'Options:',
diff --git a/tests/docs.js b/tests/docs.js
index <HASH>..<HASH> 100644
--- a/tests/docs.js
+++ b/tests/docs.js
@@ -12,10 +12,10 @@ function getEnv(args, opts) {
};
}
-test('mojito docs (no args)', function(t) {
+test('mojito docs (no args) // now same as `mojito doc app`', function(t) {
function cb(err, msg) {
- t.ok(err.message.match(/Unknown type/));
+ t.ok(err.message.match(/Not an application dir/));
}
t.plan(1); | fix yahoo/mojito-cli#<I> have `mojito doc` assume the common case
invoking `mojito doc` without further arguments in an application
directory assumes `mojito doc app <name>`, and uses the name in
the app's package.json for <name>. | YahooArchive_mojito-cli-doc | train |
5dbe02964c8fb9d717f15f7d12e550497d083e51 | diff --git a/guice/common/src/main/java/com/peterphi/std/guice/common/daemon/GuiceDaemonRegistry.java b/guice/common/src/main/java/com/peterphi/std/guice/common/daemon/GuiceDaemonRegistry.java
index <HASH>..<HASH> 100644
--- a/guice/common/src/main/java/com/peterphi/std/guice/common/daemon/GuiceDaemonRegistry.java
+++ b/guice/common/src/main/java/com/peterphi/std/guice/common/daemon/GuiceDaemonRegistry.java
@@ -2,6 +2,7 @@ package com.peterphi.std.guice.common.daemon;
import com.google.inject.Singleton;
+import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -29,6 +30,7 @@ public final class GuiceDaemonRegistry
{
return daemons.stream()
.filter(d -> !(d instanceof GuiceRecurringDaemon))
+ .sorted(Comparator.comparing(GuiceDaemon::getName))
.collect(Collectors.toList());
}
@@ -43,6 +45,7 @@ public final class GuiceDaemonRegistry
return daemons.stream()
.filter(d -> d instanceof GuiceRecurringDaemon)
.map(d -> (GuiceRecurringDaemon) d)
+ .sorted(Comparator.comparing(GuiceDaemon::getName))
.collect(Collectors.toList());
}
} | When returning daemons, sort by their name | petergeneric_stdlib | train |
50829db4ef60e59a80de79704bf91e5a3afa12f0 | diff --git a/girder/api/v1/user.py b/girder/api/v1/user.py
index <HASH>..<HASH> 100644
--- a/girder/api/v1/user.py
+++ b/girder/api/v1/user.py
@@ -212,15 +212,15 @@ class User(Resource):
email=params['email'], firstName=params['firstName'],
lastName=params['lastName'], admin=admin)
+ outputUser = self.model('user').filter(user, user)
if not currentUser and self.model('user').canLogin(user):
setattr(cherrypy.request, 'girderUser', user)
token = self.sendAuthTokenCookie(user)
- user['authToken'] = {
+ outputUser['authToken'] = {
'token': token['_id'],
'expires': token['expires']
}
-
- return user
+ return outputUser
@access.user
@loadmodel(map={'id': 'userToDelete'}, model='user', level=AccessType.ADMIN)
@@ -276,7 +276,8 @@ class User(Resource):
else:
raise AccessException('Only admins may change status.')
- return self.model('user').save(user)
+ user = self.model('user').save(user)
+ return self.model('user').filter(user, user)
@access.admin
@loadmodel(model='user', level=AccessType.ADMIN)
@@ -456,13 +457,13 @@ class User(Resource):
user['emailVerified'] = True
self.model('token').remove(token)
- self.model('user').save(user)
+ user = self.model('user').save(user)
if self.model('user').canLogin(user):
setattr(cherrypy.request, 'girderUser', user)
authToken = self.sendAuthTokenCookie(user)
return {
- 'user': user,
+ 'user': self.model('user').filter(user, user),
'authToken': {
'token': authToken['_id'],
'expires': authToken['expires'],
@@ -472,7 +473,7 @@ class User(Resource):
}
else:
return {
- 'user': user,
+ 'user': self.model('user').filter(user, user),
'message': 'Email verification succeeded.'
} | Perform filtering of all returned user documents in the user API
Models returned through the API should almost always be filtered. Users
may not necessarily have access to every field in their own user document. | girder_girder | train |
0be208b1eef6b4521a1c50ae20cacfb9a7997ebc | diff --git a/phpunit.xml b/phpunit.xml
index <HASH>..<HASH> 100644
--- a/phpunit.xml
+++ b/phpunit.xml
@@ -9,7 +9,4 @@
<directory suffix=".php">./src/</directory>
</whitelist>
</filter>
- <listeners>
- <listener class="\Mockery\Adapter\Phpunit\TestListener"></listener>
- </listeners>
</phpunit>
diff --git a/tests/unit/src/Handler/LogHandlerTest.php b/tests/unit/src/Handler/LogHandlerTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/src/Handler/LogHandlerTest.php
+++ b/tests/unit/src/Handler/LogHandlerTest.php
@@ -89,4 +89,9 @@ class LogHandlerTest extends PHPUnit_Framework_TestCase {
$this->fail($e->getMessage());
}
}
+
+ protected function tearDown()
+ {
+ Mockery::close();
+ }
}
\ No newline at end of file
diff --git a/tests/unit/src/Handler/RavenHandlerTest.php b/tests/unit/src/Handler/RavenHandlerTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/src/Handler/RavenHandlerTest.php
+++ b/tests/unit/src/Handler/RavenHandlerTest.php
@@ -35,4 +35,9 @@ class RavenHandlerTest extends PHPUnit_Framework_TestCase
$handler->handle(new ErrorException('test', 0, E_NOTICE));
}
+
+ protected function tearDown()
+ {
+ Mockery::close();
+ }
}
diff --git a/tests/unit/src/RunnerTest.php b/tests/unit/src/RunnerTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/src/RunnerTest.php
+++ b/tests/unit/src/RunnerTest.php
@@ -254,4 +254,9 @@ class RunnerTest extends \PHPUnit_Framework_TestCase {
RunnerExt::$LAST_ERROR = E_ERROR;
}
+
+ protected function tearDown()
+ {
+ Mockery::close();
+ }
}
\ No newline at end of file
diff --git a/tests/unit/src/Util/FrameTest.php b/tests/unit/src/Util/FrameTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/src/Util/FrameTest.php
+++ b/tests/unit/src/Util/FrameTest.php
@@ -107,6 +107,4 @@ class FrameTest extends PHPUnit_Framework_TestCase {
$unserializedObj = unserialize($serialized);
$this->assertTrue(($this->frame == $unserializedObj));
}
-
-
}
\ No newline at end of file
diff --git a/tests/unit/src/Util/InspectorTest.php b/tests/unit/src/Util/InspectorTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit/src/Util/InspectorTest.php
+++ b/tests/unit/src/Util/InspectorTest.php
@@ -34,4 +34,5 @@ class InspectorText extends PHPUnit_Framework_TestCase
$this->assertEquals(count($this->exception->getTrace()), count($frames));
$this->assertTrue($this->inspector->hasFrames());
}
+
}
\ No newline at end of file | Fixing the tests to remove the Mockery listener. | thephpleague_booboo | train |
02298b55394963f1766cc25aa98e35733afec20f | diff --git a/node/agent.go b/node/agent.go
index <HASH>..<HASH> 100644
--- a/node/agent.go
+++ b/node/agent.go
@@ -557,13 +557,14 @@ func configureContainer(a *HostAgent, client *ControlClient,
}
cfg.Volumes = make(map[string]struct{})
- hcfg.Binds = []string{}
+ bindsMap := make(map[string]string) // map to prevent duplicate path assignments. Use to populate hcfg.Binds later.
if err := injectContext(svc, serviceState, client); err != nil {
glog.Errorf("Error injecting context: %s", err)
return nil, nil, err
}
+ // iterate svc.Volumes - create bindings for non-dfs volumes
for _, volume := range svc.Volumes {
if volume.Type != "" && volume.Type != "dfs" {
continue
@@ -574,28 +575,34 @@ func configureContainer(a *HostAgent, client *ControlClient,
return nil, nil, err
}
- binding := fmt.Sprintf("%s:%s", resourcePath, volume.ContainerPath)
- cfg.Volumes[strings.Split(binding, ":")[1]] = struct{}{}
- hcfg.Binds = append(hcfg.Binds, strings.TrimSpace(binding))
+ resourcePath = strings.TrimSpace(resourcePath)
+ containerPath := strings.TrimSpace(volume.ContainerPath)
+ cfg.Volumes[containerPath] = struct {}{}
+ bindsMap[containerPath] = resourcePath
}
+
+ // mount serviced path
dir, binary, err := ExecPath()
if err != nil {
glog.Errorf("Error getting exec path: %v", err)
return nil, nil, err
}
- volumeBinding := fmt.Sprintf("%s:/serviced", dir)
- cfg.Volumes[strings.Split(volumeBinding, ":")[1]] = struct{}{}
- hcfg.Binds = append(hcfg.Binds, strings.TrimSpace(volumeBinding))
+
+ resourcePath := strings.TrimSpace(dir)
+ containerPath := strings.TrimSpace("/serviced")
+ cfg.Volumes[containerPath] = struct{}{}
+ bindsMap[containerPath] = resourcePath
// bind mount everything we need for logstash-forwarder
if len(svc.LogConfigs) != 0 {
const LOGSTASH_CONTAINER_DIRECTORY = "/usr/local/serviced/resources/logstash"
logstashPath := utils.ResourcesDir() + "/logstash"
- binding := fmt.Sprintf("%s:%s", logstashPath, LOGSTASH_CONTAINER_DIRECTORY)
- cfg.Volumes[LOGSTASH_CONTAINER_DIRECTORY] = struct{}{}
- hcfg.Binds = append(hcfg.Binds, binding)
- glog.V(1).Infof("added logstash bind mount: %s", binding)
+ resourcePath := strings.TrimSpace(logstashPath)
+ containerPath := strings.TrimSpace(LOGSTASH_CONTAINER_DIRECTORY)
+ cfg.Volumes[containerPath] = struct{}{}
+ bindsMap[containerPath] = resourcePath
+ glog.V(1).Infof("added logstash bind mount: %s", fmt.Sprintf("%s:%s", resourcePath, containerPath))
}
// specify temporary volume paths for docker to create
@@ -652,15 +659,24 @@ func configureContainer(a *HostAgent, client *ControlClient,
}
if matchedRequestedImage {
- binding := fmt.Sprintf("%s:%s", hostPath, containerPath)
- cfg.Volumes[strings.Split(binding, ":")[1]] = struct{}{}
- hcfg.Binds = append(hcfg.Binds, strings.TrimSpace(binding))
+ hostPath = strings.TrimSpace(hostPath)
+ containerPath = strings.TrimSpace(containerPath)
+ cfg.Volumes[containerPath] = struct{}{}
+ bindsMap[containerPath] = hostPath
}
} else {
glog.Warningf("Could not bind mount the following: %s", bindMountString)
}
}
+ // transfer bindsMap to hcfg.Binds
+ hcfg.Binds = []string{}
+ for containerPath, hostPath := range(bindsMap) {
+ binding := fmt.Sprintf("%s:%s", hostPath, containerPath)
+ hcfg.Binds = append(hcfg.Binds, binding)
+ }
+
+
// Get host IP
ips, err := utils.GetIPv4Addresses()
if err != nil { | CC-<I>: de-dupe paths for docker bind mounting | control-center_serviced | train |
a4e48296d2fe896e499739e2903d4d6bc6a9688c | diff --git a/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreAutoConfiguration.java b/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreAutoConfiguration.java
index <HASH>..<HASH> 100644
--- a/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreAutoConfiguration.java
+++ b/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreAutoConfiguration.java
@@ -103,6 +103,13 @@ public class CrnkCoreAutoConfiguration implements ApplicationContextAware {
}
}
+ if (properties.getEnforceDotSeparator() != null) {
+ QuerySpecUrlMapper urlMapper = boot.getUrlMapper();
+ if (urlMapper instanceof DefaultQuerySpecUrlMapper) {
+ ((DefaultQuerySpecUrlMapper) urlMapper).setEnforceDotPathSeparator(properties.getEnforceDotSeparator());
+ }
+ }
+
boot.boot();
return boot;
}
diff --git a/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreProperties.java b/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreProperties.java
index <HASH>..<HASH> 100644
--- a/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreProperties.java
+++ b/crnk-setup/crnk-setup-spring-boot2/src/main/java/io/crnk/spring/setup/boot/core/CrnkCoreProperties.java
@@ -50,6 +50,11 @@ public class CrnkCoreProperties {
*/
private Boolean return404OnNull;
+ /**
+ * Disabled by default to maintain compatibility, but recommended to enable.
+ */
+ private Boolean enforceDotSeparator;
+
public String getResourcePackage() {
return resourcePackage;
}
@@ -62,6 +67,14 @@ public class CrnkCoreProperties {
this.enabled = enabled;
}
+ public Boolean getEnforceDotSeparator() {
+ return enforceDotSeparator;
+ }
+
+ public void setEnforceDotSeparator(Boolean enforceDotSeparator) {
+ this.enforceDotSeparator = enforceDotSeparator;
+ }
+
/**
* @deprecated use dependency injection
*/ | support setting dot separator handling in spring boot #<I> | crnk-project_crnk-framework | train |
7891afcec82de620ec063d5f44fbd8b860e27edd | diff --git a/examples/basic_voice.py b/examples/basic_voice.py
index <HASH>..<HASH> 100644
--- a/examples/basic_voice.py
+++ b/examples/basic_voice.py
@@ -24,7 +24,6 @@ ytdl_format_options = {
}
ffmpeg_options = {
- 'before_options': '-nostdin',
'options': '-vn'
} | Remove deprecated ffmpeg/avconv flag from basic_voice example | Rapptz_discord.py | train |
1a34bc544e529957198777c68722baadaa215fba | diff --git a/pkg/command/map_string_test.go b/pkg/command/map_string_test.go
index <HASH>..<HASH> 100644
--- a/pkg/command/map_string_test.go
+++ b/pkg/command/map_string_test.go
@@ -39,6 +39,15 @@ func TestGetStringMapString(t *testing.T) {
wantErr: assert.NoError,
},
{
+ name: "valid empty json",
+ args: args{
+ key: "FOO_BAR",
+ value: "{}",
+ },
+ want: map[string]string{},
+ wantErr: assert.NoError,
+ },
+ {
name: "invalid json format with extra comma at the end",
args: args{
key: "FOO_BAR", | command: Add additional string map unit test case
This test already passes, but it's a good corner case to test to avoid
running into regressions. | cilium_cilium | train |
fa3fa2db5a4f7eb6f0b98d1608ee8ee357900fe9 | diff --git a/FeatureContext.php b/FeatureContext.php
index <HASH>..<HASH> 100644
--- a/FeatureContext.php
+++ b/FeatureContext.php
@@ -406,6 +406,9 @@ class FeatureContext extends BehatContext {
$process = new Process("drush @{$this->drushAlias} \"{$role}\" {$name}");
$process->setTimeout(3600);
$process->run();
+ if (!$process->isSuccessful()) {
+ throw new RuntimeException($process->getErrorOutput());
+ }
}
// Login. | Throw an exception if the drush user-add-role command fails. | jhedstrom_DrupalDriver | train |
eb135801f2db36583ce6697e9fd1305c12843a46 | diff --git a/src/utilities/findBreakingChanges.js b/src/utilities/findBreakingChanges.js
index <HASH>..<HASH> 100644
--- a/src/utilities/findBreakingChanges.js
+++ b/src/utilities/findBreakingChanges.js
@@ -44,7 +44,7 @@ export const BreakingChangeType = {
export const DangerousChangeType = {
ARG_DEFAULT_VALUE_CHANGE: 'ARG_DEFAULT_VALUE_CHANGE',
VALUE_ADDED_TO_ENUM: 'VALUE_ADDED_TO_ENUM',
- INTERFACE_ADDED_TO_OBJECT: 'INTERFACE_ADDED_TO_OBJECT'
+ INTERFACE_ADDED_TO_OBJECT: 'INTERFACE_ADDED_TO_OBJECT',
TYPE_ADDED_TO_UNION: 'TYPE_ADDED_TO_UNION',
};
@@ -88,8 +88,8 @@ export function findDangerousChanges(
return [
...findArgChanges(oldSchema, newSchema).dangerousChanges,
...findValuesAddedToEnums(oldSchema, newSchema),
- ...findInterfacesAddedToObjectTypes(oldSchema, newSchema)
- ...findTypesAddedToUnions(oldSchema, newSchema)
+ ...findInterfacesAddedToObjectTypes(oldSchema, newSchema),
+ ...findTypesAddedToUnions(oldSchema, newSchema),
];
} | Trailing commas missed in conflict resolution | graphql_graphql-js | train |
72e0bd5f21a508df10dd693956c9dfda186162f7 | diff --git a/lib/xcode-installer/install.rb b/lib/xcode-installer/install.rb
index <HASH>..<HASH> 100644
--- a/lib/xcode-installer/install.rb
+++ b/lib/xcode-installer/install.rb
@@ -19,6 +19,8 @@ module XcodeInstaller
mgr = XcodeInstaller::ReleaseManager.new
@release = mgr.get_release(options.release, options.pre_release)
+ puts file_name()
+
files = Dir.glob('*.dmg')
if files.length == 0
puts 'No .dmg files found in current directory. Run the download command first.'
@@ -45,5 +47,9 @@ module XcodeInstaller
system 'hdiutil detach -quiet #{mountpoint}'
end
+ def file_name
+ return File.basename(@release['download_url'])
+ end
+
end
end | Create file_name method to extract the .dmg file name off the end of the download_url | phatblat_xcode-installer | train |
706f6d647ac56e6d105d2fadf0358dbbc6544158 | diff --git a/core/Archive.php b/core/Archive.php
index <HASH>..<HASH> 100644
--- a/core/Archive.php
+++ b/core/Archive.php
@@ -156,6 +156,34 @@ abstract class Piwik_Archive
'sum_daily_nb_uniq_visitors' => Piwik_Archive::INDEX_SUM_DAILY_NB_UNIQ_VISITORS,
);
+ /**
+ * Metrics calculated and archived by the Actions plugin.
+ *
+ * @var array
+ */
+ public static $actionsMetrics = array(
+ 'nb_pageviews',
+ 'nb_uniq_pageviews',
+ 'nb_downloads',
+ 'nb_uniq_downloads',
+ 'nb_outlinks',
+ 'nb_uniq_outlinks',
+ 'nb_searches',
+ 'nb_keywords',
+ 'nb_hits',
+ 'sum_time_spent',
+ 'exit_nb_uniq_visitors',
+ 'exit_nb_visits',
+ 'sum_daily_exit_nb_uniq_visitors',
+ 'entry_nb_uniq_visitors',
+ 'sum_daily_entry_nb_uniq_visitors',
+ 'entry_nb_visits',
+ 'entry_nb_actions',
+ 'entry_sum_visit_length',
+ 'entry_bounce_count',
+ 'nb_hits_following_search',
+ );
+
const LABEL_ECOMMERCE_CART = 'ecommerceAbandonedCart';
const LABEL_ECOMMERCE_ORDER = 'ecommerceOrder';
diff --git a/core/Archive/Single.php b/core/Archive/Single.php
index <HASH>..<HASH> 100644
--- a/core/Archive/Single.php
+++ b/core/Archive/Single.php
@@ -593,7 +593,13 @@ class Piwik_Archive_Single extends Piwik_Archive
{
return 'Goals_Metrics';
}
- return $metric;
+ // Actions metrics are processed by the Actions plugin (HACK) (3RD HACK IN FACT) (YES, THIS IS TOO MUCH HACKING)
+ // (FIXME PLEASE).
+ if (in_array($metric, Piwik_Archive::$actionsMetrics))
+ {
+ return 'Actions_Metrics';
+ }
+ return $metric;
}
/**
diff --git a/core/ArchiveProcessing.php b/core/ArchiveProcessing.php
index <HASH>..<HASH> 100644
--- a/core/ArchiveProcessing.php
+++ b/core/ArchiveProcessing.php
@@ -662,7 +662,8 @@ abstract class Piwik_ArchiveProcessing
if(empty($plugin)
|| !Piwik_PluginsManager::getInstance()->isPluginActivated($plugin))
{
- throw new Exception("Error: The report '$requestedReport' was requested but it is not available at this stage. You may also disable the related plugin to avoid this error.");
+ $pluginStr = empty($plugin) ? '' : "($plugin)";
+ throw new Exception("Error: The report '$requestedReport' was requested but it is not available at this stage. You may also disable the related plugin $pluginStr to avoid this error.");
}
return $plugin;
}
diff --git a/plugins/VisitsSummary/Controller.php b/plugins/VisitsSummary/Controller.php
index <HASH>..<HASH> 100644
--- a/plugins/VisitsSummary/Controller.php
+++ b/plugins/VisitsSummary/Controller.php
@@ -95,7 +95,8 @@ class Piwik_VisitsSummary_Controller extends Piwik_Controller
// by a method that already calls the API with some generic filters applied
"&disable_generic_filters=1";
$request = new Piwik_API_Request($requestString);
- return $request->process();
+ $result = $request->process();
+ return empty($result) ? new Piwik_DataTable() : $result;
}
static public function getVisits() | Fixes #<I>, make sure Actions metrics are associated w/ an Actions report in Piwik_Archive_Single::getRequestedReportFor and make sure Piwik_VisitsSummary_Controller::getVisitsSummary can handle case where Piwik_API_Request returns false.
git-svn-id: <URL> | matomo-org_matomo | train |
e8f82814cea18002cc71b95b00b0181129c9f82f | diff --git a/features/repo-updates.go b/features/repo-updates.go
index <HASH>..<HASH> 100644
--- a/features/repo-updates.go
+++ b/features/repo-updates.go
@@ -155,15 +155,28 @@ func (o *RepoInfo) Initialize(config *github.Config) error {
if !finfo.IsDir() {
return fmt.Errorf("--repo-dir is not a directory")
}
+
+ // check if the cloned dir already exists, if yes, cleanup.
+ if _, err := os.Stat(o.projectDir); !os.IsNotExist(err) {
+ if err := o.cleanUp(o.projectDir); err != nil {
+ return fmt.Errorf("Unable to remove old clone directory at %v: %v", o.projectDir, err)
+ }
+ }
+
if cloneUrl, err := o.cloneRepo(); err != nil {
return fmt.Errorf("Unable to clone %v: %v", cloneUrl, err)
}
return o.updateRepoUsers()
}
+func (o *RepoInfo) cleanUp(path string) error {
+ err := os.RemoveAll(path)
+ return err
+}
+
func (o *RepoInfo) cloneRepo() (string, error) {
cloneUrl := fmt.Sprintf("https://github.com/%s/%s.git", o.config.Org, o.config.Project)
- _, err := o.gitCommandDir([]string{"clone", cloneUrl}, o.baseDir)
+ _, err := o.gitCommandDir([]string{"clone", cloneUrl, o.projectDir}, o.baseDir)
return cloneUrl, err
} | Fixed crashing submit-queue when container restarts.
While emptyDir presents an empty directory when a pod is killed and
recreated by the deployment, it retains contents between container
restarts. This change removes the existing directory before trying to
clone again. | kubernetes_test-infra | train |
96a549271f3cd192c49c6c9d05529c4d88bb4cdf | diff --git a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/AuthType.java b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/AuthType.java
index <HASH>..<HASH> 100644
--- a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/AuthType.java
+++ b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/AuthType.java
@@ -25,7 +25,7 @@ public enum AuthType {
this.value = value;
}
- public static String getAuthType(String input) throws RuntimeException {
+ public static String getAuthType(String input) {
if (isBlank(input)) {
return NONE.getValue();
}
diff --git a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/BucketType.java b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/BucketType.java
index <HASH>..<HASH> 100644
--- a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/BucketType.java
+++ b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/BucketType.java
@@ -25,7 +25,7 @@ public enum BucketType {
this.value = value;
}
- public static String getBucketType(String input) throws RuntimeException {
+ public static String getBucketType(String input) {
if (isBlank(input)) {
return MEMCACHED.getValue();
}
diff --git a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/ConflictResolutionType.java b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/ConflictResolutionType.java
index <HASH>..<HASH> 100644
--- a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/ConflictResolutionType.java
+++ b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/ConflictResolutionType.java
@@ -25,7 +25,7 @@ public enum ConflictResolutionType {
this.value = value;
}
- public static String getConflictResolutionType(String input) throws RuntimeException {
+ public static String getConflictResolutionType(String input) {
if (isBlank(input)) {
return SEQNO.getValue();
}
diff --git a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/EvictionPolicy.java b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/EvictionPolicy.java
index <HASH>..<HASH> 100644
--- a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/EvictionPolicy.java
+++ b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/EvictionPolicy.java
@@ -25,7 +25,7 @@ public enum EvictionPolicy {
this.value = value;
}
- public static String getEvictionPolicy(String input) throws RuntimeException {
+ public static String getEvictionPolicy(String input) {
if (isBlank(input)) {
return VALUE_ONLY.getValue();
}
diff --git a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/UriSuffix.java b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/UriSuffix.java
index <HASH>..<HASH> 100644
--- a/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/UriSuffix.java
+++ b/cs-couchbase/src/main/java/io/cloudslang/content/couchbase/entities/couchbase/UriSuffix.java
@@ -26,7 +26,7 @@ public enum UriSuffix {
this.value = value;
}
- public static String getUriSuffix(String input) throws RuntimeException {
+ public static String getUriSuffix(String input) {
for (UriSuffix uriSuffix : UriSuffix.values()) {
if (uriSuffix.getKey().equalsIgnoreCase(input)) {
return uriSuffix.getValue();
@@ -39,6 +39,7 @@ public enum UriSuffix {
private String getKey() {
return key;
}
+
private String getValue() {
return value;
} | Couchbase - Create or Edit Bucket - handled review comments. | CloudSlang_cs-actions | train |
63ed3f70a409700c26eecc84a57b4658ae18fe6f | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -235,7 +235,7 @@ var $wrap = {
if ($isDBNull(txt)) {
return 'null';
}
- if(typeof(txt) !== 'string'){
+ if (typeof(txt) !== 'string') {
txt = txt.toString();
}
// replacing each single-quote symbol with two, and then
@@ -294,7 +294,9 @@ function $formatQuery(query, values) {
if (Array.isArray(values)) {
for (var i = 0; i < values.length; i++) {
var variable = '$' + (i + 1);
- if (q.indexOf(variable) === -1) {
+ // variable name must exist and not be followed by a digit;
+ var pattern = '\\' + variable + '(?!\\d)';
+ if (q.search(pattern) == -1) {
result.success = false;
result.error = "More values passed in array than variables in the query.";
break;
@@ -307,14 +309,15 @@ function $formatQuery(query, values) {
result.error = "Cannot convert parameter with index " + i;
break;
} else {
- var reg = new RegExp("\\" + variable, "g");
+ var reg = new RegExp(pattern, 'g');
q = q.replace(reg, value);
}
}
}
} else {
if (values !== undefined) {
- if (q.indexOf('$1') === -1) {
+ // variable name must exist and not be followed by a digit;
+ if (q.search(/\$1(?!\d)/) == -1) {
// a single value was passed, but variable $1 doesn't exist;
result.success = false;
result.error = "No variable found in the query to replace with the passed value.";
@@ -326,7 +329,7 @@ function $formatQuery(query, values) {
result.success = false;
result.error = "Cannot convert type '" + typeof(values) + "' into a query variable value.";
} else {
- q = q.replace(/\$1/g, value);
+ q = q.replace(/\$1(?!\d)/g, value);
}
}
}
@@ -491,7 +494,7 @@ function $extendProtocol(obj, cn, db, options) {
// connection attachment helper;
function attach(obj, int) {
if (txDB.client) {
- throw new Error('Invalid transaction attachment'); // this should never happen;
+ throw new Error("Invalid transaction attachment."); // this should never happen;
}
txDB.client = obj.client;
txDB.done = obj.done;
@@ -504,7 +507,7 @@ function $extendProtocol(obj, cn, db, options) {
// connection detachment helper;
function detach() {
if (!txDB.client) {
- throw new Error('Invalid transaction detachment'); // this should never happen;
+ throw new Error("Invalid transaction detachment."); // this should never happen;
}
if (internal) {
// connection was allocated;
diff --git a/test/indexSpec.js b/test/indexSpec.js
index <HASH>..<HASH> 100644
--- a/test/indexSpec.js
+++ b/test/indexSpec.js
@@ -274,5 +274,58 @@ describe("Type conversion in pgp.as", function () {
expect(q.success).toBe(false);
expect(q.error).toBe("Cannot convert parameter with index 1");
+ // test that errors in type conversion are
+ // detected and reported from left to right;
+ q = pgp.as.format("$1,$2", [{}, {}]);
+ expect(q.success).toBe(false);
+ expect(q.error).toBe("Cannot convert parameter with index 0");
+
+ // test that once a conversion issue is encountered,
+ // the rest of parameters are not verified;
+ q = pgp.as.format("$1,$2", [1, {}, 2, 3, 4, 5]);
+ expect(q.success).toBe(false);
+ expect(q.error).toBe("Cannot convert parameter with index 1");
+
+ // testing with lots of variables;
+ var source = "", dest = "", params = [];
+ for(var i = 1;i <= 1000;i ++){
+ source += '$' + i;
+ dest += i;
+ params.push(i);
+ }
+ q = pgp.as.format(source, params);
+ expect(q.success).toBe(true);
+ expect(q.query).toBe(dest);
+
+ // testing various cases with many variables:
+ // - variables next to each other;
+ // - variables not defined;
+ // - variables are repeated;
+ // - long variable names present;
+ q = pgp.as.format("$1$2,$3,$4,$5,$6,$7,$8,$9,$10$11,$12,$13,$14,$15,$1,$3", [1, 2, 'C', 'DDD', 'E', 'F', 'G', 'H', 'I', 88, 99, 'LLL']);
+ expect(q.success).toBe(true);
+ expect(q.query).toBe("12,'C','DDD','E','F','G','H','I',8899,'LLL',$13,$14,$15,1,'C'");
+
+ // test that $1 variable isn't confused with $12;
+ q = pgp.as.format("$12", 123);
+ expect(q.success).toBe(false);
+ expect(q.error).toBe("No variable found in the query to replace with the passed value.");
+
+ // test that $1 variable isn't confused with $112
+ q = pgp.as.format("$112", 123);
+ expect(q.success).toBe(false);
+ expect(q.error).toBe("No variable found in the query to replace with the passed value.");
+
+ // test that variable names are not confused for longer ones;
+ q = pgp.as.format("$11, $1, $111, $1", 123);
+ expect(q.success).toBe(true);
+ expect(q.query).toBe("$11, 123, $111, 123");
+
+ // test that variable names are not confused for longer ones,
+ // even when they are right next to each other;
+ q = pgp.as.format("$11$1$111$1", 123);
+ expect(q.success).toBe(true);
+ expect(q.query).toBe("$11123$111123");
+
});
}); | Fixing issue #9, adding corresponding tests. | vitaly-t_pg-promise | train |
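The key detail is the negative lookahead (?!\d), which stops $1 from matching inside $12 or $112. A small Python sketch of the same matching rule (names are illustrative, not the pg-promise API):

import re

def format_query(query, value):
    # The variable must exist and must not be followed by another digit.
    pattern = re.compile(r"\$1(?!\d)")
    if not pattern.search(query):
        raise ValueError("No variable found in the query to replace.")
    return pattern.sub(str(value), query)

print(format_query("$11$1$111$1", 123))  # $11123$111123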
18b1842416fb9614636f376d6cf91335613fab8d | diff --git a/resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/internal/InMemoryBulkheadRegistry.java b/resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/internal/InMemoryBulkheadRegistry.java
index <HASH>..<HASH> 100644
--- a/resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/internal/InMemoryBulkheadRegistry.java
+++ b/resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/internal/InMemoryBulkheadRegistry.java
@@ -51,7 +51,7 @@ public final class InMemoryBulkheadRegistry extends AbstractRegistry<Bulkhead, B
}
public InMemoryBulkheadRegistry(Map<String, BulkheadConfig> configs) {
- this(configs.getOrDefault(DEFAULT_CONFIG, BulkheadConfig.ofDefaults()), HashMap.empty());
+ this(configs, HashMap.empty());
}
public InMemoryBulkheadRegistry(Map<String, BulkheadConfig> configs, io.vavr.collection.Map<String, String> tags) {
diff --git a/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/InMemoryCircuitBreakerRegistry.java b/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/InMemoryCircuitBreakerRegistry.java
index <HASH>..<HASH> 100644
--- a/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/InMemoryCircuitBreakerRegistry.java
+++ b/resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/InMemoryCircuitBreakerRegistry.java
@@ -51,7 +51,7 @@ public final class InMemoryCircuitBreakerRegistry extends AbstractRegistry<Circu
}
public InMemoryCircuitBreakerRegistry(Map<String, CircuitBreakerConfig> configs) {
- this(configs.getOrDefault(DEFAULT_CONFIG, CircuitBreakerConfig.ofDefaults()), HashMap.empty());
+ this(configs, HashMap.empty());
}
public InMemoryCircuitBreakerRegistry(Map<String, CircuitBreakerConfig> configs, io.vavr.collection.Map<String, String> tags) { | Issue #<I>: Fixed a bug in InMemoryBulkheadRegistry and InMemoryCircuitBreakerRegistry. | resilience4j_resilience4j | train |
e52c67b9f77c18a9a7a6f0f22b04ea0a80ce63ae | diff --git a/js/load-image-meta.js b/js/load-image-meta.js
index <HASH>..<HASH> 100644
--- a/js/load-image-meta.js
+++ b/js/load-image-meta.js
@@ -181,7 +181,7 @@
var originalTransform = loadImage.transform
loadImage.transform = function (img, options, callback, file, data) {
- if (loadImage.hasMetaOption(options)) {
+ if (loadImage.requiresMetaData(options)) {
loadImage.parseMetaData(
file,
function (data) {
diff --git a/js/load-image-orientation.js b/js/load-image-orientation.js
index <HASH>..<HASH> 100644
--- a/js/load-image-orientation.js
+++ b/js/load-image-orientation.js
@@ -44,8 +44,8 @@ Exif orientation values to correctly display the letter F:
'use strict'
var originalTransform = loadImage.transform
- var originalHasCanvasOption = loadImage.hasCanvasOption
- var originalHasMetaOption = loadImage.hasMetaOption
+ var originalRequiresCanvas = loadImage.requiresCanvas
+ var originalRequiresMetaData = loadImage.requiresMetaData
var originalTransformCoordinates = loadImage.transformCoordinates
var originalGetTransformedOptions = loadImage.getTransformedOptions
@@ -95,19 +95,19 @@ Exif orientation values to correctly display the letter F:
}
// Determines if the target image should be a canvas element:
- loadImage.hasCanvasOption = function (options) {
+ loadImage.requiresCanvas = function (options) {
return (
(options.orientation === true && !loadImage.orientation) ||
(options.orientation > 1 && options.orientation < 9) ||
- originalHasCanvasOption.call(loadImage, options)
+ originalRequiresCanvas.call(loadImage, options)
)
}
// Determines if meta data should be loaded automatically:
- loadImage.hasMetaOption = function (options) {
+ loadImage.requiresMetaData = function (options) {
return (
(options && options.orientation === true && !loadImage.orientation) ||
- originalHasMetaOption.call(loadImage, options)
+ originalRequiresMetaData.call(loadImage, options)
)
}
diff --git a/js/load-image-scale.js b/js/load-image-scale.js
index <HASH>..<HASH> 100644
--- a/js/load-image-scale.js
+++ b/js/load-image-scale.js
@@ -109,15 +109,12 @@
}
// Determines if the target image should be a canvas element:
- loadImage.hasCanvasOption = function (options) {
+ loadImage.requiresCanvas = function (options) {
return options.canvas || options.crop || !!options.aspectRatio
}
// Scales and/or crops the given image (img or canvas HTML element)
- // using the given options.
- // Returns a canvas object if the browser supports canvas
- // and the hasCanvasOption method returns true or a canvas
- // object is passed as image, else the scaled image:
+ // using the given options:
loadImage.scale = function (img, options, data) {
// eslint-disable-next-line no-param-reassign
options = options || {}
@@ -125,8 +122,7 @@
data = data || {}
var canvas = document.createElement('canvas')
var useCanvas =
- img.getContext ||
- (loadImage.hasCanvasOption(options) && canvas.getContext)
+ img.getContext || (loadImage.requiresCanvas(options) && canvas.getContext)
var width = img.naturalWidth || img.width
var height = img.naturalHeight || img.height
var destWidth = width
diff --git a/js/load-image.js b/js/load-image.js
index <HASH>..<HASH> 100644
--- a/js/load-image.js
+++ b/js/load-image.js
@@ -54,7 +54,7 @@
return loadImage.onload(img, event, file, url, callback, options)
}
if (typeof file === 'string') {
- if (loadImage.hasMetaOption(options)) {
+ if (loadImage.requiresMetaData(options)) {
loadImage.fetchBlob(file, fetchBlobCallback, options)
} else {
fetchBlobCallback()
@@ -102,7 +102,7 @@
// Determines if meta data should be loaded automatically.
// Requires the load image meta extension to load meta data.
- loadImage.hasMetaOption = function (options) {
+ loadImage.requiresMetaData = function (options) {
return options && options.meta
} | Rename functions for canvas and meta requirements. | blueimp_JavaScript-Load-Image | train |
7226966b6db9ca02dbf4fde6d4c44fc76f42b8e3 | diff --git a/lib/geocoder/lookups/maxmind_local.rb b/lib/geocoder/lookups/maxmind_local.rb
index <HASH>..<HASH> 100644
--- a/lib/geocoder/lookups/maxmind_local.rb
+++ b/lib/geocoder/lookups/maxmind_local.rb
@@ -1,15 +1,19 @@
require 'geocoder/lookups/base'
require 'geocoder/results/maxmind_local'
-begin
- require 'geoip'
-rescue LoadError => e
- raise 'Could not load geoip dependency. To use MaxMind Local lookup you must add geoip gem to your Gemfile or have it installed in your system.'
-end
-
module Geocoder::Lookup
class MaxmindLocal < Base
+ def initialize
+ begin
+ require 'geoip'
+ rescue LoadError => e
+ raise 'Could not load geoip dependency. To use MaxMind Local lookup you must add geoip gem to your Gemfile or have it installed in your system.'
+ end
+
+ super
+ end
+
def name
"MaxMind Local"
end | Move geoip gem check to the initialize function.
Lookups are always loaded even when they aren't used, so the geoip gem was being checked even when the user didn't want to use the maxmind_local lookup. | alexreisner_geocoder | train
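A sketch of the same lazy-import pattern in Python; geoip2 is just a stand-in for any optional dependency:

class MaxmindLocal:
    def __init__(self):
        # Import inside the constructor so merely loading the module
        # does not require the optional dependency.
        try:
            import geoip2  # hypothetical optional dependency
        except ImportError as exc:
            raise RuntimeError(
                "Install the geoip dependency to use the MaxMind Local lookup."
            ) from exc
        self._geoip = geoip2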
22aaa6cc41dd373bde65f8cb6de9fcc928e8cebf | diff --git a/src/main/java/org/codehaus/groovy/runtime/GStringUtil.java b/src/main/java/org/codehaus/groovy/runtime/GStringUtil.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/codehaus/groovy/runtime/GStringUtil.java
+++ b/src/main/java/org/codehaus/groovy/runtime/GStringUtil.java
@@ -119,7 +119,9 @@ public final class GStringUtil {
for (String string : ss) {
initialCapacity += string.length();
}
- initialCapacity += vs.length * Math.max(initialCapacity / ss.length, 8);
+ if (ss.length != 0) {
+ initialCapacity += vs.length * Math.max(initialCapacity / ss.length, 8);
+ }
return Math.max((int) (initialCapacity * 1.2), 16);
}
} | minor refactoring: avoid div by zero for empty GString strings | apache_groovy | train |
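The same guard in Python, keeping the constants from the Java code (8 as the minimum per-value estimate, 1.2 growth factor, 16 floor):

def initial_capacity(strings, values):
    capacity = sum(len(s) for s in strings)
    if strings:  # guard: avoid division by zero for an empty GString
        capacity += len(values) * max(capacity // len(strings), 8)
    return max(int(capacity * 1.2), 16)

print(initial_capacity([], []))  # 16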
ce6459d674e9fcef7f8cccb22b8bb6cf7c55e4dd | diff --git a/tests/easy/test_proc.py b/tests/easy/test_proc.py
index <HASH>..<HASH> 100644
--- a/tests/easy/test_proc.py
+++ b/tests/easy/test_proc.py
@@ -2,16 +2,24 @@ from nose.tools import eq_, raises
from pyscreenshot.procutil import run_in_childprocess
+# win error with lambda: Can't pickle local object '...<lambda>'
+
+def f3():
+ return 3
+def f3px(x):
+ return 3 + x
+def f4px(x):
+ return 4 + x
def test():
- eq_(3, run_in_childprocess(lambda: 3))
- eq_(4, run_in_childprocess(lambda x: 3 + x, None, 1))
- eq_(5, run_in_childprocess(lambda x: 3 + x, None, x=2))
+ eq_(3, run_in_childprocess(f3))
+ eq_(4, run_in_childprocess(f3px, None, 1))
+ eq_(5, run_in_childprocess(f3px, None, x=2))
def test_codec():
- coder = decoder = lambda x: x + 4
- eq_(12, run_in_childprocess(lambda x: 3 + x, (coder, decoder), 1))
+ coder = decoder = f4px
+ eq_(12, run_in_childprocess(f3px, (coder, decoder), 1))
def exc(): | test: replace lambda with function | ponty_pyscreenshot | train |
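The underlying limitation is easy to reproduce directly: module-level functions pickle by reference, while a lambda cannot be looked up by name, which is why spawn-based child processes on Windows reject it. A short demonstration:

import pickle

f = lambda: 3

def f3():
    return 3

try:
    pickle.dumps(f)  # fails: a lambda has no importable qualified name
except (pickle.PicklingError, AttributeError) as exc:
    print("lambda:", exc)

print(len(pickle.dumps(f3)) > 0)  # True: named functions pickle fine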
aa1e1153e955204e178d9dbb030e6995642ea1fd | diff --git a/tests/test_json_field.py b/tests/test_json_field.py
index <HASH>..<HASH> 100644
--- a/tests/test_json_field.py
+++ b/tests/test_json_field.py
@@ -18,3 +18,16 @@ class JsonFieldTest(TestCase):
j = JSONFieldTestModel.objects.create(a=6, j_field=[])
self.assertTrue(isinstance(j.j_field, list))
self.assertEqual(j.j_field, [])
+
+ def test_float_values(self):
+ """ Tests that float values in JSONFields are correctly serialized over repeated saves.
+ Regression test for c382398b, which fixes floats being returned as strings after a second save.
+ """
+ test_instance = JSONFieldTestModel(a=6, j_field={'test': 0.1})
+ test_instance.save()
+
+ test_instance = JSONFieldTestModel.objects.get()
+ test_instance.save()
+
+ test_instance = JSONFieldTestModel.objects.get()
+ self.assertEqual(test_instance.j_field['test'], 0.1) | Add a regression test to make sure floats stored in JSON fields are being serialised correctly. | django-extensions_django-extensions | train |
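For contrast, a plain JSON round-trip does preserve float types; the bug was in the field's repeated serialization, not in JSON itself:

import json

value = {"test": 0.1}
restored = json.loads(json.dumps(value))
# A correct round-trip returns a float, not the string "0.1".
assert isinstance(restored["test"], float)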
135a6a44699f9b755e0ffddc1367607fd64f34f3 | diff --git a/lib/Doctrine/ODM/PHPCR/UnitOfWork.php b/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
+++ b/lib/Doctrine/ODM/PHPCR/UnitOfWork.php
@@ -1153,8 +1153,9 @@ class UnitOfWork
if ($class->versionable) {
$this->setVersionableMixin($class, $node);
- } elseif ($class->referenceable) {
- // referenceable is a supertype of versionable, only set if not versionable
+ }
+
+ if ($class->referenceable) {
$node->addMixin('mix:referenceable');
} | simple versioning does not imply referenceable | doctrine_phpcr-odm | train |
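A sketch of the independent-checks shape in Python, with a toy Node class standing in for the PHPCR node:

class Node:
    def __init__(self):
        self.mixins = []

    def add_mixin(self, mixin):
        self.mixins.append(mixin)

def apply_mixins(node, versionable, referenceable):
    if versionable:
        node.add_mixin("mix:versionable")
    if referenceable:  # independent if, not an elif: both may apply
        node.add_mixin("mix:referenceable")

n = Node()
apply_mixins(n, True, True)
print(n.mixins)  # both mixins applied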
43fd235cce8377b2130e9014196f17f7253e9cbe | diff --git a/labware-library/src/labware-creator/index.js b/labware-library/src/labware-creator/index.js
index <HASH>..<HASH> 100644
--- a/labware-library/src/labware-creator/index.js
+++ b/labware-library/src/labware-creator/index.js
@@ -74,7 +74,7 @@ type MakeAutofillOnChangeArgs = {|
|}
const PDF_URL =
- 'https://opentrons-publications.s3.us-east-2.amazonaws.com/TestGuide_labware.pdf'
+ 'https://opentrons-publications.s3.us-east-2.amazonaws.com/labwareDefinition_testGuide.pdf'
const makeAutofillOnChange = ({
autofills, | refactor(labware-creator): Update custom labware definition test guide link (#<I>) | Opentrons_opentrons | train |
99b0f39cdbf8a8582ca6c22afc2aa038e04d24f6 | diff --git a/elasticsearch-model/lib/elasticsearch/model/indexing.rb b/elasticsearch-model/lib/elasticsearch/model/indexing.rb
index <HASH>..<HASH> 100644
--- a/elasticsearch-model/lib/elasticsearch/model/indexing.rb
+++ b/elasticsearch-model/lib/elasticsearch/model/indexing.rb
@@ -397,7 +397,7 @@ module Elasticsearch
# @see http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions:update
#
def update_document(options={})
- if attributes_in_database = self.instance_variable_get(:@__changed_model_attributes)
+ if attributes_in_database = self.instance_variable_get(:@__changed_model_attributes).presence
attributes = if respond_to?(:as_indexed_json)
self.as_indexed_json.select { |k,v| attributes_in_database.keys.map(&:to_s).include? k.to_s }
else
diff --git a/elasticsearch-model/test/unit/indexing_test.rb b/elasticsearch-model/test/unit/indexing_test.rb
index <HASH>..<HASH> 100644
--- a/elasticsearch-model/test/unit/indexing_test.rb
+++ b/elasticsearch-model/test/unit/indexing_test.rb
@@ -176,6 +176,19 @@ class Elasticsearch::Model::IndexingTest < Test::Unit::TestCase
end
end
+ class ::DummyIndexingModelWithNoChanges
+ extend Elasticsearch::Model::Indexing::ClassMethods
+ include Elasticsearch::Model::Indexing::InstanceMethods
+
+ def self.before_save(&block)
+ (@callbacks ||= {})[block.hash] = block
+ end
+
+ def changes_to_save
+ {}
+ end
+ end
+
class ::DummyIndexingModelWithCallbacksAndCustomAsIndexedJson
extend Elasticsearch::Model::Indexing::ClassMethods
include Elasticsearch::Model::Indexing::InstanceMethods
@@ -393,6 +406,26 @@ class Elasticsearch::Model::IndexingTest < Test::Unit::TestCase
instance.update_document
end
+ should "index instead of update when nothing was changed" do
+ client = mock('client')
+ instance = ::DummyIndexingModelWithNoChanges.new
+
+ # Set the fake `changes` hash
+ instance.instance_variable_set(:@__changed_model_attributes, {})
+ # Overload as_indexed_json for running index
+ instance.expects(:as_indexed_json).returns({ 'foo' => 'BAR' })
+
+ client.expects(:index)
+ client.expects(:update).never
+
+ instance.expects(:client).returns(client)
+ instance.expects(:index_name).returns('foo')
+ instance.expects(:document_type).returns('bar')
+ instance.expects(:id).returns('1')
+
+ instance.update_document({})
+ end
+
should "update only the specific attributes" do
client = mock('client')
instance = ::DummyIndexingModelWithCallbacks.new | [MODEL] Avoid making an update when no attributes are changed (#<I>)
Closes #<I> | elastic_elasticsearch-rails | train |
e8d59d914ade83e1bbf18c20f25f48b297e78af3 | diff --git a/algolia/recommendation/client.go b/algolia/recommendation/client.go
index <HASH>..<HASH> 100644
--- a/algolia/recommendation/client.go
+++ b/algolia/recommendation/client.go
@@ -17,6 +17,7 @@ type Client struct {
func NewClient(appID, apiKey string, region region.Region) *Client {
return NewClientWithConfig(
Configuration{
+ AppID: appID,
APIKey: apiKey,
Region: region,
}, | fix(recommendation): add app id to the default config | algolia_algoliasearch-client-go | train |
61a10a3b024c2d3536dc72cd2c4cdc5aac06dca5 | diff --git a/estnltk/tests/test_morph/test_vabamorf_analyzer_disambiguator.py b/estnltk/tests/test_morph/test_vabamorf_analyzer_disambiguator.py
index <HASH>..<HASH> 100644
--- a/estnltk/tests/test_morph/test_vabamorf_analyzer_disambiguator.py
+++ b/estnltk/tests/test_morph/test_vabamorf_analyzer_disambiguator.py
@@ -507,3 +507,63 @@ def test_morph_disambiguation_with_ignore_xml_tags():
# assert that all words have been disambiguated
assert len(span.annotations) == 1
assert IGNORE_ATTR not in span.annotations[0]
+
+
+
+# ----------------------------------------------------------------
+# Test
+# morphological analyser and disambiguator will output
+# ambiguous word's analyses in specific order
+# ----------------------------------------------------------------
+
+def test_ordering_of_ambiguous_morph_analyses():
+ # Test the default ordering of ambiguous morph analyses
+ text_str = '''
+ Need olid ühed levinuimad rattad omal ajastul.
+ Ma kusjuures ei olegi varem sinna kordagi sattunud.
+ Hästi jutustatud ja korraliku ideega.
+ Kuna peamine põhjus vähendada suitsugaaside kardinaid osatähtsus on vähenenud läbipääsu toru kogunenud tahm seintel.
+ '''
+ text=Text(text_str)
+ text.tag_layer(['words','sentences'])
+ analyzer2.tag(text)
+ disambiguator.retag( text )
+ # Collect ambiguous analyses
+ ambiguous_analyses = []
+ for morph_word in text.morph_analysis:
+ annotations = morph_word.annotations
+ #ambiguous_analyses.append( [morph_word.text]+[(a['root'], a['partofspeech'], a['form'] ) for a in annotations] )
+ if len( annotations ) > 1:
+ ambiguous_analyses.append( [morph_word.text]+[(a['root'], a['partofspeech'], a['form'] ) for a in annotations] )
+ # ==============
+ #print()
+ #for a in ambiguous_analyses:
+ # print(a)
+ # ==============
+ # ordering A: when VabamorfAnalyzer & VabamorfDisambiguator do not sort ambiguous analyses
+ # ==============
+ ordering_a = [ \
+ ['ühed', ('üks', 'N', 'pl n'), ('üks', 'P', 'pl n')],
+ ['sattunud', ('sattu', 'V', 'nud'), ('sattu=nud', 'A', ''), ('sattu=nud', 'A', 'sg n'), ('sattu=nud', 'A', 'pl n')],
+ ['jutustatud', ('jutusta', 'V', 'tud'), ('jutusta=tud', 'A', ''), ('jutusta=tud', 'A', 'sg n'), ('jutusta=tud', 'A', 'pl n')],
+ ['on', ('ole', 'V', 'b'), ('ole', 'V', 'vad')],
+ ['vähenenud', ('vähene', 'V', 'nud'), ('vähene=nud', 'A', ''), ('vähene=nud', 'A', 'sg n'), ('vähene=nud', 'A', 'pl n')],
+ ['kogunenud', ('kogune', 'V', 'nud'), ('kogune=nud', 'A', ''), ('kogune=nud', 'A', 'sg n'), ('kogune=nud', 'A', 'pl n')]
+ ]
+ # ==============
+ # ordering B: when VabamorfAnalyzer & VabamorfDisambiguator are sorting ambiguous analyses
+ # ==============
+ ordering_b = [ \
+ ['ühed', ('üks', 'N', 'pl n'), ('üks', 'P', 'pl n')],
+ ['sattunud', ('sattu=nud', 'A', ''), ('sattu=nud', 'A', 'sg n'), ('sattu=nud', 'A', 'pl n'), ('sattu', 'V', 'nud')],
+ ['jutustatud', ('jutusta=tud', 'A', ''), ('jutusta=tud', 'A', 'sg n'), ('jutusta=tud', 'A', 'pl n'), ('jutusta', 'V', 'tud')],
+ ['on', ('ole', 'V', 'b'), ('ole', 'V', 'vad')],
+ ['vähenenud', ('vähene=nud', 'A', ''), ('vähene=nud', 'A', 'sg n'), ('vähene=nud', 'A', 'pl n'), ('vähene', 'V', 'nud')],
+ ['kogunenud', ('kogune=nud', 'A', ''), ('kogune=nud', 'A', 'sg n'), ('kogune=nud', 'A', 'pl n'), ('kogune', 'V', 'nud')]
+ ]
+ # ==============
+ # validate the current ordering
+ # ==============
+ assert ordering_b == ambiguous_analyses
+
+ | Updated test_vabamorf_analyzer_disambiguator: added a test about the order of ambiguous analyses | estnltk_estnltk | train |
40d465da52b7f5bc8e50e3c30005534517c0455e | diff --git a/core/app/models/comable/order.rb b/core/app/models/comable/order.rb
index <HASH>..<HASH> 100644
--- a/core/app/models/comable/order.rb
+++ b/core/app/models/comable/order.rb
@@ -3,8 +3,8 @@ module Comable
belongs_to :customer, class_name: Comable::Customer.name, foreign_key: Comable::Customer.table_name.singularize.foreign_key, autosave: false
belongs_to :payment, class_name: Comable::Payment.name, foreign_key: Comable::Payment.table_name.singularize.foreign_key, autosave: false
belongs_to :shipment_method, class_name: Comable::ShipmentMethod.name, autosave: false
- belongs_to :bill_address, class_name: Comable::Address.name
- belongs_to :ship_address, class_name: Comable::Address.name
+ belongs_to :bill_address, class_name: Comable::Address.name, dependent: :destroy
+ belongs_to :ship_address, class_name: Comable::Address.name, dependent: :destroy
has_many :order_deliveries, dependent: :destroy, class_name: Comable::OrderDelivery.name, foreign_key: table_name.singularize.foreign_key, inverse_of: :order
accepts_nested_attributes_for :bill_address
diff --git a/core/spec/models/comable/order_spec.rb b/core/spec/models/comable/order_spec.rb
index <HASH>..<HASH> 100644
--- a/core/spec/models/comable/order_spec.rb
+++ b/core/spec/models/comable/order_spec.rb
@@ -1,10 +1,11 @@
describe Comable::Order do
- it { expect { described_class.new }.to_not raise_error }
-
subject(:order) { FactoryGirl.build(:order) }
subject { order }
+ it { is_expected.to belong_to(:bill_address).class_name(Comable::Address.name).dependent(:destroy) }
+ it { is_expected.to belong_to(:ship_address).class_name(Comable::Address.name).dependent(:destroy) }
+
describe 'attributes' do
describe '#save' do
context 'complete order' do | Destroy the addresses when destroying an order | appirits_comable | train
f82c5c65435a01f1e99966d1bde637bac6ed8cb3 | diff --git a/openquake/calculators/ebrisk.py b/openquake/calculators/ebrisk.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/ebrisk.py
+++ b/openquake/calculators/ebrisk.py
@@ -39,8 +39,13 @@ def start_ebrisk(gmfgetter, crmodel, param, monitor):
"""
Launcher for ebrisk tasks
"""
+ with hdf5.File(gmfgetter.srcfilter.filename, 'r') as cache:
+ num_taxonomies = cache['num_taxonomies'][()]
with monitor('filtering ruptures'):
gmfgetter.init()
+ # redefine gmcomputer.weight
+ for comp in gmfgetter.computers:
+ comp.weight = num_taxonomies[comp.sids].sum()
if gmfgetter.computers:
yield from parallel.split_task(
ebrisk, gmfgetter.computers, gmfgetter.gmv_dt, gmfgetter.min_iml,
@@ -156,6 +161,8 @@ class EbriskCalculator(event_based.EventBasedCalculator):
with hdf5.File(self.hdf5cache, 'w') as cache:
cache['sitecol'] = self.sitecol.complete
cache['assetcol'] = self.assetcol
+ cache['num_taxonomies'] = U16(
+ self.assetcol.num_taxonomies_by_site())
self.param['lba'] = lba = (
LossesByAsset(self.assetcol, oq.loss_names,
self.policy_name, self.policy_dict))
diff --git a/openquake/hazardlib/calc/gmf.py b/openquake/hazardlib/calc/gmf.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/calc/gmf.py
+++ b/openquake/hazardlib/calc/gmf.py
@@ -113,14 +113,10 @@ class GmfComputer(object):
except AttributeError:
self.sctx, self.dctx = cmaker.make_contexts(sitecol, rupture)
self.sids = self.sctx.sids
+ self.weight = self.sids.sum()
if correlation_model: # store the filtered sitecol
self.sites = sitecol.complete.filtered(self.sids)
- @property
- def weight(self):
- """The number of affected sites"""
- return len(self.sids)
-
def compute_all(self, min_iml, rlzs_by_gsim, sig_eps=None):
"""
:returns: [(sid, eid, gmv), ...] | Changed the weight in ebrisk depending on the number of taxonomies per site | gem_oq-engine | train
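A sketch of the reweighting with made-up numbers (numpy assumed available):

import numpy as np

num_taxonomies = np.array([2, 1, 3, 1], dtype=np.uint16)  # one entry per site
sids = np.array([0, 2])  # sites affected by one rupture
# Weight by exposed taxonomies rather than by the plain site count.
weight = int(num_taxonomies[sids].sum())
print(weight)  # 5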
a62a20a80d66656209372252979a317e9f07906f | diff --git a/apiserver/params/model.go b/apiserver/params/model.go
index <HASH>..<HASH> 100644
--- a/apiserver/params/model.go
+++ b/apiserver/params/model.go
@@ -420,5 +420,5 @@ type ChangeModelCredentialParams struct {
// ChangeModelCredentialsParams holds the arguments for changing
// cloud credentials on models.
type ChangeModelCredentialsParams struct {
- Models []ChangeModelCredentialParams `json:"models"`
+ Models []ChangeModelCredentialParams `json:"model-credentials"`
} | Rename param property to be more descriptive. | juju_juju | train |
7db707cb6d0961e8648809224edf99f0e9add833 | diff --git a/src/parse/directive.js b/src/parse/directive.js
index <HASH>..<HASH> 100644
--- a/src/parse/directive.js
+++ b/src/parse/directive.js
@@ -107,7 +107,12 @@ function parseDirective(directives: DirectiveMapping, eat: Eat, value: string):
if (currentLine === '') {
eatLine(currentLine);
content.push(NEWLINE);
- } else if (hasIndent(currentLine, CUSTOM_BLOCK_INDENT)) {
+ } else if (
+ hasIndent(currentLine, CUSTOM_BLOCK_INDENT)
+ && !(hasIndent(currentLine, 4) &&
+ !hasIndent(currentLine, 5) &&
+ !content.some(line => line !== '\n'))
+ ) {
eatLine(currentLine);
content.push(currentLine.slice(CUSTOM_BLOCK_INDENT));
} else { | fix(parse): fix parsing code blocks after directives | andreypopp_reactdown | train |
0f96f8425366fd7a7520467a1adb2e258512a883 | diff --git a/test/sg_release_test.js b/test/sg_release_test.js
index <HASH>..<HASH> 100644
--- a/test/sg_release_test.js
+++ b/test/sg_release_test.js
@@ -223,5 +223,26 @@ exports.sg_release = {
});
},
+
+ // ---
+
+
+ testCommitDevelopment: function (test) {
+ test.expect(1);
+
+ var testContent = '{"name":"foo", "version":"0.0.2"}\n';
+
+ // Touch 2 version files to simulate changes on repo
+ grunt.file.write(dir + '/bower.json', testContent);
+ grunt.file.write(dir + '/package.json', testContent);
+
+ gitHelper.commit(grunt, dir, messages.developVersionCommitMsg, function (stdout) {
+ console.log(stdout);
+ // output should contain success commit msg
+ test.notEqual(stdout.indexOf('2 files changed'), -1);
+ test.done();
+ });
+ }
+
}; | Added test for committing into develop branch | SunGard-Labs_grunt-sg-release | train |
2fa608ee02b9bea3cac329e5075a85932e5b7c84 | diff --git a/History.md b/History.md
index <HASH>..<HASH> 100644
--- a/History.md
+++ b/History.md
@@ -1,6 +1,7 @@
unreleased
==========
+ * export parsing functions
* `req.cookies` and `req.signedCookies` are now plain objects
* slightly faster parsing of many cookies
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,18 @@ var cookieParser = require('cookie-parser')
- `options` an object that is passed to `cookie.parse` as the second option. See [cookie](https://www.npmjs.org/package/cookie) for more information.
- `decode` a function to decode the value of the cookie
+### cookieParser.JSONCookie(str)
+
+Parse a cookie value as a JSON cookie. This will return the parsed JSON value if it was a JSON cookie, otherwise it will return the passed value.
+
+### cookieParser.JSONCookies(cookies)
+
+Given an object, this will iterate over the keys and call `JSONCookie` on each value. This will return the same object passed in.
+
+### cookieParser.signedCookies(cookies, secret)
+
+Given an object, this will iterate over the keys and check if any value is a signed cookie. If it is a signed cookie and the signature is valid, the key will be deleted from the object and added to the new object that is returned.
+
## Example
```js
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -20,7 +20,7 @@ var parse = require('./lib/parse');
* @api public
*/
-module.exports = function cookieParser(secret, options){
+exports = module.exports = function cookieParser(secret, options){
return function cookieParser(req, res, next) {
if (req.cookies) return next();
var cookies = req.headers.cookie;
@@ -45,3 +45,11 @@ module.exports = function cookieParser(secret, options){
next();
};
};
+
+/**
+ * Export parsing functions.
+ */
+
+exports.JSONCookie = parse.JSONCookie;
+exports.JSONCookies = parse.JSONCookies;
+exports.signedCookies = parse.signedCookies;
diff --git a/lib/parse.js b/lib/parse.js
index <HASH>..<HASH> 100644
--- a/lib/parse.js
+++ b/lib/parse.js
@@ -13,7 +13,7 @@ var signature = require('cookie-signature');
exports.signedCookies = function(obj, secret){
var cookies = Object.keys(obj);
var key;
- var ret = {};
+ var ret = Object.create(null);
var val;
for (var i = 0; i < cookies.length; i++) {
diff --git a/test/cookieParser.js b/test/cookieParser.js
index <HASH>..<HASH> 100644
--- a/test/cookieParser.js
+++ b/test/cookieParser.js
@@ -1,4 +1,5 @@
+var assert = require('assert')
var cookieParser = require('..')
var http = require('http')
var request = require('supertest')
@@ -10,6 +11,18 @@ describe('cookieParser()', function(){
server = createServer('keyboard cat')
})
+ it('should export JSONCookie function', function(){
+ assert(typeof cookieParser.JSONCookie, 'function')
+ })
+
+ it('should export JSONCookies function', function(){
+ assert(typeof cookieParser.JSONCookies, 'function')
+ })
+
+ it('should export signedCookies function', function(){
+ assert(typeof cookieParser.signedCookies, 'function')
+ })
+
describe('when no cookies are sent', function(){
it('should default req.cookies to {}', function(done){
request(server) | Export the parsing functions
closes #7 | expressjs_cookie-parser | train |
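JSONCookie recognizes values with a "j:" prefix; a rough Python equivalent of that parsing rule:

import json

def json_cookie(value):
    # Return parsed JSON for "j:"-prefixed cookies, else the raw value.
    if isinstance(value, str) and value.startswith("j:"):
        try:
            return json.loads(value[2:])
        except ValueError:
            return value
    return value

print(json_cookie('j:{"a": 1}'))  # {'a': 1}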
e67063c86787aec79aa9322485634150422b731f | diff --git a/WeaveServices/src/weave/servlets/RService.java b/WeaveServices/src/weave/servlets/RService.java
index <HASH>..<HASH> 100644
--- a/WeaveServices/src/weave/servlets/RService.java
+++ b/WeaveServices/src/weave/servlets/RService.java
@@ -77,21 +77,23 @@ public class RService extends GenericServlet
private String docrootPath = "";
private String rFolderName = "R_output";
- REngine rEngine = null;
- private REngine getREngine() throws Exception
- {
- try
+ private REngine rEngine = null;
+ /**
+ * This function will initialize the private rEngine variable.
+ * @throws Exception
+ */
+ private void initializeREngine() throws Exception
+ {
+ // temporary to prevent server crashing
+ if (true)
+ throw new Exception("R support is temporarily disabled");
+
+ if (rEngine == null)
{
String cls = "org.rosuda.REngine.JRI.JRIEngine";
String[] args = { "--vanilla", "--slave" };
- rEngine= REngine.engineForClass(cls, args, new JRICallbacks(), false);
-
+ rEngine = REngine.engineForClass(cls, args, new JRICallbacks(), false);
}
- catch (Exception e)
- {
- e.printStackTrace();
- }
- return rEngine;
}
private String plotEvalScript(REngine rEngine, String script, boolean showWarnings) throws REXPMismatchException, REngineException
{
@@ -132,9 +134,8 @@ public class RService extends GenericServlet
{
//System.out.println(keys.length);
String output = "";
- if(rEngine == null){
- rEngine = getREngine();
- }
+ initializeREngine();
+
RResult[] results = null;
REXP evalValue;
try | disabled R support because it was crashing the server
Change-Id: I<I>aaf<I>d<I>b<I>a0ecd<I>e<I>d<I>cb0d<I>d<I>e | WeaveTeam_WeaveJS | train |
378c4581855f2593e902a2a8569aa98250f8e865 | diff --git a/pyqode/python/managers/file.py b/pyqode/python/managers/file.py
index <HASH>..<HASH> 100644
--- a/pyqode/python/managers/file.py
+++ b/pyqode/python/managers/file.py
@@ -47,8 +47,7 @@ class PyFileManager(FileManager):
return 'UTF-8'
def open(self, path, encoding=None, use_cached_encoding=True):
- if encoding is None:
- encoding = self.detect_encoding(path)
+ encoding = self.detect_encoding(path)
super(PyFileManager, self).open(
path, encoding=encoding, use_cached_encoding=use_cached_encoding)
try: | Force preferred encoding following python convention:
if there is an encoding shebang line, the specified encoding is used; otherwise UTF-8 is assumed. | pyQode_pyqode.python | train
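The convention is PEP 263: honor a coding declaration in the first two lines, else assume UTF-8. A compact sketch of the detection:

import re

def detect_encoding(first_two_lines):
    for line in first_two_lines:
        match = re.search(r"coding[:=]\s*([-\w.]+)", line)
        if match:
            return match.group(1)
    return "UTF-8"

print(detect_encoding(["#!/usr/bin/env python",
                       "# -*- coding: latin-1 -*-"]))  # latin-1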
6cfa9ed46e78db8f90cfeda28fad8cc5a4c37f61 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -306,7 +306,7 @@ module.exports = function(spec) {
// Helper funtions
var getType = function(args) {
- var defaultType = 'text_string';
+ var defaultType = 'string';
var type = defaultType;
var item = apiSpec.executables[args.executableName] || apiSpec.invokers[args.invokerName];
@@ -331,14 +331,12 @@ module.exports = function(spec) {
}
}
- if (type === 'string') {
- type = 'text_string';
- } else if (type === 'byte_string' && args.preferBase64) {
+ if (type === 'binary' && args.preferBase64) {
type = '__base64_string';
} else if (_.isPlainObject(type) && args.preferBase64) {
_.each(type, function(t, name) {
- if (t === 'string') type[name] = 'text_string';
- else if (t === 'byte_string') type[name] = '__base64_string';
+ if (t === 'string') type[name] = 'string';
+ else if (t === 'binary') type[name] = '__base64_string';
});
}
diff --git a/lib/NeDB.js b/lib/NeDB.js
index <HASH>..<HASH> 100644
--- a/lib/NeDB.js
+++ b/lib/NeDB.js
@@ -16,7 +16,7 @@ module.exports = function(spec) {
nedb.persistence.setAutocompactionInterval(spec.autocompactionInterval);
var wrap = function(value, type) {
- if (Buffer.isBuffer(value)) { // && type === 'byte_string'
+ if (Buffer.isBuffer(value)) { // && type === 'binary'
value = value.toString('base64'); //value.toJSON()
}
@@ -30,7 +30,7 @@ module.exports = function(spec) {
value = value.data;
- if (type === 'byte_string') {
+ if (type === 'binary') {
value = new Buffer(value, 'base64');
}
diff --git a/lib/Redis.js b/lib/Redis.js
index <HASH>..<HASH> 100644
--- a/lib/Redis.js
+++ b/lib/Redis.js
@@ -13,7 +13,7 @@ module.exports = function(spec) {
var wrap = function(value, type) {
if (type === 'json_object' || type === 'json_array' || type === 'boolean' || type === 'number') {
value = JSON.stringify(value);
- } else if (Buffer.isBuffer(value)) { // && type === 'byte_string'
+ } else if (Buffer.isBuffer(value)) { // && type === 'binary'
value = value.toString('base64'); //value.toJSON()
}
@@ -25,7 +25,7 @@ module.exports = function(spec) {
if (type === 'json_object' || type === 'json_array' || type === 'boolean' || type === 'number') {
value = JSON.parse(value);
- } else if (type === 'byte_string') {
+ } else if (type === 'binary') {
value = new Buffer(value, 'base64');
}
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "any2api-instancedb-redis",
- "version": "0.1.23",
+ "version": "0.1.24",
"author": "Johannes Wettinger <[email protected]>",
"description": "Redis-based instance DB (with embedded NeDB as fallback) for any2api",
"repository": { | any2api type 'byte_string' renamed to 'binary' | any2api_any2api-instancedb-redis | train |
09fa3047322a8e6f74bd23491f3dc9057a3abe16 | diff --git a/cslbot/commands/fullwidth.py b/cslbot/commands/fullwidth.py
index <HASH>..<HASH> 100644
--- a/cslbot/commands/fullwidth.py
+++ b/cslbot/commands/fullwidth.py
@@ -28,4 +28,4 @@ def cmd(send, msg, args):
"""
if not msg:
msg = gen_word()
- send(gen_fullwidth(msg))
+ send(gen_fullwidth(msg.upper()))
diff --git a/test/test.py b/test/test.py
index <HASH>..<HASH> 100755
--- a/test/test.py
+++ b/test/test.py
@@ -349,7 +349,7 @@ class FullwidthTest(BotTest):
def test_fullwidth_ascii(self):
"""Test fullwidth with ASCII characters."""
calls = self.send_msg('pubmsg', 'testnick', '#test-channel', ['!fullwidth ayy lmao'])
- self.assertEqual(calls, [('testBot', '#test-channel', 0, 'ayy\u3000lmao', 'privmsg'),
+ self.assertEqual(calls, [('testBot', '#test-channel', 0, 'AYY\u3000LMAO', 'privmsg'),
('testnick', '#test-channel', 0, '!fullwidth ayy lmao', 'pubmsg')])
def test_fullwidth_nonascii(self):
@@ -362,7 +362,7 @@ class FullwidthTest(BotTest):
"""Test fullwidth with no arguments."""
mock_gen_word.return_value = 'test'
calls = self.send_msg('pubmsg', 'testnick', '#test-channel', ['!fullwidth'])
- self.assertEqual(calls, [('testBot', '#test-channel', 0, 'test', 'privmsg'), ('testnick', '#test-channel', 0, '!fullwidth', 'pubmsg')])
+ self.assertEqual(calls, [('testBot', '#test-channel', 0, 'TEST', 'privmsg'), ('testnick', '#test-channel', 0, '!fullwidth', 'pubmsg')])
if __name__ == '__main__': | F U L L W I D T H | tjcsl_cslbot | train |
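The commit upper-cases the input before shifting printable ASCII into the fullwidth block (space maps to U+3000). A sketch of that transform:

def fullwidth(text):
    return "".join(
        "\u3000" if c == " "
        else chr(ord(c) + 0xFEE0) if "!" <= c <= "~"
        else c
        for c in text.upper()
    )

print(fullwidth("ayy lmao"))  # AYY LMAO rendered in fullwidth forms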
4b54999dc5a70c214f740bf5882a5ad86a9d359c | diff --git a/config/config.php b/config/config.php
index <HASH>..<HASH> 100644
--- a/config/config.php
+++ b/config/config.php
@@ -32,16 +32,6 @@ return [
],
],
- 'block_type' => [
- 'basic' => Gzero\Cms\Handlers\Block\Basic::class,
- 'menu' => Gzero\Cms\Handlers\Block\Menu::class,
- 'slider' => Gzero\Cms\Handlers\Block\Slider::class,
- 'widget' => Gzero\Cms\Handlers\Block\Widget::class
- ],
- 'content_type' => [
- 'content' => Gzero\Cms\Handlers\Content\ContentHandler::class,
- 'category' => Gzero\Cms\Handlers\Content\Category::class
- ],
'file_type' => [
'image' => Gzero\Cms\Handlers\File\Image::class,
'document' => Gzero\Cms\Handlers\File\Document::class,
diff --git a/database/migrations/2015_11_26_115322_create_block.php b/database/migrations/2015_11_26_115322_create_block.php
index <HASH>..<HASH> 100644
--- a/database/migrations/2015_11_26_115322_create_block.php
+++ b/database/migrations/2015_11_26_115322_create_block.php
@@ -18,6 +18,7 @@ class CreateBlock extends Migration {
function (Blueprint $table) {
$table->increments('id');
$table->string('name')->unique();
+ $table->string('handler');
$table->timestamps();
}
);
@@ -97,9 +98,10 @@ class CreateBlock extends Migration {
*/
private function seedBlockTypes()
{
- foreach (['basic', 'menu', 'slider', 'widget'] as $type) {
- BlockType::firstOrCreate(['name' => $type]);
- }
+ BlockType::firstOrCreate(['name' => 'basic', 'handler' => Gzero\Cms\Handlers\Block\Basic::class]);
+ BlockType::firstOrCreate(['name' => 'menu', 'handler' => Gzero\Cms\Handlers\Block\Menu::class]);
+ BlockType::firstOrCreate(['name' => 'slider', 'handler' => Gzero\Cms\Handlers\Block\Slider::class]);
+ BlockType::firstOrCreate(['name' => 'widget', 'handler' => Gzero\Cms\Handlers\Block\Widget::class]);
}
}
diff --git a/src/Gzero/Cms/Models/BlockType.php b/src/Gzero/Cms/Models/BlockType.php
index <HASH>..<HASH> 100644
--- a/src/Gzero/Cms/Models/BlockType.php
+++ b/src/Gzero/Cms/Models/BlockType.php
@@ -6,7 +6,8 @@ class BlockType extends Model {
/** @var array */
protected $fillable = [
- 'name'
+ 'name',
+ 'handler'
];
/** | Added handler column to block types (#<I>) | GrupaZero_cms | train
6e4f0d5e7bbf4b9d6858e2d6d798ba4f2967d540 | diff --git a/rarfile.py b/rarfile.py
index <HASH>..<HASH> 100644
--- a/rarfile.py
+++ b/rarfile.py
@@ -18,6 +18,7 @@ import os, re
from struct import pack, unpack
from binascii import crc32
from cStringIO import StringIO
+from tempfile import mkstemp
# whether to speed up decompression by using tmp archive
_use_extract_hack = 1
@@ -394,8 +395,9 @@ class RarFile:
size = inf.compress_size + inf.header_size
rf = open(self.rarfile, "rb")
rf.seek(inf.header_offset)
- tmpname = os.tempnam() + ".rar"
- tmpf = open(tmpname, "wb")
+
+ tmpfd, tmpname = mkstemp(suffix='.rar')
+ tmpf = os.fdopen(tmpfd, "wb")
# create main header: crc, type, flags, size, res1, res2
mh = pack("<HBHHHL", 0x90CF, 0x73, 0, 13, 0, 0)
@@ -478,11 +480,3 @@ class _UnicodeFilename:
self.put(self.std_byte(), 0)
return self.buf.getvalue().decode("utf-16le", "replace")
-# ignore os.tempnam() warning
-try:
- import warnings
- warnings.filterwarnings(action = 'ignore', category = RuntimeWarning,
- module = 'rarfile')
-except Exception, det:
- pass
- | Replace os.tempnam() with tempfile.mkstemp().
That also gets rid of the ugly hack with warnings.filterwarnings().
Patch contributed by Jason Moiron. | markokr_rarfile | train |
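The replacement API in one self-contained snippet; mkstemp() creates the file atomically and hands back an open descriptor, avoiding the name-then-open race of os.tempnam():

import os
from tempfile import mkstemp

fd, tmpname = mkstemp(suffix=".rar")
with os.fdopen(fd, "wb") as tmpf:
    tmpf.write(b"Rar!")  # the synthesized archive content would go here
os.remove(tmpname)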
3512f6dc9ccb639db0a51186e67d899c453e2b95 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -137,6 +137,8 @@ else:
'quantum-debug = quantum.debug.shell:main',
'quantum-ovs-cleanup = quantum.agent.ovs_cleanup_util:main',
'quantum-db-manage = quantum.db.migration.cli:main',
+ ('quantum-lbaas-agent = '
+ 'quantum.plugins.services.agent_loadbalancer.agent:main'),
('quantum-check-nvp-config = '
'quantum.plugins.nicira.nicira_nvp_plugin.check_nvp_config:main'),
] | LBaaS Agent Reference Implementation
implements blueprint lbaas-namespace-agent
This is a reference implementation of the Quantum load balancing service
using HAProxy. The implementation is designed for vendors, developers,
and deployers to become familiar with the API and service workflow.
This change also adds some constraint checks for data integrity.
Change-Id: I<I>a<I>da<I>ccf<I>b<I>f4f<I>a1 | openstack_networking-cisco | train |
1ac5ce3fbf907e9eba7701a242625c4ebb886483 | diff --git a/cmd/show_test.go b/cmd/show_test.go
index <HASH>..<HASH> 100644
--- a/cmd/show_test.go
+++ b/cmd/show_test.go
@@ -23,9 +23,21 @@ import (
"reflect"
"testing"
+ "github.com/spf13/cobra"
+ "github.com/spf13/pflag"
"gopkg.in/yaml.v2"
)
+func resetFlagsOf(cmd *cobra.Command) {
+ cmd.Flags().VisitAll(func(f *pflag.Flag) {
+ if sv, ok := f.Value.(pflag.SliceValue); ok {
+ sv.Replace(nil)
+ } else {
+ f.Value.Set(f.DefValue)
+ }
+ })
+}
+
func cmdOutput(t *testing.T, args []string) string {
var buf bytes.Buffer
RootCmd.SetOutput(&buf)
@@ -86,6 +98,7 @@ func TestShow(t *testing.T) {
"-V", "anVar",
"--ext-str-file", "filevar=" + filepath.FromSlash("../testdata/extvar.file"),
})
+ defer resetFlagsOf(RootCmd)
t.Log("output is", output)
actual, err := parser(output)
@@ -136,6 +149,7 @@ func TestShowUsingExtVarFiles(t *testing.T) {
"--tla-code-file", "sink=sink.jsonnet",
"--ext-str-file", "filevar=var.txt",
})
+ defer resetFlagsOf(RootCmd)
t.Log("output is", output)
var actual interface{} | Reset flags set on RootCmd in "show" test | bitnami_kubecfg | train |
02577e9aa212a33da2e1f20209634837c54d120f | diff --git a/js/browser.js b/js/browser.js
index <HASH>..<HASH> 100755
--- a/js/browser.js
+++ b/js/browser.js
@@ -842,7 +842,7 @@ class Browser {
for (let trackView of this.trackViews) {
const trackId = trackView.track.id;
- if (trackId !== 'sequence' && trackId !== 'ruler' && trackId !== 'ideogram') {
+ if (trackId !== 'ruler' && trackId !== 'ideogram') {
this.trackContainer.removeChild(trackView.trackDiv);
this.fireEvent('trackremoved', [trackView.track]);
trackView.dispose(); | bug fix -- accumulating sequence tracks | igvteam_igv.js | train |
21a1b4c2c518367dac45181f84fa2d485e01e6ef | diff --git a/projects/samskivert/src/java/com/samskivert/servlet/user/UserRepository.java b/projects/samskivert/src/java/com/samskivert/servlet/user/UserRepository.java
index <HASH>..<HASH> 100644
--- a/projects/samskivert/src/java/com/samskivert/servlet/user/UserRepository.java
+++ b/projects/samskivert/src/java/com/samskivert/servlet/user/UserRepository.java
@@ -1,9 +1,10 @@
//
-// $Id: UserRepository.java,v 1.12 2001/06/07 08:40:30 mdb Exp $
+// $Id: UserRepository.java,v 1.13 2001/07/08 19:34:39 mdb Exp $
package com.samskivert.servlet.user;
import java.sql.*;
+import java.util.ArrayList;
import java.util.Calendar;
import java.util.Properties;
@@ -347,6 +348,44 @@ public class UserRepository extends MySQLRepository
return result;
}
+ /**
+ * Returns an array with the real names of every user in the system.
+ * This is for Paul who whined about not knowing who was using Who,
+ * Where, When because he didn't feel like emailing anyone that wasn't
+ * already using it to link up.
+ */
+ public String[] loadAllRealNames ()
+ throws SQLException
+ {
+ final ArrayList names = new ArrayList();
+
+ // do the query
+ execute(new Operation () {
+ public Object invoke () throws SQLException
+ {
+ Statement stmt = _session.connection.createStatement();
+ try {
+ String query = "select realname from users";
+ ResultSet rs = stmt.executeQuery(query);
+ while (rs.next()) {
+ names.add(rs.getString(1));
+ }
+
+ // nothing to return
+ return null;
+
+ } finally {
+ JDBCUtil.close(stmt);
+ }
+ }
+ });
+
+ // finally construct our result
+ String[] result = new String[names.size()];
+ names.toArray(result);
+ return result;
+ }
+
public static void main (String[] args)
{
Properties props = new Properties(); | Added means by which all users' real names can be loaded.
git-svn-id: <URL> | samskivert_samskivert | train |
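A sketch of the same collect-one-column query using sqlite3 so it runs standalone; the schema line is illustrative:

import sqlite3

def load_all_real_names(conn):
    cur = conn.execute("SELECT realname FROM users")
    return [row[0] for row in cur.fetchall()]

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (realname TEXT)")
conn.executemany("INSERT INTO users VALUES (?)", [("Paul",), ("Mary",)])
print(load_all_real_names(conn))  # ['Paul', 'Mary']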
50fd0d921b697ad72431553cf289dce7d5dfac7c | diff --git a/src/js/chosen.js b/src/js/chosen.js
index <HASH>..<HASH> 100644
--- a/src/js/chosen.js
+++ b/src/js/chosen.js
@@ -220,6 +220,7 @@ MIT License, https://github.com/harvesthq/chosen/blob/master/LICENSE.md
_this.max_selected_options = _options.max_selected_options || Infinity;
_this.drop_direction = _options.drop_direction || 'auto';
_this.drop_item_height = _options.drop_item_height !== undefined ? _options.drop_item_height : 25;
+ _this.max_drop_height = _options.max_drop_height !== undefined ? _options.max_drop_height : 240;
_this.middle_highlight = _options.middle_highlight;
_this.compact_search = _options.compact_search || false;
_this.inherit_select_classes = _options.inherit_select_classes || false;
@@ -1012,8 +1013,8 @@ MIT License, https://github.com/harvesthq/chosen/blob/master/LICENSE.md
if (!that.drop_directionFixed) {
var $drop = that.container.find('.chosen-drop');
var dropHeight = $drop.outerHeight();
- if (that.drop_item_height && dropHeight < that.drop_item_height * 3) {
- dropHeight = dropHeight + $drop.find('.chosen-results>.active-result').length * that.drop_item_height;
+ if (that.drop_item_height && dropHeight < that.max_drop_height) {
+ dropHeight = Math.min(that.max_drop_height, $drop.find('.chosen-results>.active-result').length * that.drop_item_height);
}
var offset = that.container.offset();
if(offset.top + dropHeight + 30 > $(window).height() + $(window).scrollTop()) { | * improve UI of drop menu of chosen. | easysoft_zui | train |
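The height estimate caps items-times-row-height at max_drop_height; the same arithmetic in Python, using the defaults from the patch (25 px rows, 240 px cap):

def drop_height(item_count, item_height=25, max_height=240):
    return min(max_height, item_count * item_height)

print(drop_height(3))   # 75
print(drop_height(50))  # 240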
14167e3e7689168ef35ce5c7c41bbda96cf42b0b | diff --git a/resources/views/password/reset.php b/resources/views/password/reset.php
index <HASH>..<HASH> 100644
--- a/resources/views/password/reset.php
+++ b/resources/views/password/reset.php
@@ -1,8 +1,12 @@
<?php $view->layout() ?>
<ul id="js-reset-tabs" class="nav tab-underline border-bottom">
- <li class="active border-primary"><a class="text-active-primary" href="#tab-mobile" data-toggle="tab">手机找回</a></li>
- <li class="border-primary"><a class="text-active-primary" href="#tab-email" data-toggle="tab">邮箱找回</a></li>
+ <li class="nav-item active border-primary">
+ <a class="nav-link text-active-primary" href="#tab-mobile" data-toggle="tab">手机找回</a>
+ </li>
+ <li class="nav-item border-primary">
+ <a class="nav-link text-active-primary" href="#tab-email" data-toggle="tab">邮箱找回</a>
+ </li>
</ul>
<div class="tab-content">
diff --git a/resources/views/users/register.php b/resources/views/users/register.php
index <HASH>..<HASH> 100644
--- a/resources/views/users/register.php
+++ b/resources/views/users/register.php
@@ -5,8 +5,12 @@
</div>
<ul class="js-register-tabs nav tab-underline border-bottom">
- <li class="active border-primary"><a class="text-active-primary" href="#tabMobile" data-toggle="tab">手机注册</a></li>
- <li class="border-primary"><a class="text-active-primary" href="#tabEmail" data-toggle="tab">邮箱注册</a></li>
+ <li class="nav-item active border-primary">
+ <a class="nav-link text-active-primary" href="#tabMobile" data-toggle="tab">手机注册</a>
+ </li>
+ <li class="nav-item border-primary">
+ <a class="nav-link text-active-primary" href="#tabEmail" data-toggle="tab">邮箱注册</a>
+ </li>
</ul>
<div class="tab-content"> | refactor: update nav to bs4 | miaoxing_user | train
00bfcb9552cf61ee834d8f0980ead381936c2654 | diff --git a/VERSION.yml b/VERSION.yml
index <HASH>..<HASH> 100644
--- a/VERSION.yml
+++ b/VERSION.yml
@@ -2,4 +2,3 @@
:major: 0
:minor: 11
:patch: 0
-:build: alpha.9
diff --git a/lib/staticmatic/base.rb b/lib/staticmatic/base.rb
index <HASH>..<HASH> 100644
--- a/lib/staticmatic/base.rb
+++ b/lib/staticmatic/base.rb
@@ -97,7 +97,7 @@ module StaticMatic
compass_config_path = File.join(@base_dir, "config", "compass.rb")
if File.exists?(compass_config_path)
- Compass.add_configuration(compass_config_path)
+ Compass.add_configuration(compass_config_path)
end
configuration.sass_options.merge!(Compass.configuration.to_sass_engine_options)
diff --git a/lib/staticmatic/compass.rb b/lib/staticmatic/compass.rb
index <HASH>..<HASH> 100644
--- a/lib/staticmatic/compass.rb
+++ b/lib/staticmatic/compass.rb
@@ -1,5 +1,5 @@
["installer", "configuration_defaults", "app_integration"].each do |file|
- require File.join(File.dirname(__FILE__), "compass", file)
+ require File.join(File.dirname(__FILE__), "compass", file)
end
Compass.add_configuration(:staticmatic)
diff --git a/lib/staticmatic/compass/app_integration.rb b/lib/staticmatic/compass/app_integration.rb
index <HASH>..<HASH> 100644
--- a/lib/staticmatic/compass/app_integration.rb
+++ b/lib/staticmatic/compass/app_integration.rb
@@ -5,15 +5,12 @@ module Compass
extend self
-
-
def installer(*args)
Installer.new(*args)
end
def configuration
- Compass::Configuration::Data.new('staticmatic').
- extend(ConfigurationDefaults)
+ Compass::Configuration::Data.new('staticmatic').extend(ConfigurationDefaults)
end
end
diff --git a/lib/staticmatic/compass/configuration_defaults.rb b/lib/staticmatic/compass/configuration_defaults.rb
index <HASH>..<HASH> 100644
--- a/lib/staticmatic/compass/configuration_defaults.rb
+++ b/lib/staticmatic/compass/configuration_defaults.rb
@@ -5,7 +5,11 @@ module Compass
def project_type_without_default
:staticmatic
end
-
+
+ def http_path
+ "/"
+ end
+
def sass_dir_without_default
"src/stylesheets"
end
@@ -21,12 +25,13 @@ module Compass
def images_dir_without_default
"site/images"
end
+
def default_http_images_path
- "site/images"
+ "images"
end
def default_http_javascripts_path
- "site/javascripts"
+ "javascripts"
end
def default_cache_dir
diff --git a/spec/compass_integration_spec.rb b/spec/compass_integration_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/compass_integration_spec.rb
+++ b/spec/compass_integration_spec.rb
@@ -1,7 +1,7 @@
require File.dirname(__FILE__) + "/spec_helper"
describe "Compass integration" do
- context "with the default staticmatic configuraiton" do
+ context "with the default staticmatic configuration" do
before do
setup_staticmatic
end
@@ -11,7 +11,7 @@ describe "Compass integration" do
Compass.configuration.sass_dir.should == File.join("src", "stylesheets")
Compass.configuration.css_dir.should == File.join("site", "stylesheets")
Compass.configuration.images_dir.should == File.join("site", "images")
- Compass.configuration.http_images_path.should == "site/images"
+ Compass.configuration.http_images_path.should == "images"
end
end
diff --git a/spec/sandbox/test_site/src/stylesheets/application.sass b/spec/sandbox/test_site/src/stylesheets/application.sass
index <HASH>..<HASH> 100644
--- a/spec/sandbox/test_site/src/stylesheets/application.sass
+++ b/spec/sandbox/test_site/src/stylesheets/application.sass
@@ -2,4 +2,4 @@
+blueprint
body
- :font-family = "Verdana, Helvetica, sans-serif"
\ No newline at end of file
+ font-family: "Verdana, Helvetica, sans-serif"
\ No newline at end of file
diff --git a/staticmatic.gemspec b/staticmatic.gemspec
index <HASH>..<HASH> 100644
--- a/staticmatic.gemspec
+++ b/staticmatic.gemspec
@@ -5,7 +5,7 @@
Gem::Specification.new do |s|
s.name = %q{staticmatic}
- s.version = "0.11.0.alpha.9"
+ s.version = "0.11.0.alpha.10"
s.required_rubygems_version = Gem::Requirement.new("> 1.3.1") if s.respond_to? :required_rubygems_version=
s.authors = ["Stephen Bartholomew"] | Fix compass integration defaults, fix tests, prep for version <I> release | staticmatic_staticmatic | train |
d0f14f848843b926cdf060c5a672734d230cb7fc | diff --git a/examples/chat/app.js b/examples/chat/app.js
index <HASH>..<HASH> 100644
--- a/examples/chat/app.js
+++ b/examples/chat/app.js
@@ -8,7 +8,7 @@ configure(function(){
oneMinute = 60000
use(MethodOverride)
use(ContentLength)
- use(Profiler)
+ use(Profiler, { format: 'plot' })
use(Cookie)
use(Cache, { lifetime: fiveMinutes, reapInterval: oneMinute })
use(Session, { lifetime: fiveMinutes, reapInterval: oneMinute })
diff --git a/lib/express/plugins/profiler.js b/lib/express/plugins/profiler.js
index <HASH>..<HASH> 100644
--- a/lib/express/plugins/profiler.js
+++ b/lib/express/plugins/profiler.js
@@ -4,6 +4,26 @@
var n = 0
exports.Profiler = Plugin.extend({
+ extend: {
+
+ /**
+ * Initialize profiler options.
+ *
+ * Options:
+ *
+ * - format 'plot' outputs request duration in milliseconds only
+ *
+ * @param {hash} options
+ * @api private
+ */
+
+ init: function(options) {
+ process.mixin(this, options)
+ }
+ },
+
+ // --- Events
+
on: {
/**
@@ -19,10 +39,13 @@ exports.Profiler = Plugin.extend({
*/
response: function(event) {
- puts(event.request.method + ' ' +
- event.request.url.pathname + ': ' +
- (Number(new Date) - this.start) + ' ms' +
- ' #' + ++n)
+ if (exports.Profiler.format === 'plot')
+ puts(Number(new Date) - this.start)
+ else
+ puts(event.request.method + ' ' +
+ event.request.url.pathname + ': ' +
+ (Number(new Date) - this.start) + ' ms' +
+ ' #' + ++n)
}
}
})
\ No newline at end of file | Added "plot" format option for Profiler
This can be used to generate gnuplot graphs, etc. | expressjs_express | train
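The change above boils down to a format switch inside the response timing hook: `plot` emits bare millisecond values (one per line, easy to pipe into gnuplot), anything else keeps the verbose `METHOD path: N ms #count` line. A minimal Python sketch of that pattern — class and handler names are illustrative, not Express API:

```python
import time

class Profiler:
    """Timing hook with a 'plot' format that prints durations only."""

    def __init__(self, fmt="verbose"):
        self.fmt = fmt      # 'plot' -> one number per line, gnuplot-friendly
        self.count = 0

    def on_response(self, method, path, handler):
        start = time.monotonic()
        result = handler()
        elapsed_ms = (time.monotonic() - start) * 1000
        self.count += 1
        if self.fmt == "plot":
            print(round(elapsed_ms))
        else:
            print(f"{method} {path}: {elapsed_ms:.0f} ms #{self.count}")
        return result

profiler = Profiler(fmt="plot")
profiler.on_response("GET", "/chat", lambda: time.sleep(0.01))
```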
cd91325d3917e5117bc66143de2c4bf7aaf7ecaf | diff --git a/lib/poolparty/net/remote_bases/ec2.rb b/lib/poolparty/net/remote_bases/ec2.rb
index <HASH>..<HASH> 100644
--- a/lib/poolparty/net/remote_bases/ec2.rb
+++ b/lib/poolparty/net/remote_bases/ec2.rb
@@ -121,14 +121,14 @@ begin
"if [ -z \"$(grep -v '#' /etc/hosts | grep '#{o.name}')\" ]; then echo '127.0.0.1 #{o.name}' >> /etc/hosts; fi",
"hostname #{o.name}",
"echo #{o.name} > /etc/hostname",
- "cd /var/poolparty && wget http://rubyforge.org/frs/download.php/43666/amazon-ec2-0.3.1.gem -O amazon-ec2.gem 2>&1"
+ "cd /var/poolparty && wget http://rubyforge.org/frs/download.php/43666/amazon-ec2-0.3.1.gem -O amazon-ec2.gem 2>&1",
+ "/usr/bin/gem install -y --no-ri --no-rdoc amazon-ec2.gem 2>&1"
]
end
def custom_configure_tasks_for(o)
[
- "# ec2 configuration",
- "/usr/bin/gem install -y --no-ri --no-rdoc amazon-ec2.gem 2>&1"
+ "# ec2 configuration"
]
end | Updated basics for ec2 | auser_poolparty | train |
31acb6028088b0a09216850cc93fbc28736e61a2 | diff --git a/src/Pug/Keyword/Minify.php b/src/Pug/Keyword/Minify.php
index <HASH>..<HASH> 100644
--- a/src/Pug/Keyword/Minify.php
+++ b/src/Pug/Keyword/Minify.php
@@ -94,8 +94,8 @@ class Minify
$pug = new Pug(array(
'classAttribute' => $classAttribute,
- 'singleQuote' => $this->pug->getOption('singleQuote'),
- 'prettyprint' => $this->pug->getOption('prettyprint'),
+ 'singleQuote' => $this->getOption('singleQuote'),
+ 'prettyprint' => $this->getOption('prettyprint'),
));
return $pug->render($code);
@@ -127,26 +127,33 @@ class Minify
return array($extension, $path, $source, $destination);
}
+ protected function needUpate($source, $destination)
+ {
+ return !$this->dev || !file_exists($destination) || filemtime($source) >= filemtime($destination);
+ }
+
protected function parseScript($path)
{
list($extension, $path, $source, $destination) = $this->getPathInfo($path, 'js');
- switch ($extension) {
- case 'jsxp':
- $contents = preg_replace_callback('/(?<!\s)(\s*)::`(([^`]+|(?<!`)`(?!`))*?)`(?!`)/', array($this, 'parsePugInJsx'), file_get_contents($source));
- $this->writeWith(new React($contents), $destination);
- break;
- case 'jsx':
- $this->writeWith(new React($source), $destination);
- break;
- case 'cofp':
- $contents = preg_replace_callback('/(?<!\s)(\s*)::"""([\s\S]*?)"""/', array($this, 'parsePugInCoffee'), file_get_contents($source));
- $this->writeWith(new Coffeescript($contents), $destination);
- break;
- case 'coffee':
- $this->writeWith(new Coffeescript($source), $destination);
- break;
- default:
- copy($source, $destination);
+ if ($this->needUpate($source, $destination)) {
+ switch ($extension) {
+ case 'jsxp':
+ $contents = preg_replace_callback('/(?<!\s)(\s*)::`(([^`]+|(?<!`)`(?!`))*?)`(?!`)/', array($this, 'parsePugInJsx'), file_get_contents($source));
+ $this->writeWith(new React($contents), $destination);
+ break;
+ case 'jsx':
+ $this->writeWith(new React($source), $destination);
+ break;
+ case 'cofp':
+ $contents = preg_replace_callback('/(?<!\s)(\s*)::"""([\s\S]*?)"""/', array($this, 'parsePugInCoffee'), file_get_contents($source));
+ $this->writeWith(new Coffeescript($contents), $destination);
+ break;
+ case 'coffee':
+ $this->writeWith(new Coffeescript($source), $destination);
+ break;
+ default:
+ copy($source, $destination);
+ }
}
if ($this->dev) {
return $path . '?' . time();
@@ -157,15 +164,17 @@ class Minify
protected function parseStyle($path)
{
list($extension, $path, $source, $destination) = $this->getPathInfo($path, 'css');
- switch ($extension) {
- case 'styl':
- $this->writeWith(new Stylus($source), $destination);
- break;
- case 'less':
- $this->writeWith(new Less($source), $destination);
- break;
- default:
- copy($source, $destination);
+ if ($this->needUpate($source, $destination)) {
+ switch ($extension) {
+ case 'styl':
+ $this->writeWith(new Stylus($source), $destination);
+ break;
+ case 'less':
+ $this->writeWith(new Less($source), $destination);
+ break;
+ default:
+ copy($source, $destination);
+ }
}
if ($this->dev) {
return $path . '?' . time(); | Update only new resources in dev mode | pug-php_pug-minify | train |
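The new `needUpate()` guard is a classic mtime check: rebuild whenever we are not in dev mode, the compiled file is missing, or the source is at least as new as its output. A sketch of the same predicate in Python (file names are placeholders):

```python
import os

def needs_update(source, destination, dev=True):
    """True when a rebuild is required: outside dev mode always rebuild;
    in dev mode rebuild only if the output is missing or stale."""
    return (not dev
            or not os.path.exists(destination)
            or os.path.getmtime(source) >= os.path.getmtime(destination))

if needs_update("app.styl", "app.css"):
    print("recompile app.styl -> app.css")
else:
    print("output is fresh, skip compilation")
```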
b3c2cbc32fc2695b58545e7c764cd56eaca4d98c | diff --git a/scripts/release/mirror.js b/scripts/release/mirror.js
index <HASH>..<HASH> 100644
--- a/scripts/release/mirror.js
+++ b/scripts/release/mirror.js
@@ -317,7 +317,7 @@ const bumpWebView = (sourceData) => {
* @param {SupportStatement} data
* @param {string} destination
*/
-export const bumpVersion = (sourceData, destination) => {
+export const bumpSupport = (sourceData, destination) => {
let newData = null;
if (sourceData == null) {
@@ -326,7 +326,7 @@ export const bumpVersion = (sourceData, destination) => {
if (Array.isArray(sourceData)) {
return combineStatements(
- ...sourceData.map((data) => bumpVersion(data, destination)),
+ ...sourceData.map((data) => bumpSupport(data, destination)),
);
}
@@ -350,12 +350,28 @@ export const bumpVersion = (sourceData, destination) => {
return { version_added: false };
}
+ if (newData.version_added === newData.version_removed) {
+ // If version_added and version_removed are the same, feature is unsupported
+ return { ...newData, version_added: false, version_removed: undefined };
+ }
+
return newData;
};
-const bumpData = (destination, data) => {
+const mirrorSupport = (destination, data) => {
const upstream = browsers[destination].upstream;
- return bumpVersion(data[upstream], destination);
+ if (!upstream) {
+ throw new Error(
+ `Upstream is not defined for ${destination}, cannot mirror!`,
+ );
+ }
+
+ if (data[upstream] == 'mirror') {
+ // Perform mirroring upstream if needed
+ data[upstream] = mirrorSupport(upstream, data);
+ }
+
+ return bumpSupport(data[upstream], destination);
};
-export default bumpData;
+export default mirrorSupport; | Mirroring: apply suggestions from code review (#<I>)
* Mirroring: apply suggestions from code review
* Fix error message
* Perform upstream mirroring if needed
* Fix function call | mdn_browser-compat-data | train |
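Two ideas carry the patch: `mirrorSupport` first resolves the upstream browser's own `"mirror"` placeholder recursively, and a statement whose `version_added` equals `version_removed` collapses to "unsupported". A simplified Python model of just those two rules — the real code also bumps versions per browser, which is omitted here, and the data layout is illustrative, not the actual BCD schema:

```python
def mirror_support(browser, data, upstreams):
    """Resolve `browser`'s support statement from its upstream chain."""
    upstream = upstreams.get(browser)
    if upstream is None:
        raise ValueError(f"Upstream is not defined for {browser}, cannot mirror!")

    if data.get(upstream) == "mirror":
        # The upstream mirrors too: resolve it first (recursion).
        data[upstream] = mirror_support(upstream, data, upstreams)

    statement = dict(data[upstream])
    if statement.get("version_added") == statement.get("version_removed"):
        # Added and removed in the same version: effectively never supported.
        statement["version_added"] = False
        statement.pop("version_removed", None)
    return statement

upstreams = {"webview_android": "chrome_android", "chrome_android": "chrome"}
data = {"chrome": {"version_added": "50"}, "chrome_android": "mirror"}
print(mirror_support("webview_android", data, upstreams))  # {'version_added': '50'}
```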
4feed2ece83bf874daa368ae22320535c90f690f | diff --git a/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java b/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
index <HASH>..<HASH> 100644
--- a/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
+++ b/aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
@@ -1377,20 +1377,15 @@ class ConsensusModuleAgent implements Agent
LogReplay newLogReplay(final long logPosition, final long appendPosition)
{
- if (logPosition < appendPosition)
- {
- return new LogReplay(
- archive,
- recoveryPlan.log.recordingId,
- logPosition,
- appendPosition,
- recoveryPlan.log.leadershipTermId,
- recoveryPlan.log.sessionId,
- logAdapter,
- ctx);
- }
-
- return null;
+ return new LogReplay(
+ archive,
+ recoveryPlan.log.recordingId,
+ logPosition,
+ appendPosition,
+ recoveryPlan.log.leadershipTermId,
+ recoveryPlan.log.sessionId,
+ logAdapter,
+ ctx);
}
void awaitServicesReadyForReplay(
diff --git a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
index <HASH>..<HASH> 100644
--- a/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
+++ b/aeron-cluster/src/main/java/io/aeron/cluster/Election.java
@@ -118,10 +118,11 @@ class Election
int doWork(final long nowNs)
{
int workCount = ElectionState.INIT == state ? init(nowNs) : 0;
- workCount += consensusAdapter.poll();
try
{
+ workCount += consensusAdapter.poll();
+
switch (state)
{
case CANVASS:
@@ -565,15 +566,19 @@ class Election
if (null == logReplay)
{
+ workCount += 1;
logSessionId = consensusModuleAgent.addNewLogPublication();
ClusterMember.resetLogPositions(clusterMembers, NULL_POSITION);
thisMember.leadershipTermId(leadershipTermId).logPosition(appendPosition);
- if (null == (logReplay = consensusModuleAgent.newLogReplay(logPosition, appendPosition)))
+ if (logPosition < appendPosition)
+ {
+ logReplay = consensusModuleAgent.newLogReplay(logPosition, appendPosition);
+ }
+ else
{
state(LEADER_TRANSITION, nowNs);
- workCount = 1;
}
}
else
@@ -670,10 +675,14 @@ class Election
if (null == logReplay)
{
- if (null == (logReplay = consensusModuleAgent.newLogReplay(logPosition, appendPosition)))
+ workCount += 1;
+ if (logPosition < appendPosition)
+ {
+ logReplay = consensusModuleAgent.newLogReplay(logPosition, appendPosition);
+ }
+ else
{
state(nextState, nowNs);
- workCount = 1;
}
}
else
@@ -872,50 +881,48 @@ class Election
private void state(final ElectionState newState, final long nowNs)
{
- if (newState == state)
+ if (newState != state)
{
- return;
- }
+ stateChange(state, newState, thisMember.id());
- stateChange(state, newState, thisMember.id());
+ if (CANVASS == newState)
+ {
+ resetMembers();
+ }
- if (CANVASS == newState)
- {
- resetMembers();
- }
+ if (CANVASS == state)
+ {
+ isExtendedCanvass = false;
+ }
- if (CANVASS == state)
- {
- isExtendedCanvass = false;
- }
+ switch (newState)
+ {
+ case INIT:
+ case CANVASS:
+ case NOMINATE:
+ case FOLLOWER_BALLOT:
+ case FOLLOWER_CATCHUP_TRANSITION:
+ case FOLLOWER_CATCHUP:
+ case FOLLOWER_REPLAY:
+ case FOLLOWER_TRANSITION:
+ case FOLLOWER_READY:
+ consensusModuleAgent.role(Cluster.Role.FOLLOWER);
+ break;
- switch (newState)
- {
- case INIT:
- case CANVASS:
- case NOMINATE:
- case FOLLOWER_BALLOT:
- case FOLLOWER_CATCHUP_TRANSITION:
- case FOLLOWER_CATCHUP:
- case FOLLOWER_REPLAY:
- case FOLLOWER_TRANSITION:
- case FOLLOWER_READY:
- consensusModuleAgent.role(Cluster.Role.FOLLOWER);
- break;
+ case CANDIDATE_BALLOT:
+ consensusModuleAgent.role(Cluster.Role.CANDIDATE);
+ break;
- case CANDIDATE_BALLOT:
- consensusModuleAgent.role(Cluster.Role.CANDIDATE);
- break;
+ case LEADER_TRANSITION:
+ case LEADER_READY:
+ consensusModuleAgent.role(Cluster.Role.LEADER);
+ break;
+ }
- case LEADER_TRANSITION:
- case LEADER_READY:
- consensusModuleAgent.role(Cluster.Role.LEADER);
- break;
+ state = newState;
+ ctx.electionStateCounter().setOrdered(newState.code());
+ timeOfLastStateChangeNs = nowNs;
}
-
- state = newState;
- ctx.electionStateCounter().setOrdered(newState.code());
- timeOfLastStateChangeNs = nowNs;
}
private void resetCatchup() | [Java] Clarify logic on when log replay is done from election and catch exceptions from polling the consensus stream so the election can be reset. | real-logic_aeron | train
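Two behavioural points in this diff are easy to miss in the noise: polling now happens inside the try so a failure can reset the election, and a `LogReplay` is only created while `logPosition < appendPosition` — otherwise the transition happens immediately. A toy Python rendering of that guard (all names are illustrative, not Aeron API):

```python
def replay_step(state, log_position, append_position, new_log_replay):
    """Create a replay only when there is something left to replay."""
    work_count = 0
    if state["log_replay"] is None:
        work_count += 1
        if log_position < append_position:
            state["log_replay"] = new_log_replay(log_position, append_position)
        else:
            state["phase"] = "TRANSITION"     # nothing to catch up on
    return work_count

state = {"log_replay": None, "phase": "REPLAY"}
replay_step(state, 100, 100, lambda start, stop: ("replay", start, stop))
print(state["phase"])   # -> TRANSITION
```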
a4bdb3027ffc1bca960ac45fc0441502b8d24501 | diff --git a/src/Solr/Entity/JobProxy.php b/src/Solr/Entity/JobProxy.php
index <HASH>..<HASH> 100644
--- a/src/Solr/Entity/JobProxy.php
+++ b/src/Solr/Entity/JobProxy.php
@@ -49,6 +49,14 @@ class JobProxy extends AbstractIdentifiableModificationDateAwareEntity implement
}
/**
+ * @see \Jobs\Entity\JobInterface::getId()
+ */
+ public function getId()
+ {
+ return $this->getSolrResultValue('applyId') ?:$this->job->getId();
+ }
+
+ /**
* @see \Jobs\Entity\JobInterface::getApplications()
*/
public function getApplications()
diff --git a/test/SolrTest/FacetsTest.php b/test/SolrTest/FacetsTest.php
index <HASH>..<HASH> 100644
--- a/test/SolrTest/FacetsTest.php
+++ b/test/SolrTest/FacetsTest.php
@@ -239,8 +239,8 @@ class FacetsTest extends \PHPUnit_Framework_TestCase
'invalid name' => [['invalid' => []], 0],
'invalid type' => [[static::DEFINITION_NAME => 'invalid'], 0],
'empty value' => [[static::DEFINITION_NAME => []], 0],
- 'regular value' => [[static::DEFINITION_NAME => ['one' => '', 'two' => '']], 1, function ($value) {
- return strpos($value, static::DEFINITION_NAME.':(one OR two)') !== false;
+ 'regular value' => [[static::DEFINITION_NAME => ['one with spaces' => '', 'two' => '']], 1, function ($value) {
+ return strpos($value, static::DEFINITION_NAME.':("one with spaces" OR "two")') !== false;
}],
];
} | Fixes unit test. Returns job id from Solr search. | yawik_Solr | train
e9e98b518d78fc7422b799576d4db05161bf83b8 | diff --git a/src/inputmask.js b/src/inputmask.js
index <HASH>..<HASH> 100644
--- a/src/inputmask.js
+++ b/src/inputmask.js
@@ -4,7 +4,7 @@ function init(Survey) {
var widget = {
name: "maskedit",
numericGroupSeparator: ",",
- numericRadixPoint: "",
+ numericRadixPoint: undefined,
numericAutoGroup: true,
numericDigits: 2,
numericDigitsOptional: false, | Fixed #<I> - InputMask - Decimal/Currency not working | surveyjs_widgets | train |
8455a8a2d2058d1daf55e9f8446a2b4a0676339e | diff --git a/paramiko/ecdsakey.py b/paramiko/ecdsakey.py
index <HASH>..<HASH> 100644
--- a/paramiko/ecdsakey.py
+++ b/paramiko/ecdsakey.py
@@ -24,7 +24,6 @@ import binascii
from hashlib import sha256
from ecdsa import SigningKey, VerifyingKey, der, curves
-from ecdsa.test_pyecdsa import ECDSA
from paramiko.common import four_byte, one_byte
from paramiko.message import Message
@@ -51,7 +50,7 @@ class ECDSAKey (PKey):
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
- self.verifying_key, self.signing_key = vals
+ self.signing_key, self.verifying_key = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
@@ -125,7 +124,7 @@ class ECDSAKey (PKey):
key = self.signing_key or self.verifying_key
self._write_private_key('EC', file_obj, key.to_der(), password)
- def generate(bits, progress_func=None):
+ def generate(curve=curves.NIST256p, progress_func=None):
"""
Generate a new private RSA key. This factory function can be used to
generate a new host key or authentication key.
@@ -138,7 +137,7 @@ class ECDSAKey (PKey):
@return: new private key
@rtype: L{RSAKey}
"""
- signing_key = ECDSA.generate()
+ signing_key = SigningKey.generate(curve)
key = ECDSAKey(vals=(signing_key, signing_key.get_verifying_key()))
return key
generate = staticmethod(generate) | fix ecdsa key generation | paramiko_paramiko | train |
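The fix replaces a helper imported from the library's own test suite with the real `ecdsa` API, and corrects the tuple order so `vals` is read as `(signing_key, verifying_key)`. With the `ecdsa` package installed, the corrected generation path looks roughly like this standalone sketch:

```python
from ecdsa import SigningKey, NIST256p

# What ECDSAKey.generate() now does internally: create the private key on a
# chosen curve and derive the public half from it.
signing_key = SigningKey.generate(curve=NIST256p)
verifying_key = signing_key.get_verifying_key()

signature = signing_key.sign(b"payload")
assert verifying_key.verify(signature, b"payload")
print("round trip OK")
```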
9d2df34b38283af72abca8dfa828c41f581a03aa | diff --git a/src/instruments/gauge_controller.py b/src/instruments/gauge_controller.py
index <HASH>..<HASH> 100644
--- a/src/instruments/gauge_controller.py
+++ b/src/instruments/gauge_controller.py
@@ -35,6 +35,8 @@ class PressureGauge(Instrument):
ACK = chr(6) # \x06
NAK = chr(21) # \x15
+ PROBES = ['pressure', 'units']
+
def __init__(self, name='PressureGauge', parameter_list=[]):
"""
The serial connection should be setup with the following parameters:
@@ -45,7 +47,7 @@ class PressureGauge(Instrument):
super(PressureGauge, self).__init__(name, parameter_list)
self.ser = serial.Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout)
- self.probes = ['pressure', 'units']
+
@property
def parameters_default(self):
@@ -56,7 +58,7 @@ class PressureGauge(Instrument):
possible_com_ports = ['COM' + str(i) for i in range(0, 256)]
parameter_list_default = [
- Parameter('port', 'COM3', possible_com_ports, 'com port to which the gauge controller is connected'),
+ Parameter('port', 'COM4', possible_com_ports, 'com port to which the gauge controller is connected'),
Parameter('timeout', 1.0, float, 'amount of time to wait for a response from the gauge controller for each query'),
Parameter('baudrate', 9600, int, 'baudrate of serial communication with gauge')
]
diff --git a/tests/instrument_tests/test_PressureGauge.py b/tests/instrument_tests/test_PressureGauge.py
index <HASH>..<HASH> 100644
--- a/tests/instrument_tests/test_PressureGauge.py
+++ b/tests/instrument_tests/test_PressureGauge.py
@@ -21,7 +21,7 @@ class TestAGC100(TestCase):
def test_get_gauge_model(self):
model = self.gauge.model
- self.assertTrue(model == 'FRG70x')
+ self.assertEqual(model, 'FRG70x')
def test_get_units(self):
units = self.gauge.units
@@ -30,3 +30,7 @@ class TestAGC100(TestCase):
def test_is_connected(self):
self.assertTrue(self.gauge.is_connected())
+
+ def test_probe_list(self):
+ probes = self.gauge.PROBES
+ self.assertEqual(probes, ['pressure', 'units'])
\ No newline at end of file | AS: minor style changes on gauge controller, added PROBES variable, changed default params in piezocontroller to take a list of 'x' 'y' or 'z'. | LISE-B26_pylabcontrol | train |
6ed70ebbe3e0fa58ed04e51652a6d6bc2603cada | diff --git a/influxql/engine.go b/influxql/engine.go
index <HASH>..<HASH> 100644
--- a/influxql/engine.go
+++ b/influxql/engine.go
@@ -986,20 +986,47 @@ func ReducePercentile(percentile float64) ReduceFunc {
}
}
+// TODO: make this more efficient to stream data.
func MapRawQuery(itr Iterator, e *Emitter, tmin int64) {
- for k, _, v := itr.Next(); k != 0; k, _, v = itr.Next() {
- e.Emit(Key{k, itr.Tags()}, v)
+ var values []*rawQueryMapOutput
+
+ for k, d, v := itr.Next(); k != 0; k, d, v = itr.Next() {
+ values = append(values, &rawQueryMapOutput{k, d, v})
+ // Emit in batches.
+ // unbounded emission of data can lead to excessive memory use
+ // or other potential performance problems.
+ if len(values) == emitBatchSize {
+ e.Emit(Key{0, itr.Tags()}, values)
+ values = []*rawQueryMapOutput{}
+ }
+ }
+ if len(values) > 0 {
+ e.Emit(Key{0, itr.Tags()}, values)
}
}
type rawQueryMapOutput struct {
timestamp int64
+ data []byte
value interface{}
}
+type rawOutputs []*rawQueryMapOutput
+func (a rawOutputs) Len() int { return len(a) }
+func (a rawOutputs) Less(i, j int) bool { return a[i].timestamp < a[j].timestamp }
+func (a rawOutputs) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// TODO: make this streaming so it doesn't buffer everything in memory
func ReduceRawQuery(key Key, values []interface{}, e *Emitter) {
+ var a []*rawQueryMapOutput
for _, v := range values {
- e.Emit(key, v)
+ a = append(a, v.([]*rawQueryMapOutput)...)
+ }
+ if len(a) > 0 {
+ sort.Sort(rawOutputs(a))
+ for _, o := range a {
+ e.Emit(Key{o.timestamp, key.Values}, o.value)
+ }
}
} | WIP: fix for ordering of queries of raw values | influxdata_influxdb | train |
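The map side now buffers raw points and emits them in bounded batches (`emitBatchSize`) to cap memory use, and the reduce side concatenates the batches and restores global timestamp order before emitting. The shape of that pipeline in Python (emit callbacks stand in for the engine's emitter):

```python
EMIT_BATCH_SIZE = 1000

def map_raw_query(points, emit):
    batch = []
    for timestamp, value in points:
        batch.append((timestamp, value))
        if len(batch) == EMIT_BATCH_SIZE:   # bounded emission
            emit(batch)
            batch = []
    if batch:
        emit(batch)

def reduce_raw_query(batches, emit):
    merged = [p for batch in batches for p in batch]
    for timestamp, value in sorted(merged, key=lambda p: p[0]):
        emit(timestamp, value)

batches, out = [], []
map_raw_query([(3, "c"), (1, "a"), (2, "b")], batches.append)
reduce_raw_query(batches, lambda t, v: out.append((t, v)))
print(out)   # [(1, 'a'), (2, 'b'), (3, 'c')]
```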
706a845b06bab872e1b58334c91415ed6692cdcb | diff --git a/src/AdminServiceProvider.php b/src/AdminServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/AdminServiceProvider.php
+++ b/src/AdminServiceProvider.php
@@ -67,7 +67,7 @@ class AdminServiceProvider extends ServiceProvider
if ($this->app->runningInConsole()) {
$this->publishes([__DIR__.'/../config' => config_path()], 'laravel-admin-config');
$this->publishes([__DIR__.'/../resources/lang' => resource_path('lang')], 'laravel-admin-lang');
-// $this->publishes([__DIR__.'/../resources/views' => resource_path('views/admin')], 'laravel-admin-views');
+// $this->publishes([__DIR__.'/../resources/views' => resource_path('views/vendor/admin')], 'laravel-admin-views');
$this->publishes([__DIR__.'/../database/migrations' => database_path('migrations')], 'laravel-admin-migrations');
$this->publishes([__DIR__.'/../resources/assets' => public_path('vendor/laravel-admin')], 'laravel-admin-assets');
} | Update AdminServiceProvider.php
fix view publish path | z-song_laravel-admin | train |
912b3b6aa92e065c48793b7bbefb24e1260c85dc | diff --git a/lib/octopress.rb b/lib/octopress.rb
index <HASH>..<HASH> 100644
--- a/lib/octopress.rb
+++ b/lib/octopress.rb
@@ -1,5 +1,6 @@
require 'mercenary'
require 'titlecase'
+require 'octopress-docs'
module Octopress
require 'octopress/configuration'
@@ -46,3 +47,8 @@ module Octopress
end
end
end
+
+Octopress::Docs.add({
+ name: "Octopress",
+ dir: File.expand_path(File.join(File.dirname(__FILE__), "../../")),
+}) | Integrated octopress docs | octopress_octopress | train |
6e84f4d0d4680d821f27565e27f68d595930c38a | diff --git a/lib/simple_notifications/version.rb b/lib/simple_notifications/version.rb
index <HASH>..<HASH> 100644
--- a/lib/simple_notifications/version.rb
+++ b/lib/simple_notifications/version.rb
@@ -1,3 +1,3 @@
module SimpleNotifications
- VERSION = '1.1.8'
+ VERSION = '1.1.9'
end | 1. Code refactored
2. Action added in notification
3. Some minor issues fixed | aashishgarg_simple_notifications | train |
e92308a50826cdf1a5a6f4e00598eacf08761106 | diff --git a/core/src/main/java/org/mobicents/ha/javax/sip/LoadBalancerHeartBeatingServiceImpl.java b/core/src/main/java/org/mobicents/ha/javax/sip/LoadBalancerHeartBeatingServiceImpl.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/mobicents/ha/javax/sip/LoadBalancerHeartBeatingServiceImpl.java
+++ b/core/src/main/java/org/mobicents/ha/javax/sip/LoadBalancerHeartBeatingServiceImpl.java
@@ -90,8 +90,18 @@ public class LoadBalancerHeartBeatingServiceImpl implements LoadBalancerHeartBea
logger = clusteredSipStack.getStackLogger();
balancers = stackProperties.getProperty(BALANCERS);
}
+
+ public void stopBalancer() {
+ stop();
+ }
public void start() {
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+ public void run() {
+ stopBalancer();
+ logger.logInfo("Shutting down the Load Balancer Link");}
+ });
+
if (!started) {
if (balancers != null && balancers.length() > 0) {
String[] balancerDescriptions = balancers.split(BALANCERS_CHAR_SEPARATOR); | Shut down the Balancer Link ASAP. Otherwise the server will keep advertising itself during the whole shutdown, but it is not able to serve requests.
git-svn-id: <URL> | RestComm_jain-sip.ha | train |
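The one-line idea: register a shutdown hook so the heartbeat stops (and the node stops advertising itself) the moment the process begins to exit, rather than at the end of a long shutdown. Python's counterpart to `Runtime.addShutdownHook` is `atexit` — a sketch with hypothetical service names:

```python
import atexit

class HeartbeatService:
    def __init__(self):
        self.running = False

    def start(self):
        self.running = True
        # Stop advertising as early as possible on exit, so the balancer
        # stops routing traffic to a node that can no longer serve it.
        atexit.register(self.stop)
        print("heartbeat started")

    def stop(self):
        if self.running:
            self.running = False
            print("shutting down the load balancer link")

service = HeartbeatService()
service.start()   # stop() now runs automatically at interpreter exit
```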
cb0855b7a42bb1cb4c110773a2bf67edc1b50ab9 | diff --git a/reuseport/reuseport_test.go b/reuseport/reuseport_test.go
index <HASH>..<HASH> 100644
--- a/reuseport/reuseport_test.go
+++ b/reuseport/reuseport_test.go
@@ -13,7 +13,7 @@ func TestTCP4(t *testing.T) {
}
func TestTCP6(t *testing.T) {
- testNewListener(t, "tcp6", "ip6-localhost:10081", 20, 1000)
+ testNewListener(t, "tcp6", "[::1]:10082", 20, 1000)
}
func testNewListener(t *testing.T, network, addr string, serversCount, requestsCount int) { | an attempt to fix TestTCP6 on travis. It looks like travis doesn't know about `ip6-localhost`. Substitute it with [::1] | valyala_fasthttp | train
5442a3bc298a1b4c2eb219bec40e9788eb88ab07 | diff --git a/Slim/App.php b/Slim/App.php
index <HASH>..<HASH> 100644
--- a/Slim/App.php
+++ b/Slim/App.php
@@ -16,11 +16,8 @@ use Interop\Container\ContainerInterface;
* App
*
* This is the primary class with which you instantiate,
- * configure, and run a Slim Framework application. This
- * is also a \Pimple\Container instance, meaning you can
- * register custom Pimple service providers on each
- * \Slim\App instance. The \Slim\App class also accepts
- * Slim Framework middleware.
+ * configure, and run a Slim Framework application.
+ * The \Slim\App class also accepts Slim Framework middleware.
*
* @property-read array $settings App settings
* @property-read \Slim\Interfaces\Http\EnvironmentInterface $environment | Fix method documentation to reflect recent changes
\Slim\App is not an instance of \Pimple\Container since commit <URL> | slimphp_Slim | train
e61d2b8a9d9613b2c9a1ef63404884b00496a7ed | diff --git a/firefox/src/java/org/openqa/selenium/firefox/FirefoxProfile.java b/firefox/src/java/org/openqa/selenium/firefox/FirefoxProfile.java
index <HASH>..<HASH> 100644
--- a/firefox/src/java/org/openqa/selenium/firefox/FirefoxProfile.java
+++ b/firefox/src/java/org/openqa/selenium/firefox/FirefoxProfile.java
@@ -19,11 +19,9 @@ package org.openqa.selenium.firefox;
import java.io.BufferedReader;
import java.io.File;
-import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
-import java.io.InputStream;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.HashMap;
@@ -35,7 +33,6 @@ import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.firefox.internal.ClasspathExtension;
import org.openqa.selenium.firefox.internal.FileExtension;
import org.openqa.selenium.internal.Cleanly;
-import org.openqa.selenium.internal.FileHandler;
import org.openqa.selenium.internal.TemporaryFilesystem;
import org.openqa.selenium.internal.Zip;
@@ -89,7 +86,7 @@ public class FirefoxProfile {
}
try {
- addExtension(FirefoxProfile.class, "webdriver.xpi");
+ addExtension(FirefoxProfile.class, "/webdriver.xpi");
} catch (IOException e) {
if (!Boolean.getBoolean("webdriver.development")) {
throw new WebDriverException("Failed to install webdriver extension", e);
diff --git a/firefox/src/java/org/openqa/selenium/firefox/internal/ClasspathExtension.java b/firefox/src/java/org/openqa/selenium/firefox/internal/ClasspathExtension.java
index <HASH>..<HASH> 100644
--- a/firefox/src/java/org/openqa/selenium/firefox/internal/ClasspathExtension.java
+++ b/firefox/src/java/org/openqa/selenium/firefox/internal/ClasspathExtension.java
@@ -18,10 +18,13 @@ limitations under the License.
package org.openqa.selenium.firefox.internal;
import java.io.File;
-import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URL;
+import com.google.common.io.Closeables;
+import com.google.common.io.Resources;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.internal.FileHandler;
@@ -35,28 +38,26 @@ public class ClasspathExtension implements Extension {
}
public void writeTo(File extensionsDir) throws IOException {
- // Try and load it from the classpath
- InputStream resource = loadResourcesUsing.getResourceAsStream(loadFrom);
- if (resource == null && !loadFrom.startsWith("/")) {
- resource = loadResourcesUsing.getResourceAsStream("/" + loadFrom);
- }
- if (resource == null) {
- resource = getClass().getResourceAsStream(loadFrom);
- }
- if (resource == null && !loadFrom.startsWith("/")) {
- resource = getClass().getResourceAsStream("/" + loadFrom);
- }
- if (resource == null) {
- throw new FileNotFoundException("Cannot locate resource with name: " + loadFrom);
+ if (!FileHandler.isZipped(loadFrom)) {
+ throw new WebDriverException("Will only install zipped extensions for now");
}
- File root;
- if (FileHandler.isZipped(loadFrom)) {
- root = FileHandler.unzip(resource);
- } else {
- throw new WebDriverException("Will only install zipped extensions for now");
+
+ File holdingPen = new File(extensionsDir, "webdriver-staging");
+ FileHandler.createDir(holdingPen);
+ File extractedXpi = new File(holdingPen, loadFrom);
+
+ URL resourceUrl = Resources.getResource(loadResourcesUsing, loadFrom);
+ OutputStream stream = null;
+
+ try {
+ stream = new FileOutputStream(extractedXpi);
+ Resources.copy(resourceUrl, stream);
+ } finally {
+ Closeables.closeQuietly(stream);
}
- new FileExtension(root).writeTo(extensionsDir);
+
+ new FileExtension(extractedXpi).writeTo(extensionsDir);
}
}
diff --git a/firefox/src/java/org/openqa/selenium/firefox/internal/FileExtension.java b/firefox/src/java/org/openqa/selenium/firefox/internal/FileExtension.java
index <HASH>..<HASH> 100644
--- a/firefox/src/java/org/openqa/selenium/firefox/internal/FileExtension.java
+++ b/firefox/src/java/org/openqa/selenium/firefox/internal/FileExtension.java
@@ -31,6 +31,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Iterator;
+import com.google.common.io.Files;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.internal.FileHandler;
import org.w3c.dom.Document;
@@ -63,9 +64,10 @@ public class FileExtension implements Extension {
throw new IOException("Unable to delete existing extension directory: " + extensionDirectory);
}
+
FileHandler.createDir(extensionDirectory);
FileHandler.makeWritable(extensionDirectory);
- FileHandler.copy(root, extensionDirectory);
+ Files.move(root, extensionDirectory);
}
private File obtainRootDirectory(File extensionToInstall) throws IOException { | SimonStewart: A little tidying of the newly introduced classes
r<I> | SeleniumHQ_selenium | train |
decf9db153f635cf0eb04f63fedd3a97c99e0e81 | diff --git a/search/query.php b/search/query.php
index <HASH>..<HASH> 100644
--- a/search/query.php
+++ b/search/query.php
@@ -178,8 +178,8 @@
if (isset($vars)) {
foreach ($vars as $key => $value) {
// htmlentities breaks non-ascii chars
- $adv->key = stripslashes($value);
- //$adv->$key = stripslashes(htmlentities($value));
+ $adv->key = $value;
+ //$adv->$key = htmlentities($value);
}
}
?>
@@ -187,7 +187,7 @@
<?php
if (!$advanced) {
?>
- <input type="text" name="query_string" length="50" value="<?php print stripslashes($query_string) ?>" />
+ <input type="text" name="query_string" length="50" value="<?php p($query_string) ?>" />
<input type="submit" value="<?php print_string('search', 'search') ?>" />
<a href="query.php?a=1"><?php print_string('advancedsearch', 'search') ?></a> |
<a href="stats.php"><?php print_string('statistics', 'search') ?></a>
@@ -310,7 +310,7 @@
print "<br />";
- print $hit_count.' '.get_string('resultsreturnedfor', 'search') . " '".stripslashes($query_string)."'.";
+ print $hit_count.' '.get_string('resultsreturnedfor', 'search') . " '".s($query_string)."'.";
print "<br />";
if ($hit_count > 0) { | MDL-<I> removing stripslashes()
MDL-<I> proper form value quoting | moodle_moodle | train |
fd5d1424533cced83658798ca988bbef5a654a67 | diff --git a/internal/restorable/image.go b/internal/restorable/image.go
index <HASH>..<HASH> 100644
--- a/internal/restorable/image.go
+++ b/internal/restorable/image.go
@@ -218,6 +218,7 @@ func (i *Image) Extend(width, height int) *Image {
newImg.clearDrawTrianglesHistory()
newImg.basePixels = i.basePixels
newImg.stale = i.stale
+ newImg.staleRegion = i.staleRegion
i.Dispose() | internal/restorable: bug fix: needed to copy the stale region when extending an image
Updates #<I> | hajimehoshi_ebiten | train |
d7f58c3bed756d3a98cf409a240be48c1b9e07f5 | diff --git a/manifest.php b/manifest.php
index <HASH>..<HASH> 100755
--- a/manifest.php
+++ b/manifest.php
@@ -41,7 +41,7 @@ return [
'author' => 'Open Assessment Technologies SA',
'requires' => [
'generis' => '>=12.15.0',
- 'tao' => '>=41.5.0'
+ 'tao' => '>=41.6.0'
],
'routes' => [
'/taoLti' => 'oat\\taoLti\\controller'
diff --git a/models/classes/Security/Business/Service/SecretKeyService.php b/models/classes/Security/Business/Service/SecretKeyService.php
index <HASH>..<HASH> 100644
--- a/models/classes/Security/Business/Service/SecretKeyService.php
+++ b/models/classes/Security/Business/Service/SecretKeyService.php
@@ -45,14 +45,4 @@ final class SecretKeyService extends InjectionAwareService implements SecretKeyS
throw SecretKeyGenerationException::create($exception);
}
}
-
- /**
- * @inheritDoc
- */
- protected function getDependencies(): array
- {
- return [
- $this->length,
- ];
- }
} | TBD-<I> chore: remove `SecretKeyService::getDependencies()`
TBD-<I> chore(dependency): require tao `<I>`, which implemented `InjectionAwareService` dependencies auto-detection | oat-sa_extension-tao-lti | train |
eb12d9a525288b1e46bcbc968663d149358f809d | diff --git a/lib/core/seleniumRunner.js b/lib/core/seleniumRunner.js
index <HASH>..<HASH> 100644
--- a/lib/core/seleniumRunner.js
+++ b/lib/core/seleniumRunner.js
@@ -213,23 +213,13 @@ class SeleniumRunner {
});
});
})();`;
-
- const startUri = await driver.executeScript(
- 'return document.documentURI;'
- );
+ // Navigate to the page
await driver.executeScript(navigate);
- // Wait max 50s on navigation
- for (let i = 0; i < 100; i++) {
- await delay(500);
- const newUri = await driver.executeScript(
- 'return document.documentURI;'
- );
- // When the URI changed or of we are on the same page
- // see https://github.com/sitespeedio/browsertime/pull/623
- if (startUri != newUri || url === newUri) {
- break;
- }
- }
+ // If you run with default settings, the webdriver will give back control after
+ // the onload event. But you can change that with the --pageLoadStrategy none
+ // Then you will be in control immediately and therefore wanna wait some extra time
+ // befor you start to run your page complete check
+ await delay(this.options.pageCompleteCheckStartWait || 500);
await this.wait(pageCompleteCheck);
} catch (e) {
log.error('Could not load URL' + e);
diff --git a/lib/support/cli.js b/lib/support/cli.js
index <HASH>..<HASH> 100644
--- a/lib/support/cli.js
+++ b/lib/support/cli.js
@@ -487,6 +487,12 @@ module.exports.parseCommandLine = function parseCommandLine() {
describe:
'The time in ms to wait for running the page complete check the next time.'
})
+ .option('pageCompleteCheckStartWait', {
+ type: 'number',
+ default: 500,
+ describe:
+ 'The time in ms to wait for running the page complete check for the first time. Use this when you have a pageLoadStrategy set to none'
+ })
.option('pageLoadStrategy', {
type: 'string',
default: 'normal', | Remove and simplify old code when running with pageLoadStrategy none (#<I>)
* Remove and simplify old code when running with pageLoadStrategy none
We had some real old code that didn't add any value. Instead we now
have a simple setup when you run as default, and a sane configurable
setup when you run with a strategy set to none.
* keep minimum wait time at <I> ms (as before) | sitespeedio_browsertime | train
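With `--pageLoadStrategy none` the driver hands control back right after navigation starts, so the old trick of polling `document.documentURI` for a change is replaced by one configurable delay (`pageCompleteCheckStartWait`, default 500 ms) before the completeness polling begins. The reduced control flow, as a Python sketch (the two callables stand in for the webdriver calls):

```python
import time

def load_and_wait(navigate, page_complete,
                  start_wait_ms=500, retry_ms=500, max_tries=100):
    navigate()
    time.sleep(start_wait_ms / 1000)    # pageCompleteCheckStartWait
    for _ in range(max_tries):
        if page_complete():
            return True
        time.sleep(retry_ms / 1000)
    return False

checks = iter([False, False, True])
print(load_and_wait(lambda: None, lambda: next(checks),
                    start_wait_ms=10, retry_ms=10))   # True on the third poll
```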
4d698caaf5bb78dc9e7feb071bb816dfbd7f38a7 | diff --git a/src/Storage/Mapping/MetadataDriver.php b/src/Storage/Mapping/MetadataDriver.php
index <HASH>..<HASH> 100644
--- a/src/Storage/Mapping/MetadataDriver.php
+++ b/src/Storage/Mapping/MetadataDriver.php
@@ -399,6 +399,10 @@ class MetadataDriver implements MappingDriver
$type = get_class($column->getType());
}
+ if ($column->getName() === 'slug') {
+ $type = 'slug';
+ }
+
if ($type === 'select' && isset($this->contenttypes[$name]['fields'][$column->getName()]['multiple']) && $this->contenttypes[$name]['fields'][$column->getName()]['multiple'] === true) {
$type = 'selectmultiple';
} | Ensure slug column is always of type slug | bolt_bolt | train |
27ebe660cc85b6fffe7de4c16b8333bd9c7ce21d | diff --git a/src/main/java/com/github/joelittlejohn/embedmongo/StartMojo.java b/src/main/java/com/github/joelittlejohn/embedmongo/StartMojo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/joelittlejohn/embedmongo/StartMojo.java
+++ b/src/main/java/com/github/joelittlejohn/embedmongo/StartMojo.java
@@ -132,7 +132,7 @@ public class StartMojo extends AbstractEmbeddedMongoMojo {
*
* @since 0.3.4
*/
- @Parameter(property = "embedmongo.storageEngine", defaultValue = "wiredTiger")
+ @Parameter(property = "embedmongo.storageEngine", defaultValue = "mmapv1")
private String storageEngine;
@Component | Change default storage engine to mmapv1 (to match mongod) | joelittlejohn_embedmongo-maven-plugin | train |
92dc146c2a9a021f45a7d37da13868e93fd0bc0c | diff --git a/rest_framework_nested/routers.py b/rest_framework_nested/routers.py
index <HASH>..<HASH> 100644
--- a/rest_framework_nested/routers.py
+++ b/rest_framework_nested/routers.py
@@ -69,8 +69,6 @@ class NestedSimpleRouter(SimpleRouter):
route_contents = route._asdict()
route_contents['url'] = route.url.replace('^', '^'+self.parent_regex)
- if 'mapping' not in route_contents:
- route_contents['mapping'] = {}
- nested_routes.append(rest_framework.routers.Route(**route_contents))
+ nested_routes.append(type(route)(**route_contents))
self.routes = nested_routes | Don't check mapping, use route type to recreate routes | alanjds_drf-nested-routers | train |
508ba4a02235c11a2923249903af3822f0811313 | diff --git a/analysis/src/main/java/com/buschmais/jqassistant/core/analysis/impl/RuleSetWriterImpl.java b/analysis/src/main/java/com/buschmais/jqassistant/core/analysis/impl/RuleSetWriterImpl.java
index <HASH>..<HASH> 100644
--- a/analysis/src/main/java/com/buschmais/jqassistant/core/analysis/impl/RuleSetWriterImpl.java
+++ b/analysis/src/main/java/com/buschmais/jqassistant/core/analysis/impl/RuleSetWriterImpl.java
@@ -1,5 +1,7 @@
package com.buschmais.jqassistant.core.analysis.impl;
+import static com.buschmais.jqassistant.core.analysis.api.rule.Constraint.DEFAULT_SEVERITY;
+
import java.io.Writer;
import java.util.Collection;
import java.util.Map;
@@ -19,6 +21,7 @@ import com.buschmais.jqassistant.core.analysis.api.rule.Concept;
import com.buschmais.jqassistant.core.analysis.api.rule.Constraint;
import com.buschmais.jqassistant.core.analysis.api.rule.Group;
import com.buschmais.jqassistant.core.analysis.api.rule.RuleSet;
+import com.buschmais.jqassistant.core.analysis.api.rule.Severity;
import com.buschmais.jqassistant.core.analysis.rules.schema.v1.ConceptType;
import com.buschmais.jqassistant.core.analysis.rules.schema.v1.ConstraintType;
import com.buschmais.jqassistant.core.analysis.rules.schema.v1.GroupType;
@@ -128,9 +131,9 @@ public class RuleSetWriterImpl implements RuleSetWriter {
groupType.getIncludeConcept().add(conceptReferenceType);
}
for (Constraint includeConstraint : group.getConstraints()) {
- IncludedConstraintType includedConstraintType = new IncludedConstraintType();
+ IncludedConstraintType includedConstraintType = new IncludedConstraintType();
includedConstraintType.setRefId(includeConstraint.getId());
- includedConstraintType.setSeverity(SeverityEnumType.fromValue(includeConstraint.getSeverity().getValue()));
+ includedConstraintType.setSeverity(getSeverity(includeConstraint.getSeverity()));
groupType.getIncludeConstraint().add(includedConstraintType);
}
rules.getQueryDefinitionOrConceptOrConstraint().add(groupType);
@@ -157,7 +160,7 @@ public class RuleSetWriterImpl implements RuleSetWriter {
ConstraintType constraintType = new ConstraintType();
constraintType.setId(constraint.getId());
constraintType.setDescription(constraint.getDescription());
- constraintType.setSeverity(SeverityEnumType.fromValue(constraint.getSeverity().getValue()));
+ constraintType.setSeverity(getSeverity(constraint.getSeverity()));
constraintType.setCypher(constraint.getQuery().getCypher());
for (Concept requiresConcept : constraint.getRequiresConcepts()) {
ReferenceType conceptReferenceType = new ReferenceType();
@@ -167,4 +170,18 @@ public class RuleSetWriterImpl implements RuleSetWriter {
rules.getQueryDefinitionOrConceptOrConstraint().add(constraintType);
}
}
+
+ /**
+ * Converts {@link Severity} to {@link SeverityEnumType}
+ *
+ * @param severity
+ * {@link Severity}
+ * @return {@link SeverityEnumType}
+ */
+ private SeverityEnumType getSeverity(Severity severity) {
+ if (severity == null) {
+ severity = DEFAULT_SEVERITY;
+ }
+ return SeverityEnumType.fromValue(severity.getValue());
+ }
} | #<I> severity support in sonar extension | buschmais_jqa-core-framework | train |
47d16da2f2533f74aea576247380ba7bd566a0b5 | diff --git a/lib/sinatra/param.rb b/lib/sinatra/param.rb
index <HASH>..<HASH> 100644
--- a/lib/sinatra/param.rb
+++ b/lib/sinatra/param.rb
@@ -37,19 +37,13 @@ module Sinatra
end
def one_of(*names)
- count = 0
- names.each do |name|
- if params[name] and present?(params[name])
- count += 1
- next unless count > 1
-
- error = "Parameters #{names.join(', ')} are mutually exclusive"
- if content_type and content_type.match(mime_type(:json))
- error = {message: error}.to_json
- end
-
- halt 400, error
+ if names.count{|name| params[name] and present?(params[name])} > 1
+ error = "Parameters #{names.join(', ')} are mutually exclusive"
+ if content_type and content_type.match(mime_type(:json))
+ error = {message: error}.to_json
end
+
+ halt 400, error
end
end | Refactoring implementation of one_of | mattt_sinatra-param | train |
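The refactor collapses the manual counter loop into one `count` over the names: if more than one mutually exclusive parameter is present, fail the request. The same check in Python (param handling simplified; `halt 400` becomes an exception):

```python
def one_of(params, *names):
    present = lambda v: v is not None and v != ""
    if sum(1 for name in names if present(params.get(name))) > 1:
        raise ValueError(f"Parameters {', '.join(names)} are mutually exclusive")

one_of({"a": "1", "b": None}, "a", "b")      # fine: only one present
try:
    one_of({"a": "1", "b": "2"}, "a", "b")
except ValueError as err:
    print(err)   # Parameters a, b are mutually exclusive
```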
aa82d3da7e3a0f90c8ceab5c6c9c094cd6877786 | diff --git a/src/org/jgroups/protocols/TP.java b/src/org/jgroups/protocols/TP.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/protocols/TP.java
+++ b/src/org/jgroups/protocols/TP.java
@@ -43,7 +43,7 @@ import java.util.concurrent.locks.ReentrantLock;
* The {@link #receive(Address, Address, byte[], int, int)} method must
* be called by subclasses when a unicast or multicast message has been received.
* @author Bela Ban
- * @version $Id: TP.java,v 1.105 2007/01/06 23:32:18 belaban Exp $
+ * @version $Id: TP.java,v 1.106 2007/01/06 23:41:17 belaban Exp $
*/
@SuppressWarnings("unchecked") // todo: remove once all unchecked use has been converted into checked use
public abstract class TP extends Protocol {
@@ -98,8 +98,7 @@ public abstract class TP extends Protocol {
final ExposedByteArrayInputStream in_stream=new ExposedByteArrayInputStream(new byte[]{'0'});
- final ExposedBufferedInputStream buf_in_stream=new ExposedBufferedInputStream(in_stream);
- final DataInputStream dis=new DataInputStream(buf_in_stream);
+ final DataInputStream dis=new DataInputStream(in_stream);
/** If true, messages sent to self are treated specially: unicast messages are
@@ -1059,7 +1058,6 @@ public abstract class TP extends Protocol {
try {
synchronized(in_stream) {
in_stream.setData(data, offset, length);
- buf_in_stream.reset(length);
version=dis.readShort();
if(Version.compareTo(version) == false) {
if(warn) {
@@ -1163,20 +1161,17 @@ public abstract class TP extends Protocol {
}
ExposedByteArrayOutputStream out_stream=null;
- ExposedBufferedOutputStream buf_out_stream=null;
ExposedDataOutputStream dos=null;
Buffer buf;
try {
out_stream=new ExposedByteArrayOutputStream(1024);
- buf_out_stream=new ExposedBufferedOutputStream(out_stream, 1024);
- dos=new ExposedDataOutputStream(buf_out_stream);
+ dos=new ExposedDataOutputStream(out_stream);
writeMessage(msg, dos, multicast);
dos.flush();
buf=new Buffer(out_stream.getRawBuffer(), 0, out_stream.size());
}
finally {
Util.close(dos);
- Util.close(buf_out_stream);
Util.close(out_stream);
}
doSend(buf, dest, multicast);
@@ -1503,13 +1498,11 @@ public abstract class TP extends Protocol {
boolean is_message_list, multicast;
byte flags;
ExposedByteArrayInputStream in_stream=null;
- ExposedBufferedInputStream buf_in_stream=null;
DataInputStream dis=null;
try {
in_stream=new ExposedByteArrayInputStream(buf, offset, length);
- buf_in_stream=new ExposedBufferedInputStream(in_stream);
- dis=new DataInputStream(buf_in_stream);
+ dis=new DataInputStream(in_stream);
version=dis.readShort();
if(Version.compareTo(version) == false) {
if(warn) {
@@ -1550,7 +1543,6 @@ public abstract class TP extends Protocol {
}
finally {
Util.close(dis);
- Util.close(buf_in_stream);
Util.close(in_stream);
}
}
@@ -1808,8 +1800,7 @@ public abstract class TP extends Protocol {
Address dst;
ExposedByteArrayOutputStream out_stream=new ExposedByteArrayOutputStream(1024);
- ExposedBufferedOutputStream buf_out_stream=new ExposedBufferedOutputStream(out_stream, 1024);
- ExposedDataOutputStream dos=new ExposedDataOutputStream(buf_out_stream);
+ ExposedDataOutputStream dos=new ExposedDataOutputStream(out_stream);
for(Iterator it=msgs.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
@@ -1821,7 +1812,6 @@ public abstract class TP extends Protocol {
synchronized(out_stream) {
try {
out_stream.reset();
- buf_out_stream.reset(out_stream.getCapacity());
dos.reset();
writeMessageList(list, dos, multicast); // flushes output stream when done
dos.flush(); | removed buffered input/output streams - not needed when we write to or read from byte[] buffers! They all maintain their own byte[] buf copy too! | belaban_JGroups | train
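The rationale generalizes beyond JGroups: a buffered stream wrapped around an in-memory byte-array stream just maintains a second copy of data that is already in memory. Python has the same trap with `io.BytesIO` — a quick demonstration:

```python
import io
import struct

payload = struct.pack(">h", 42) + b"rest-of-message"

# Redundant: BufferedReader keeps its own buffer over an in-memory buffer.
redundant = io.BufferedReader(io.BytesIO(payload))

# Sufficient: parse structured data straight off the in-memory stream.
stream = io.BytesIO(payload)
(version,) = struct.unpack(">h", stream.read(2))
print(version)   # 42
```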
0472e7167616fd3bdfbf3155930bee35a4dfc5d9 | diff --git a/app/Controllers/AdminCollectionController.php b/app/Controllers/AdminCollectionController.php
index <HASH>..<HASH> 100644
--- a/app/Controllers/AdminCollectionController.php
+++ b/app/Controllers/AdminCollectionController.php
@@ -26,7 +26,7 @@ class AdminCollectionController extends AdminBaseController
// Fetch collections and page details
$data['collections'] = $collectionMapper->find();
- $data['collectionDetails'] = $pageMapper->findCollectionPages(false);
+ $data['collectionDetails'] = $pageMapper->findCollections();
return $this->render('collections.html', $data);
}
diff --git a/app/Library/Twig/Front.php b/app/Library/Twig/Front.php
index <HASH>..<HASH> 100644
--- a/app/Library/Twig/Front.php
+++ b/app/Library/Twig/Front.php
@@ -78,7 +78,6 @@ class Front extends Base
return $this->container->view->fetch("elements/{$element->template}", ['data' => $element]);
}
-
/**
* Get Collection Page List
*
@@ -93,9 +92,8 @@ class Front extends Base
$pageMapper = $dataMapper('pageMapper');
// Page Collection
- $data = $pageMapper->findCollectionPages($collectionId);
+ $data = $pageMapper->findCollectionPagesById($collectionId);
return $data;
}
-
}
diff --git a/app/Models/PageMapper.php b/app/Models/PageMapper.php
index <HASH>..<HASH> 100644
--- a/app/Models/PageMapper.php
+++ b/app/Models/PageMapper.php
@@ -73,15 +73,30 @@ class PageMapper extends DataMapperAbstract
}
/**
- * Find All Collection Pages
+ * Find All Collections
*
- * Finds all collection pages, does not include element data
- * @param bool $published Filter on published collection pages
- * @return mixed Array | null
+ * @return mixed Array | null
+ */
+ public function findCollections()
+ {
+ $this->makeCollectionPageSelect();
+
+ return $this->find();
+ }
+
+ /**
+ * Find Collection Pages by ID
+ *
+ * Finds all collection pages
+ * @param int $collectionId
+ * @param bool $published Filter on published collection pages
+ * @return mixed Array | null
*/
- public function findCollectionPages($published = true)
+ public function findCollectionPagesById($collectionId, $published = true)
{
$this->makeCollectionPageSelect();
+ $this->sql .= ' and c.id = ?';
+ $this->bindValues[] = $collectionId;
if ($published) {
$this->sql .= " and p.published_date <= '{$this->today()}'";
@@ -91,9 +106,9 @@ class PageMapper extends DataMapperAbstract
}
/**
- * Find Published Collection Page Detail By Slug
+ * Find Published Collection Page By Slug
*
- * Finds collection page, does not include element data
+ * Finds collection page by collection slug and page slug
* @param bool $published Filter on published collection pages
* @return mixed Array | null
*/ | Fixed issue in getting collection pages by ID in PageMapper. | PitonCMS_Engine | train |
73c3d51f33926e562a19b647cc09d653d0d91d5a | diff --git a/lib/zbar/lib.rb b/lib/zbar/lib.rb
index <HASH>..<HASH> 100644
--- a/lib/zbar/lib.rb
+++ b/lib/zbar/lib.rb
@@ -38,9 +38,12 @@ module ZBar
attach_function :zbar_processor_create, [:int], :pointer
attach_function :zbar_processor_destroy, [:pointer], :void
attach_function :zbar_processor_init, [:pointer, :string, :int], :int
+ attach_function :zbar_processor_set_config, [:pointer, :int, :int, :int], :int
attach_function :zbar_process_image, [:pointer, :pointer], :int
+ attach_function :zbar_parse_config, [:string, :pointer, :pointer, :pointer], :int
+
attach_function :zbar_set_verbosity, [:int], :void
attach_function :zbar_get_symbol_name, [:int], :string
attach_function :zbar_get_addon_name, [:int], :string
diff --git a/lib/zbar/processor.rb b/lib/zbar/processor.rb
index <HASH>..<HASH> 100644
--- a/lib/zbar/processor.rb
+++ b/lib/zbar/processor.rb
@@ -1,9 +1,17 @@
module ZBar
class Processor
# Create a new processor.
- def initialize(threads = 0)
+ # Accepts a hash of configurations
+ def initialize(config = nil)
+ # This function used to accept an integer refering to the number of threads
+ if config.kind_of?(Fixnum)
+ config = { :threads => config }
+ end
+
+ config ||= {}
+
@processor = FFI::AutoPointer.new(
- ZBar.zbar_processor_create(threads),
+ ZBar.zbar_processor_create(config[:threads] || 0),
ZBar.method(:zbar_processor_destroy)
)
@@ -11,8 +19,76 @@ module ZBar
ZBar._zbar_error_spew(@processor, 0)
raise "error!"
end
+
+ config.each { |key,value|
+ if key == :threads
+ # do nothing; we handled this above
+ else
+ setter = "#{key}=".to_sym
+ if respond_to?(setter)
+ send(setter, value)
+ else
+ raise ArgumentError, "unsupported configuration option: #{key}"
+ end
+ end
+ }
+ end
+
+ # Indicates that this processor should only search for the indicated symbologies.
+ # Accepts an array of the following symbologies, specified as a string or symbol:
+ # - qrcode
+ # - upca
+ # - upce
+ # - ean13
+ # - ean8
+ # - i25
+ # - scanner
+ # - isbn13
+ # - isbn10
+ # - code39
+ # - pdf417
+ # - code128
+ def symbologies=(symbologies)
+ self.zbar_config = ["disable"] + symbologies.map { |sym| "#{sym}.enable" }
+ true
+ end
+
+ # Indicates that this processor should only search for the indicated symbology.
+ # See #symbologies= for details.
+ def symbology=(symbology)
+ self.symbologies = [symbology]
+ end
+
+ # Configures this processor using the specified configuration string. See
+ # zbar_parse_config (zbar/config.c) for supported values.
+ def zbar_config=(config_value)
+ case config_value
+ when String
+ symbology = FFI::MemoryPointer.new :int
+ config_t = FFI::MemoryPointer.new :int
+ value = FFI::MemoryPointer.new :int
+
+ if ZBar.zbar_parse_config(config_value, symbology, config_t, value) == 0
+ # parsed successfully
+ if ZBar.zbar_processor_set_config(@processor, symbology.read_int, config_t.read_int, value.read_int) == 0
+ true
+ else
+ raise ArgumentError, "config string #{config_value.inspect} parsed but could not be set"
+ end
+ else
+ raise ArgumentError, "config string #{config_value.inspect} was not recognized"
+ end
+
+ when Enumerable
+ config_value.each { |value|
+ self.zbar_config = value
+ }
+
+ else
+ raise ArgumentError, "unsupported config value"
+ end
end
-
+
# Attempt to recognize barcodes in this image. Raises an exception if ZBar
# signals an error, otherwise returns an array of ZBar::Symbol objects.
def process(image) | Add ZBar::Processor#zbar_config= and associated functions | willglynn_ruby-zbar | train |
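The setter's round trip — let zbar's own parser interpret the string, then apply the parsed (symbology, config, value) triple to the processor — can be reproduced from Python with ctypes. The signatures below mirror the FFI declarations in this diff; the shared-library name and its presence on the system are assumptions:

```python
import ctypes

# int zbar_parse_config(const char*, int*, int*, int*)
# int zbar_processor_set_config(zbar_processor_t*, int, int, int)
zbar = ctypes.CDLL("libzbar.so.0")          # assumed library name/location
zbar.zbar_processor_create.restype = ctypes.c_void_p

processor = zbar.zbar_processor_create(0)
symbology, config, value = ctypes.c_int(), ctypes.c_int(), ctypes.c_int()

if zbar.zbar_parse_config(b"qrcode.enable", ctypes.byref(symbology),
                          ctypes.byref(config), ctypes.byref(value)) != 0:
    raise ValueError("config string was not recognized")
if zbar.zbar_processor_set_config(ctypes.c_void_p(processor), symbology.value,
                                  config.value, value.value) != 0:
    raise ValueError("config string parsed but could not be set")
print("qrcode.enable applied")
```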
c214431f0ea047289bd7d4180283521e1f6ff80d | diff --git a/src/utils.js b/src/utils.js
index <HASH>..<HASH> 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -28,9 +28,6 @@ var getLongestTransitionOrAnimationTime = function( el ){
var delay, duration, subTotals;
delay = getComputedStyle( el )[compat.prefixed(cssType + "Delay")]
- if (!delay) {
- return [0];
- }
delay = delay.split(',').map(parseFloat);
duration = getComputedStyle( el )[compat.prefixed(cssType + "Duration")]
duration = duration.split(',').map(parseFloat); | Revert Return early from getLongestTransitionOrAnimationTime for browsers that don't support css transitions. Default to no delay. | weareoffsider_css-driven | train |
9ef0ddb248da65025c6e95f88480d912468f8623 | diff --git a/piplicenses.py b/piplicenses.py
index <HASH>..<HASH> 100644
--- a/piplicenses.py
+++ b/piplicenses.py
@@ -139,8 +139,9 @@ def get_packages(args):
pkg.project_name.replace("-", "_"), pkg.version)
patterns = []
[patterns.extend(sorted(glob.glob(os.path.join(pkg.location,
- pkg_dirname,
- f)))) for f in file_names]
+ pkg_dirname,
+ f))))
+ for f in file_names]
for test_file in patterns:
if os.path.exists(test_file):
included_file = test_file | Fix errors from pycodestyle | raimon49_pip-licenses | train |
bef7bbdee2664afbbb8f607a92c800dc8eee147d | diff --git a/sacred/observers/s3_observer.py b/sacred/observers/s3_observer.py
index <HASH>..<HASH> 100644
--- a/sacred/observers/s3_observer.py
+++ b/sacred/observers/s3_observer.py
@@ -2,8 +2,6 @@ import json
import os
import os.path
-from botocore.errorfactory import ClientError
-
from sacred.commandline_options import cli_option
from sacred.dependencies import get_digest
from sacred.observers.base import RunObserver
@@ -126,6 +124,8 @@ class S3Observer(RunObserver):
return len(all_keys) > 0
def _bucket_exists(self):
+ from botocore.errorfactory import ClientError
+
try:
self.s3.meta.client.head_bucket(Bucket=self.bucket)
except ClientError as er:
diff --git a/tests/test_observers/test_s3_observer.py b/tests/test_observers/test_s3_observer.py
index <HASH>..<HASH> 100644
--- a/tests/test_observers/test_s3_observer.py
+++ b/tests/test_observers/test_s3_observer.py
@@ -1,8 +1,6 @@
#!/usr/bin/env python
# coding=utf-8
-from moto import mock_s3
-
import datetime
import os
import pytest
@@ -12,8 +10,9 @@ from sacred.observers import S3Observer
import tempfile
import hashlib
-import boto3
-from botocore.exceptions import ClientError
+moto = pytest.importorskip("moto")
+boto3 = pytest.importorskip("boto3")
+pytest.importorskip("botocore")
T1 = datetime.datetime(1999, 5, 4, 3, 2, 1, 0)
T2 = datetime.datetime(1999, 5, 5, 5, 5, 5, 5)
@@ -72,6 +71,8 @@ def tmpfile():
def _bucket_exists(bucket_name):
+ from botocore.exceptions import ClientError
+
s3 = boto3.resource("s3")
try:
s3.meta.client.head_bucket(Bucket=bucket_name)
@@ -96,7 +97,7 @@ def _get_file_data(bucket_name, key):
return s3.Object(bucket_name, key).get()["Body"].read()
-@mock_s3
[email protected]_s3
def test_fs_observer_started_event_creates_bucket(observer, sample_run):
_id = observer.started_event(**sample_run)
run_dir = s3_join(BASEDIR, str(_id))
@@ -121,7 +122,7 @@ def test_fs_observer_started_event_creates_bucket(observer, sample_run):
}
-@mock_s3
[email protected]_s3
def test_fs_observer_started_event_increments_run_id(observer, sample_run):
_id = observer.started_event(**sample_run)
_id2 = observer.started_event(**sample_run)
@@ -138,7 +139,7 @@ def test_s3_observer_equality():
assert obs_one != different_bucket
-@mock_s3
[email protected]_s3
def test_raises_error_on_duplicate_id_directory(observer, sample_run):
observer.started_event(**sample_run)
sample_run["_id"] = 1
@@ -146,7 +147,7 @@ def test_raises_error_on_duplicate_id_directory(observer, sample_run):
observer.started_event(**sample_run)
-@mock_s3
[email protected]_s3
def test_completed_event_updates_run_json(observer, sample_run):
observer.started_event(**sample_run)
run = json.loads(
@@ -164,7 +165,7 @@ def test_completed_event_updates_run_json(observer, sample_run):
assert run["status"] == "COMPLETED"
-@mock_s3
[email protected]_s3
def test_interrupted_event_updates_run_json(observer, sample_run):
observer.started_event(**sample_run)
run = json.loads(
@@ -182,7 +183,7 @@ def test_interrupted_event_updates_run_json(observer, sample_run):
assert run["status"] == "SERVER_EXPLODED"
-@mock_s3
[email protected]_s3
def test_failed_event_updates_run_json(observer, sample_run):
observer.started_event(**sample_run)
run = json.loads(
@@ -200,7 +201,7 @@ def test_failed_event_updates_run_json(observer, sample_run):
assert run["status"] == "FAILED"
-@mock_s3
[email protected]_s3
def test_queued_event_updates_run_json(observer, sample_run):
del sample_run["start_time"]
sample_run["queue_time"] = T2
@@ -213,7 +214,7 @@ def test_queued_event_updates_run_json(observer, sample_run):
assert run["status"] == "QUEUED"
-@mock_s3
[email protected]_s3
def test_artifact_event_works(observer, sample_run, tmpfile):
observer.started_event(**sample_run)
observer.artifact_event("test_artifact.py", tmpfile.name)
diff --git a/tox.ini b/tox.ini
index <HASH>..<HASH> 100644
--- a/tox.ini
+++ b/tox.ini
@@ -57,8 +57,7 @@ basepython = python
deps =
pytest==4.3.0
mock==2.0.0
- moto==1.3.13
-commands =
+commands =
pytest {posargs}
[testenv:flake8] | Removed boto as dependency. (#<I>)
Removed boto as mandatory dependency. | IDSIA_sacred | train |
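Both halves of the diff are the standard optional-dependency pattern: in library code, import inside the function so merely importing the module never requires botocore; in tests, `pytest.importorskip` skips cleanly when the package is absent. In isolation:

```python
import pytest

def bucket_exists(s3, bucket):
    # Lazy import: botocore is only required when this path actually runs.
    from botocore.errorfactory import ClientError
    try:
        s3.meta.client.head_bucket(Bucket=bucket)
        return True
    except ClientError:
        return False

# In a test module: skip everything below when optional deps are missing.
boto3 = pytest.importorskip("boto3")
moto = pytest.importorskip("moto")
```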
45ba5bca59948626086d60b61f8ba3687d2fea56 | diff --git a/eureka-client/src/main/java/com/netflix/discovery/InstanceInfoReplicator.java b/eureka-client/src/main/java/com/netflix/discovery/InstanceInfoReplicator.java
index <HASH>..<HASH> 100644
--- a/eureka-client/src/main/java/com/netflix/discovery/InstanceInfoReplicator.java
+++ b/eureka-client/src/main/java/com/netflix/discovery/InstanceInfoReplicator.java
@@ -74,21 +74,26 @@ class InstanceInfoReplicator implements Runnable {
public boolean onDemandUpdate() {
if (rateLimiter.acquire(burstSize, allowedRatePerMinute)) {
- scheduler.submit(new Runnable() {
- @Override
- public void run() {
- logger.debug("Executing on-demand update of local InstanceInfo");
-
- Future latestPeriodic = scheduledPeriodicRef.get();
- if (latestPeriodic != null && !latestPeriodic.isDone()) {
- logger.debug("Canceling the latest scheduled update, it will be rescheduled at the end of on demand update");
- latestPeriodic.cancel(false);
+ if (!scheduler.isShutdown()) {
+ scheduler.submit(new Runnable() {
+ @Override
+ public void run() {
+ logger.debug("Executing on-demand update of local InstanceInfo");
+
+ Future latestPeriodic = scheduledPeriodicRef.get();
+ if (latestPeriodic != null && !latestPeriodic.isDone()) {
+ logger.debug("Canceling the latest scheduled update, it will be rescheduled at the end of on demand update");
+ latestPeriodic.cancel(false);
+ }
+
+ InstanceInfoReplicator.this.run();
}
-
- InstanceInfoReplicator.this.run();
- }
- });
- return true;
+ });
+ return true;
+ } else {
+ logger.warn("Ignoring onDemand update due to stopped scheduler");
+ return false;
+ }
} else {
logger.warn("Ignoring onDemand update due to rate limiter");
return false; | Verify scheduler is active before submit
Fixes gh-<I>. | Netflix_eureka | train |
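Submitting to an executor that has been shut down throws `RejectedExecutionException`; the fix checks `isShutdown()` first and logs a warning instead of blowing up the caller. The Python analogue with `concurrent.futures` catches the `RuntimeError` that `submit()` raises after `shutdown()` (sketch):

```python
import logging
from concurrent.futures import ThreadPoolExecutor

logger = logging.getLogger("replicator")

def on_demand_update(scheduler, task):
    """Guarded submit: refuse quietly when the scheduler is stopped."""
    try:
        scheduler.submit(task)
        return True
    except RuntimeError:   # plays the role of RejectedExecutionException
        logger.warning("Ignoring onDemand update due to stopped scheduler")
        return False

scheduler = ThreadPoolExecutor(max_workers=1)
print(on_demand_update(scheduler, lambda: None))   # True
scheduler.shutdown()
print(on_demand_update(scheduler, lambda: None))   # False
```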