hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
---|---|---|---|---|
c34089a78edc65cd78f5f8e7d4a55bd8cff4ad0d | diff --git a/tests/test_validator.py b/tests/test_validator.py
index <HASH>..<HASH> 100755
--- a/tests/test_validator.py
+++ b/tests/test_validator.py
@@ -186,8 +186,7 @@ def test_validate(files_source):
def test_validate_sample_factors(files_source):
mwfile = next(mwtab.read_files(files_source))
validation_errors = validate_file(mwfile)
- print(validation_errors[0])
- pass
+ print(vars(validation_errors[0]))
def test_validate_metabolites(): | Begins adding unit tests for validation.py. | MoseleyBioinformaticsLab_mwtab | train |
0620d3c3fd0cb1ba832643c0b9b6c3377e61981c | diff --git a/promise.js b/promise.js
index <HASH>..<HASH> 100644
--- a/promise.js
+++ b/promise.js
@@ -149,18 +149,17 @@
this.queue = [];
}
PendingPromise.prototype.resolve = function(deferred, onFulfilled, onRejected) {
- this.queue.push([
+ this.queue.push(
deferred,
isFunction(onFulfilled) ? onFulfilled : identity,
isFunction(onRejected) ? onRejected : rejectIdentity
- ]);
+ );
return deferred.promise;
};
PendingPromise.prototype.resolveQueued = function(promise) {
- var queue = this.queue, tuple;
- for (var i = 0, l = queue.length; i < l; i++) {
- tuple = queue[i];
- promise.resolve(tuple[0], tuple[1], tuple[2]);
+ var queue = this.queue;
+ for (var i = 0, l = queue.length; i < l; i += 3) {
+ promise.resolve(queue[i], queue[i + 1], queue[i + 2]);
}
}; | Have a PendingPromise use a flat array. | jridgewell_PJs | train |
4b39d6db0b320de2ba73d82458389f167a8b561c | diff --git a/lib/get-classes-from-bulma.js b/lib/get-classes-from-bulma.js
index <HASH>..<HASH> 100644
--- a/lib/get-classes-from-bulma.js
+++ b/lib/get-classes-from-bulma.js
@@ -3,9 +3,8 @@ const { readFileSync } = require('fs')
const globby = require('globby')
const uniq = require('lodash.uniq')
-module.exports = () => {
- const srcDir = join(__dirname, '../node_modules/bulma')
- const srcGlob = join(srcDir, 'sass/**/*.sass')
+module.exports = bulmaPath => {
+ const srcGlob = join(bulmaPath, 'sass/**/*.sass')
const srcFiles = globby.sync(srcGlob).filter(srcFile => {
return (
basename(srcFile).charAt(0) !== '_' &&
diff --git a/lib/make-slim-build.js b/lib/make-slim-build.js
index <HASH>..<HASH> 100644
--- a/lib/make-slim-build.js
+++ b/lib/make-slim-build.js
@@ -11,7 +11,7 @@ const getClassesFromTemplate = require('./get-classes-from-template')
module.exports = (options = {}) => {
const srcFiles = globby.sync(options.srcGlobs)
const bulmaDir = getBulmaPath(options.rootDir)
- const bulmaClasses = getClassesFromBulma()
+ const bulmaClasses = getClassesFromBulma(bulmaDir)
const rawClasses = srcFiles.map(srcFile => {
const content = readFileSync(srcFile, 'utf8')
const { template } = compiler.parseComponent(content)
diff --git a/package-lock.json b/package-lock.json
index <HASH>..<HASH> 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "nuxt-bulma-slim",
- "version": "0.1.0",
+ "version": "0.1.1",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "nuxt-bulma-slim",
- "version": "0.1.1",
+ "version": "0.1.4",
"description": "Nuxt.js module to automatically make a slim Bulma build of used features",
"license": "MIT",
"author": {
diff --git a/test/get-classes-from-bulma.test.js b/test/get-classes-from-bulma.test.js
index <HASH>..<HASH> 100644
--- a/test/get-classes-from-bulma.test.js
+++ b/test/get-classes-from-bulma.test.js
@@ -1,8 +1,10 @@
+const { join } = require('path')
const getClassesFromBulma = require('../lib/get-classes-from-bulma')
describe('Get Classes From Bulma', () => {
it('should return all the defined classes', () => {
- const classes = getClassesFromBulma()
+ const bulmaPath = join(__dirname, '../node_modules/bulma')
+ const classes = getClassesFromBulma(bulmaPath)
expect(classes).toHaveLength(30) | use correct bulma path when extracting classes, fixed tests | mustardamus_nuxt-bulma-slim | train |
915c94922e5aa06bf5ccfaa8177a3d660481d897 | diff --git a/src/Sendy.php b/src/Sendy.php
index <HASH>..<HASH> 100644
--- a/src/Sendy.php
+++ b/src/Sendy.php
@@ -106,6 +106,8 @@ class Sendy {
{
$return_options = array(
'list' => $this->list_id,
+ //Passing list_id too, because old API calls use list, new ones use list_id
+ 'list_id' => $this->list_id, # ¯\_(ツ)_/¯
'api_key' => $this->api_key,
'boolean' => 'true'
); | old api was using list... new calls use list_id added both... :D | hocza_sendy-laravel | train |
3fb02e3d1c7aa47f050acfa53e4a1d804eb8660b | diff --git a/Branch-SDK/src/io/branch/referral/network/BranchRemoteInterface.java b/Branch-SDK/src/io/branch/referral/network/BranchRemoteInterface.java
index <HASH>..<HASH> 100644
--- a/Branch-SDK/src/io/branch/referral/network/BranchRemoteInterface.java
+++ b/Branch-SDK/src/io/branch/referral/network/BranchRemoteInterface.java
@@ -27,7 +27,10 @@ import io.branch.referral.ServerResponse;
* </p>
*/
public abstract class BranchRemoteInterface {
- private static final String BRANCH_KEY = "branch_key";
+ /**
+ * Key for adding retry numbers for the request. This will help better network issue analysis and debugging.
+ * Add the retry number to the GET Request as a query param and as a JSon key value for post
+ */
public static final String RETRY_NUMBER = "retryNumber";
//----------- Abstract methods-----------------------//
@@ -43,6 +46,8 @@ public abstract class BranchRemoteInterface {
* BranchRemoteException contains the corresponding BranchError code for the error {@link BranchError#ERR_BRANCH_NO_CONNECTIVITY } | {@link BranchError#ERR_BRANCH_REQ_TIMED_OUT}
* @see {@link io.branch.referral.network.BranchRemoteInterface.BranchRemoteException}
* {@link io.branch.referral.network.BranchRemoteInterface.BranchResponse}
+ *
+ * NOTE: For better debugging purpose conside adding {@link #RETRY_NUMBER} as a query params if you implement multiple retries for your request
* </p>
*/
public abstract BranchResponse doRestfulGet(String url) throws BranchRemoteException;
@@ -59,6 +64,8 @@ public abstract class BranchRemoteInterface {
* BranchRemoteException contains the corresponding BranchError code for the error {@link BranchError#ERR_BRANCH_NO_CONNECTIVITY } | {@link BranchError#ERR_BRANCH_REQ_TIMED_OUT}
* @see {@link io.branch.referral.network.BranchRemoteInterface.BranchRemoteException}
* {@link io.branch.referral.network.BranchRemoteInterface.BranchResponse}
+ *
+ * NOTE: For better debugging purpose conside adding {@link #RETRY_NUMBER} as a JSon keyvalue if you implement multiple retries for your request
* </p>
*/
public abstract BranchResponse doRestfulPost(String url, JSONObject payload) throws BranchRemoteException;
@@ -193,7 +200,7 @@ public abstract class BranchRemoteInterface {
try {
post.put("sdk", "android" + BuildConfig.VERSION_NAME);
if (!branch_key.equals(PrefHelper.NO_STRING_VALUE)) {
- post.put(BRANCH_KEY, branch_key);
+ post.put(Defines.Jsonkey.BranchKey.getKey(), branch_key);
return true;
}
} catch (JSONException ignore) { | Adding a note on usage of `RETRY_NUMBER` | BranchMetrics_android-branch-deep-linking | train |
4b00ee8cd64c402d33402d61854095dcd01b150b | diff --git a/src/Extension/BaseExtension.php b/src/Extension/BaseExtension.php
index <HASH>..<HASH> 100644
--- a/src/Extension/BaseExtension.php
+++ b/src/Extension/BaseExtension.php
@@ -11,9 +11,10 @@
namespace Sonata\FormatterBundle\Extension;
-use Twig_Environment;
+use Twig\Environment;
+use Twig\Extension\AbstractExtension;
-abstract class BaseExtension extends \Twig_Extension implements ExtensionInterface
+abstract class BaseExtension extends AbstractExtension implements ExtensionInterface
{
public function getAllowedFilters()
{
@@ -40,7 +41,7 @@ abstract class BaseExtension extends \Twig_Extension implements ExtensionInterfa
return [];
}
- public function initRuntime(Twig_Environment $environment)
+ public function initRuntime(Environment $environment)
{
return [];
}
diff --git a/src/Extension/BaseProxyExtension.php b/src/Extension/BaseProxyExtension.php
index <HASH>..<HASH> 100644
--- a/src/Extension/BaseProxyExtension.php
+++ b/src/Extension/BaseProxyExtension.php
@@ -11,9 +11,10 @@
namespace Sonata\FormatterBundle\Extension;
-use Twig_Environment;
+use Twig\Environment;
+use Twig\Extension\AbstractExtension;
-abstract class BaseProxyExtension extends \Twig_Extension implements ExtensionInterface
+abstract class BaseProxyExtension extends AbstractExtension implements ExtensionInterface
{
/**
* @return \Twig_ExtensionInterface
@@ -45,7 +46,7 @@ abstract class BaseProxyExtension extends \Twig_Extension implements ExtensionIn
return [];
}
- public function initRuntime(Twig_Environment $environment)
+ public function initRuntime(Environment $environment)
{
$this->getTwigExtension()->initRuntime($environment);
}
diff --git a/src/Formatter/Pool.php b/src/Formatter/Pool.php
index <HASH>..<HASH> 100644
--- a/src/Formatter/Pool.php
+++ b/src/Formatter/Pool.php
@@ -14,9 +14,9 @@ namespace Sonata\FormatterBundle\Formatter;
use Psr\Log\LoggerAwareInterface;
use Psr\Log\LoggerInterface;
use Psr\Log\NullLogger;
-use Twig_Environment;
-use Twig_Error_Syntax;
-use Twig_Sandbox_SecurityError;
+use Twig\Environment;
+use Twig\Error\SyntaxError;
+use Twig\Sandbox\SecurityError;
class Pool implements LoggerAwareInterface
{
@@ -90,7 +90,7 @@ class Pool implements LoggerAwareInterface
/**
* @param string $code
*/
- public function add($code, FormatterInterface $formatter, Twig_Environment $env = null)
+ public function add($code, FormatterInterface $formatter, Environment $env = null)
{
$this->formatters[$code] = [$formatter, $env];
}
@@ -143,7 +143,7 @@ class Pool implements LoggerAwareInterface
$text = $env->render($text);
}
}
- } catch (Twig_Error_Syntax $e) {
+ } catch (SyntaxError $e) {
$this->logger->critical(sprintf(
'[FormatterBundle::transform] %s - Error while parsing twig template : %s',
$code,
@@ -152,7 +152,7 @@ class Pool implements LoggerAwareInterface
'text' => $text,
'exception' => $e,
]);
- } catch (Twig_Sandbox_SecurityError $e) {
+ } catch (SecurityError $e) {
$this->logger->critical(sprintf(
'[FormatterBundle::transform] %s - the user try an non white-listed keyword : %s',
$code,
diff --git a/src/Formatter/TwigFormatter.php b/src/Formatter/TwigFormatter.php
index <HASH>..<HASH> 100644
--- a/src/Formatter/TwigFormatter.php
+++ b/src/Formatter/TwigFormatter.php
@@ -12,15 +12,18 @@
namespace Sonata\FormatterBundle\Formatter;
use Sonata\FormatterBundle\Extension\ExtensionInterface;
+use Twig\Environment;
+use Twig\Loader\ArrayLoader;
+use Twig\Loader\ChainLoader;
-class TwigFormatter implements \Sonata\FormatterBundle\Formatter\FormatterInterface
+class TwigFormatter implements FormatterInterface
{
/**
- * @var \Twig_Environment
+ * @var Environment
*/
protected $twig;
- public function __construct(\Twig_Environment $twig)
+ public function __construct(Environment $twig)
{
$this->twig = $twig;
}
@@ -33,8 +36,8 @@ class TwigFormatter implements \Sonata\FormatterBundle\Formatter\FormatterInterf
$hash = sha1($text);
- $chainLoader = new \Twig_Loader_Chain();
- $chainLoader->addLoader(new \Twig_Loader_Array([$hash => $text]));
+ $chainLoader = new ChainLoader();
+ $chainLoader->addLoader(new ArrayLoader([$hash => $text]));
$chainLoader->addLoader($oldLoader);
$this->twig->setLoader($chainLoader); | Migrate more classes to Twig namespaces.
I ran it just on src/Twig the other day. | sonata-project_SonataFormatterBundle | train |
68525c9dd8c3153a56ade7f5e66459ffc1c9ae37 | diff --git a/classes/LocaleManager.php b/classes/LocaleManager.php
index <HASH>..<HASH> 100644
--- a/classes/LocaleManager.php
+++ b/classes/LocaleManager.php
@@ -43,7 +43,7 @@ class LocaleManager
$this->sessionSegment = $session->getSegment('Alltube\LocaleManager');
$cookieLocale = $this->sessionSegment->get('locale');
if (isset($cookieLocale)) {
- $this->setLocale(new Locale($this->sessionSegment->get('locale')));
+ $this->setLocale(new Locale($cookieLocale));
}
} | We don't need to call sessionSegment->get() twice | Rudloff_alltube | train |
b1204724fdc88e64f5cc6173630ecae116165163 | diff --git a/unixtimestampfield/fields.py b/unixtimestampfield/fields.py
index <HASH>..<HASH> 100644
--- a/unixtimestampfield/fields.py
+++ b/unixtimestampfield/fields.py
@@ -238,12 +238,11 @@ class UnixTimeStampField(TimestampPatchMixin, Field):
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = self.get_datetimenow()
- setattr(model_instance, self.attname, value)
- return value
else:
value = getattr(model_instance, self.attname)
- setattr(model_instance, self.attname, field_value_middleware(self, value))
- return value
+
+ setattr(model_instance, self.attname, field_value_middleware(self, value))
+ return value
def to_python(self, value):
return field_value_middleware(self, value) | Applied submiddleware to auto field | myyang_django-unixtimestampfield | train |
a0217d02101b401a58fcb0be05be673285081ed9 | diff --git a/bindings/jaxrs/src/main/java/org/commonjava/indy/core/bind/jaxrs/admin/StoreAdminHandler.java b/bindings/jaxrs/src/main/java/org/commonjava/indy/core/bind/jaxrs/admin/StoreAdminHandler.java
index <HASH>..<HASH> 100644
--- a/bindings/jaxrs/src/main/java/org/commonjava/indy/core/bind/jaxrs/admin/StoreAdminHandler.java
+++ b/bindings/jaxrs/src/main/java/org/commonjava/indy/core/bind/jaxrs/admin/StoreAdminHandler.java
@@ -22,9 +22,6 @@ import static javax.ws.rs.core.Response.notModified;
import static javax.ws.rs.core.Response.ok;
import static javax.ws.rs.core.Response.status;
import static org.apache.commons.lang.StringUtils.isEmpty;
-import static org.commonjava.indy.bind.jaxrs.util.ResponseUtils.formatCreatedResponseWithJsonEntity;
-import static org.commonjava.indy.bind.jaxrs.util.ResponseUtils.formatOkResponseWithJsonEntity;
-import static org.commonjava.indy.bind.jaxrs.util.ResponseUtils.formatResponse;
import static org.commonjava.indy.model.core.ArtifactStore.METADATA_CHANGELOG;
import static org.commonjava.indy.util.ApplicationContent.application_json; | Update StoreAdminHandler.java
Removed ResponseUtils java class because it is renamed to ResponseHelper | Commonjava_indy | train |
dd70a65541e66ee8f23a0d9b93c29e1f8a4c4b65 | diff --git a/nitroCommon.js b/nitroCommon.js
index <HASH>..<HASH> 100644
--- a/nitroCommon.js
+++ b/nitroCommon.js
@@ -23,7 +23,7 @@ function makeRequest(host,path,key,query,settings,callback){
var defaults = {
Accept: 'application/json',
- User_Agent: 'BBCiPlayerRadio/2.8.0.5579 (SM-G903F; Android 5.1.1)',
+ User_Agent: 'BBCiPlayerRadio/1.6.1.1522345 (SM-N900; Android 4.4.2)',
api_key_name: 'api_key',
proto: 'http'
}
@@ -83,7 +83,7 @@ function makeRequest(host,path,key,query,settings,callback){
rateLimitEvents++;
// rate limiting, back off by 45 seconds
setTimeout(function(){
- make_request(host,path,key,query,settings,callback)
+ makeRequest(host,path,key,query,settings,callback)
},45000);
}
else { | Fix further issue with rate-limit recovery | MikeRalphson_bbcparse | train |
24bc9946db8ad194834ed8853610e3638c6b2de6 | diff --git a/core/src/test/java/com/emc/ia/sdk/support/datetime/WhenWorkingWithDates.java b/core/src/test/java/com/emc/ia/sdk/support/datetime/WhenWorkingWithDates.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/com/emc/ia/sdk/support/datetime/WhenWorkingWithDates.java
+++ b/core/src/test/java/com/emc/ia/sdk/support/datetime/WhenWorkingWithDates.java
@@ -28,8 +28,12 @@ public class WhenWorkingWithDates {
offset = Math.abs(offset);
int tzHour = offset / 60 / 60;
int tzMinute = offset / 60 % 60;
- String expected = String.format("%1$tY-%1$tm-%1$tdT%1$tH:%1$tM:%1$tS%2$s%3$02d:%4$02d",
- dateTime, sign, tzHour, tzMinute);
+ String expected = String.format("%1$tY-%1$tm-%1$tdT%1$tH:%1$tM:%1$tS%2$s", dateTime, sign);
+ if (tzHour == 0 && tzMinute == 0) {
+ expected += "Z";
+ } else {
+ expected += String.format("%02d:%02d", tzHour, tzMinute);
+ }
String actual = Dates.toIso(dateTime); | Attempt to fix test that fails on Travis | Enterprise-Content-Management_infoarchive-sip-sdk | train |
6e343f25e8d99b211a16b1c1b6662c352c8ba699 | diff --git a/integration/client/client_unix_test.go b/integration/client/client_unix_test.go
index <HASH>..<HASH> 100644
--- a/integration/client/client_unix_test.go
+++ b/integration/client/client_unix_test.go
@@ -19,7 +19,6 @@
package containerd
import (
- "runtime"
"testing"
. "github.com/containerd/containerd"
@@ -33,28 +32,11 @@ const (
)
var (
- testImage string
+ testImage = "mirror.gcr.io/library/busybox:latest"
shortCommand = withProcessArgs("true")
longCommand = withProcessArgs("/bin/sh", "-c", "while true; do sleep 1; done")
)
-func init() {
- switch runtime.GOARCH {
- case "386":
- testImage = "docker.io/i386/alpine:latest"
- case "arm":
- testImage = "docker.io/arm32v6/alpine:latest"
- case "arm64":
- testImage = "docker.io/arm64v8/alpine:latest"
- case "ppc64le":
- testImage = "docker.io/ppc64le/alpine:latest"
- case "s390x":
- testImage = "docker.io/s390x/alpine:latest"
- default:
- testImage = "docker.io/library/alpine:latest"
- }
-}
-
func TestImagePullSchema1WithEmptyLayers(t *testing.T) {
client, err := newClient(t, address)
if err != nil {
diff --git a/integration/client/container_linux_test.go b/integration/client/container_linux_test.go
index <HASH>..<HASH> 100644
--- a/integration/client/container_linux_test.go
+++ b/integration/client/container_linux_test.go
@@ -887,10 +887,10 @@ func TestContainerUsername(t *testing.T) {
io.Copy(buf, direct.Stdout)
}()
- // squid user in the alpine image has a uid of 31
+ // the www-data user in the busybox image has a uid of 33
container, err := client.NewContainer(ctx, id,
WithNewSnapshot(id, image),
- WithNewSpec(oci.WithImageConfig(image), oci.WithUsername("squid"), oci.WithProcessArgs("id", "-u")),
+ WithNewSpec(oci.WithImageConfig(image), oci.WithUsername("www-data"), oci.WithProcessArgs("id", "-u")),
)
if err != nil {
t.Fatal(err)
@@ -916,16 +916,16 @@ func TestContainerUsername(t *testing.T) {
wg.Wait()
output := strings.TrimSuffix(buf.String(), "\n")
- if output != "31" {
- t.Errorf("expected squid uid to be 31 but received %q", output)
+ if output != "33" {
+ t.Errorf("expected www-data uid to be 33 but received %q", output)
}
}
func TestContainerUser(t *testing.T) {
t.Parallel()
- t.Run("UserNameAndGroupName", func(t *testing.T) { testContainerUser(t, "squid:squid", "31:31") })
- t.Run("UserIDAndGroupName", func(t *testing.T) { testContainerUser(t, "1001:squid", "1001:31") })
- t.Run("UserNameAndGroupID", func(t *testing.T) { testContainerUser(t, "squid:1002", "31:1002") })
+ t.Run("UserNameAndGroupName", func(t *testing.T) { testContainerUser(t, "www-data:www-data", "33:33") })
+ t.Run("UserIDAndGroupName", func(t *testing.T) { testContainerUser(t, "1001:www-data", "1001:33") })
+ t.Run("UserNameAndGroupID", func(t *testing.T) { testContainerUser(t, "www-data:1002", "33:1002") })
t.Run("UserIDAndGroupID", func(t *testing.T) { testContainerUser(t, "1001:1002", "1001:1002") })
}
@@ -1225,7 +1225,7 @@ func TestContainerUserID(t *testing.T) {
io.Copy(buf, direct.Stdout)
}()
- // adm user in the alpine image has a uid of 3 and gid of 4.
+ // sys user in the busybox image has a uid and gid of 3.
container, err := client.NewContainer(ctx, id,
WithNewSnapshot(id, image),
WithNewSpec(oci.WithImageConfig(image), oci.WithUserID(3), oci.WithProcessArgs("sh", "-c", "echo $(id -u):$(id -g)")),
@@ -1254,8 +1254,8 @@ func TestContainerUserID(t *testing.T) {
wg.Wait()
output := strings.TrimSuffix(buf.String(), "\n")
- if output != "3:4" {
- t.Errorf("expected uid:gid to be 3:4, but received %q", output)
+ if output != "3:3" {
+ t.Errorf("expected uid:gid to be 3:3, but received %q", output)
}
} | Switch test image to a non rate-limited manifest list | containerd_containerd | train |
5949d7bd96c9d2eb97e83cbed51abe20b40e7e48 | diff --git a/engine/scheduler/__init__.py b/engine/scheduler/__init__.py
index <HASH>..<HASH> 100644
--- a/engine/scheduler/__init__.py
+++ b/engine/scheduler/__init__.py
@@ -9,6 +9,7 @@
import os
import time
import threading
+import platform
import subprocess
import tornado.options
@@ -25,9 +26,12 @@ class Scheduler(threading.Thread, dpEngine):
self.interrupted = False
self.schedules = []
self.path = os.path.dirname(os.path.realpath(__file__))
- self.path = os.path.join(self.path, '..', '..', 'scheduler.py')
+ self.path = os.path.dirname(self.path)
+ self.path = os.path.dirname(self.path)
+ self.path = os.path.join(self.path, 'scheduler.py')
self.python = tornado.options.options.python
self.ts = self.helper.datetime.time()
+ self.support_bg = False if platform.system() == 'Windows' else True
for e in schedules:
i = e[2] if len(e) >= 3 and isinstance(e[2], int) else 1
@@ -54,7 +58,10 @@ class Scheduler(threading.Thread, dpEngine):
if ts >= e['n']:
e['n'] = ts + e['s'] if isinstance(e['s'], int) else e['s'].get_next()
- subprocess.Popen([self.python, self.path, e['c']])
+ if not self.support_bg:
+ subprocess.Popen([self.python, self.path, e['c']])
+ else:
+ os.system('%s %s %s &' % (self.python, self.path, e['c']))
time.sleep(2) | Scheduler job executing as background. | why2pac_dp-tornado | train |
16ce4227e1974ce9d40523e149b4934e8190ce48 | diff --git a/rails_event_store_active_record-legacy/lib/rails_event_store_active_record/legacy/event_repository.rb b/rails_event_store_active_record-legacy/lib/rails_event_store_active_record/legacy/event_repository.rb
index <HASH>..<HASH> 100644
--- a/rails_event_store_active_record-legacy/lib/rails_event_store_active_record/legacy/event_repository.rb
+++ b/rails_event_store_active_record-legacy/lib/rails_event_store_active_record/legacy/event_repository.rb
@@ -8,8 +8,6 @@ module RailsEventStoreActiveRecord
class LegacyEvent < ::ActiveRecord::Base
self.primary_key = :id
self.table_name = 'event_store_events'
- serialize :metadata
- serialize :data
end
private_constant :LegacyEvent | No double data/metadata serialization in Legacy::EventRepository.
Probably missed as early as #<I>. Did not have a chance to show up until
repository stopped handling serialization itself (now it's the client).
[#<I>] | RailsEventStore_rails_event_store | train |
bd78751cb349b09363d9d3a9d21be8a6ac32da42 | diff --git a/far/far.py b/far/far.py
index <HASH>..<HASH> 100644
--- a/far/far.py
+++ b/far/far.py
@@ -15,7 +15,7 @@ class Far:
self.logger = Logger(self.verbosity)
def find_and_replace(self, old=None, new=None):
- if old is None or new is None:
+ if not old or not new:
return
cmd = "find . -type f -not -path '*/\.git*' -exec sed -i 's/" + old + "/" + new + "/g' {} + "
os.system(cmd)
diff --git a/tests/test_far.py b/tests/test_far.py
index <HASH>..<HASH> 100644
--- a/tests/test_far.py
+++ b/tests/test_far.py
@@ -22,16 +22,28 @@ class TestFar(unittest.TestCase):
self.obj = Far()
@patch('os.system')
+ def test_should_do_nothing_if_old_is_empty(self, mock_system):
+ mock_system.return_value = 0
+ self.obj.find_and_replace('', self.new)
+ self.assertFalse(mock_system.called)
+
+ @patch('os.system')
+ def test_should_do_nothing_if_new_is_empty(self, mock_system):
+ mock_system.return_value = 0
+ self.obj.find_and_replace(self.old, '')
+ self.assertFalse(mock_system.called)
+
+ @patch('os.system')
def test_should_do_nothing_if_old_is_none(self, mock_system):
mock_system.return_value = 0
self.obj.find_and_replace(None, self.new)
- self.assertEqual(mock_system.called, 0)
+ self.assertFalse(mock_system.called)
@patch('os.system')
def test_should_do_nothing_if_new_is_none(self, mock_system):
mock_system.return_value = 0
self.obj.find_and_replace(self.old, None)
- self.assertEqual(mock_system.called, 0)
+ self.assertFalse(mock_system.called)
@patch('os.system')
def test_should_run_system_command(self, mock_system): | Handle situation where old/new is empty | ylogx_far | train |
e2aa1d5f81191e7a8d62ce8510fe8c2726012130 | diff --git a/src/com/jfinal/core/Const.java b/src/com/jfinal/core/Const.java
index <HASH>..<HASH> 100644
--- a/src/com/jfinal/core/Const.java
+++ b/src/com/jfinal/core/Const.java
@@ -23,7 +23,7 @@ import com.jfinal.render.ViewType;
*/
public interface Const {
- String JFINAL_VERSION = "2.1";
+ String JFINAL_VERSION = "2.2";
ViewType DEFAULT_VIEW_TYPE = ViewType.FREE_MARKER; | jfinal <I> release ^_^ | jfinal_jfinal | train |
0fb8c5e022742b9e217d237f52277f45e1226ee4 | diff --git a/scripts/install.js b/scripts/install.js
index <HASH>..<HASH> 100644
--- a/scripts/install.js
+++ b/scripts/install.js
@@ -28,6 +28,9 @@ var packageUrl = function(suffix) {
}
var download = function(suffix){
+ console.log(
+ 'Downloading and extracting the binary from: ' + packageUrl(suffix))
+
request
.get(packageUrl(suffix))
.pipe(zlib.createGunzip())
diff --git a/scripts/run.js b/scripts/run.js
index <HASH>..<HASH> 100755
--- a/scripts/run.js
+++ b/scripts/run.js
@@ -15,7 +15,7 @@ var platform = os.platform()
var arch = process.arch
var execute = function(suffix) {
- exec(executablePath(suffix), [ 'install' ], { stdio: 'inherit' })
+ exec(executablePath(suffix), [ process.argv.slice(2) ], { stdio: 'inherit' })
}
var executablePath = function(suffix) { | Log and pass arguments to the binary. | gdotdesign_elm-github-install | train |
0904692fb9683150797e7ea6a4756cb9360438c3 | diff --git a/anyconfig/tests/schema.py b/anyconfig/tests/schema.py
index <HASH>..<HASH> 100644
--- a/anyconfig/tests/schema.py
+++ b/anyconfig/tests/schema.py
@@ -32,15 +32,15 @@ class Test_00_Base(unittest.TestCase):
class Test_00_Functions(Test_00_Base):
- def test_20_array_to_schema_node(self):
- scm = TT.array_to_schema_node([1])
- ref_scm = {'type': 'integer'}
- self.assertTrue(dicts_equal(scm, ref_scm), scm)
-
- def test_22_array_to_schema_node__empty_array(self):
- scm = TT.array_to_schema_node([])
- ref_scm = {'type': 'string'}
- self.assertTrue(dicts_equal(scm, ref_scm), scm)
+ def test_20_array_to_schema(self):
+ scm = TT.array_to_schema([1])
+ ref = dict(items=dict(type="integer"), type="array")
+ self.assertTrue(dicts_equal(scm, ref), scm)
+
+ def test_22_array_to_schema__empty_array(self):
+ scm = TT.array_to_schema([])
+ ref = dict(items=dict(type="string"), type="array")
+ self.assertTrue(dicts_equal(scm, ref), scm)
def test_30_object_to_schema_nodes_iter(self):
scm = TT.object_to_schema({'a': 1}) | change: alter a test case of array_to_schema_node to array_to_schema | ssato_python-anyconfig | train |
cc03f5c3b14c3ccf957a6bd9b00034c496b0a825 | diff --git a/source/rafcon/mvc/controllers/state_editor/source_editor.py b/source/rafcon/mvc/controllers/state_editor/source_editor.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/controllers/state_editor/source_editor.py
+++ b/source/rafcon/mvc/controllers/state_editor/source_editor.py
@@ -53,11 +53,11 @@ class SourceEditorController(EditorController):
@property
def source_text(self):
- return self.model.state.script.script
+ return self.model.state.script_text
@source_text.setter
def source_text(self, text):
- self.model.state.script.script = text
+ self.model.state.script_text = text
# ===============================================================
def code_changed(self, source):
diff --git a/source/rafcon/mvc/controllers/utils/editor.py b/source/rafcon/mvc/controllers/utils/editor.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/controllers/utils/editor.py
+++ b/source/rafcon/mvc/controllers/utils/editor.py
@@ -63,7 +63,7 @@ class EditorController(ExtendedController):
def _undo(self, *args):
buffer = self.view.textview.get_buffer()
- if self.view.textview.is_focus() and buffer.can_undo():
+ if self.view.textview.is_focus() and hasattr(buffer, 'can_undo') and buffer.can_undo():
logger.debug('Run undo on {}'.format(self.__class__.__name__))
return buffer.undo()
else:
@@ -71,7 +71,7 @@ class EditorController(ExtendedController):
def _redo(self, *args):
buffer = self.view.textview.get_buffer()
- if self.view.textview.is_focus() and buffer.can_redo():
+ if self.view.textview.is_focus() and hasattr(buffer, 'can_redo') and buffer.can_redo():
logger.debug('Run redo on {}'.format(self.__class__.__name__))
return buffer.redo()
else:
diff --git a/source/rafcon/mvc/views/state_editor/description_editor.py b/source/rafcon/mvc/views/state_editor/description_editor.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/views/state_editor/description_editor.py
+++ b/source/rafcon/mvc/views/state_editor/description_editor.py
@@ -15,5 +15,7 @@ class DescriptionEditorView(EditorView):
self.textview.set_show_line_numbers(False)
self.textview.set_auto_indent(True)
self.textview.set_highlight_current_line(True)
+ b = self.textview.get_buffer()
+ b.set_highlight_syntax(False)
except NameError:
pass
diff --git a/source/rafcon/mvc/views/state_editor/state_editor.py b/source/rafcon/mvc/views/state_editor/state_editor.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/views/state_editor/state_editor.py
+++ b/source/rafcon/mvc/views/state_editor/state_editor.py
@@ -75,7 +75,6 @@ class StateEditorView(View):
self['new_scoped_variable_button'].set_border_width(constants.BUTTON_BORDER_WIDTH)
self['delete_scoped_variable_button'].set_border_width(constants.BUTTON_BORDER_WIDTH)
-
def bring_tab_to_the_top(self, tab_label):
"""Find tab with label tab_label in list of notebook's and set it to the current page.
diff --git a/source/rafcon/statemachine/states/container_state.py b/source/rafcon/statemachine/states/container_state.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/statemachine/states/container_state.py
+++ b/source/rafcon/statemachine/states/container_state.py
@@ -145,7 +145,6 @@ class ContainerState(State):
state = self.__class__(self.name, self.state_id, input_data_ports, output_data_ports, outcomes, states,
transitions, data_flows, self.start_state_id, scoped_variables, None)
- # state.script_text = copy(self.script_text)
state.description = deepcopy(self.description)
return state | fix asterisk issue of source editor and remove code-highlighting in description editor
- textbuffer without undo and redo can be used in Editor base class now
- remove uneceassry line in ContainerState | DLR-RM_RAFCON | train |
5a3ea3df85758d13ae51610a0939d20f2f9bb65b | diff --git a/bcbio/chipseq/antibodies.py b/bcbio/chipseq/antibodies.py
index <HASH>..<HASH> 100644
--- a/bcbio/chipseq/antibodies.py
+++ b/bcbio/chipseq/antibodies.py
@@ -1,38 +1,41 @@
-from dataclasses import dataclass
+#from dataclasses import dataclass
+from collections import namedtuple
VALID_PEAKTYPES = ["narrow", "broad"]
-@dataclass
-class Antibody:
- """
- ChIP-seq antibody
- """
- name: str
- # call narrow or broad peaks
- peaktype: str
- # remove duplicates?
- rmdup: bool = True
+# @dataclass
+# class Antibody:
+# """
+# ChIP-seq antibody
+# """
+# name: str
+# # call narrow or broad peaks
+# peaktype: str
+# # remove duplicates?
+# rmdup: bool = True
- def __post_init__(self):
- if self.peaktype not in VALID_PEAKTYPES:
- raise TypeError(f"peaktype {self.peatktype} is not one of {VALID_PEAKTYPES}")
+# def __post_init__(self):
+# if self.peaktype not in VALID_PEAKTYPES:
+# raise TypeError(f"peaktype {self.peatktype} is not one of {VALID_PEAKTYPES}")
+
+Antibody = namedtuple('Antibody', 'name peaktype rmdup')
_ANTIBODIES = [
- Antibody("h3f3a", "broad"),
- Antibody("h3k27me3", "broad"),
- Antibody("h3k36me3", "broad"),
- Antibody("h3k4me1", "broad"),
- Antibody("h3k79me2", "broad"),
- Antibody("h3k79me3", "broad"),
- Antibody("h3k9me1", "broad"),
- Antibody("h3k9me2", "broad"),
- Antibody("h4k20me1", "broad"),
- Antibody("h2afz", "narrow"),
- Antibody("h3ac", "narrow"),
- Antibody("h3k27ac", "narrow"),
- Antibody("h3k4me2", "narrow"),
- Antibody("h3k4me3", "narrow"),
- Antibody("h3k9ac", "narrow"),
+ Antibody("h3f3a", "broad", True),
+ Antibody("h3k27me3", "broad", True),
+ Antibody("h3k36me3", "broad", True),
+ Antibody("h3k4me1", "broad", True),
+ Antibody("h3k79me2", "broad", True),
+ Antibody("h3k79me3", "broad", True),
+ Antibody("h3k9me1", "broad", True),
+ Antibody("h3k9me2", "broad", True),
+ Antibody("h4k20me1", "broad", True),
+ Antibody("h2afz", "narrow", True),
+ Antibody("h3ac", "narrow", True),
+ Antibody("h3k27ac", "narrow", True),
+ Antibody("h3k4me2", "narrow", True),
+ Antibody("h3k4me3", "narrow", True),
+ Antibody("h3k9ac", "narrow", True),
Antibody("h3k9me3", "broad", False)
]
diff --git a/bcbio/pipeline/datadict.py b/bcbio/pipeline/datadict.py
index <HASH>..<HASH> 100644
--- a/bcbio/pipeline/datadict.py
+++ b/bcbio/pipeline/datadict.py
@@ -208,7 +208,7 @@ LOOKUPS = {
"disc_bam": {"keys": ["work_bam_plus", "disc"]},
"sr_bam": {"keys": ["work_bam_plus", "sr"]},
"peddy_report": {"keys": ["peddy_report"]},
- "chipseq_antibody": {"keys": ["config", "algorithm", "chipseq", "antibody"]},
+ "chipseq_antibody": {"keys": ["config", "algorithm", "chipseq", "antibody"], "default": ""},
"peaktype": {"keys": ["config", "algorithm", "chipseq", "peaktype"]},
"tools_off": {"keys": ["config", "algorithm", "tools_off"], "default": [], "always_list": True},
"tools_on": {"keys": ["config", "algorithm", "tools_on"], "default": [], "always_list": True}, | Use namedtuple instead of dataclasses.
dataclasses isn't supported in <I>. I fixed this before, but it
looks like I didn't commit the change. | bcbio_bcbio-nextgen | train |
4e1cbd455b35b48c5793bd06ce819b8664322cb6 | diff --git a/foolbox/attacks/adef_attack.py b/foolbox/attacks/adef_attack.py
index <HASH>..<HASH> 100644
--- a/foolbox/attacks/adef_attack.py
+++ b/foolbox/attacks/adef_attack.py
@@ -256,8 +256,8 @@ class ADefAttack(Attack):
assert isinstance(subsample, int)
ind_of_candidates = np.arange(1, subsample)
else:
- pred, _ = a.predictions(perturbed)
- pred_sorted = (-pred).argsort()
+ logits, _ = a.predictions(perturbed)
+ pred_sorted = (-logits).argsort()
index_of_target_class, = np.where(pred_sorted == target_class)
ind_of_candidates = index_of_target_class
# Include the correct label (index 0) in the list of targets.
@@ -355,16 +355,16 @@ class ADefAttack(Attack):
norm_full = norm_min
# getting the current label after applying the vector field
- fx, _ = a.predictions(perturbed)
- current_label = np.argmax(fx)
- fx = fx - fx[current_label]
+ logits, _ = a.predictions(perturbed)
+ current_label = np.argmax(logits)
+ fx = logits - logits[current_label]
logging.info('Iterations finished: {} '.format(n))
logging.info('Current label: {} '.format(current_label))
logging.info('Norm vector field: {} '.format(norm_full))
- fx, _ = a.predictions(perturbed)
- current_label = np.argmax(fx)
+ logits, _ = a.predictions(perturbed)
+ current_label = np.argmax(logits)
logging.info('{} -> {}'.format(original_label, current_label))
a.predictions(perturbed) | renamed variables to allow for better readability | bethgelab_foolbox | train |
f2bd7099e0e4933e67ea4ce149b943b007cb60f8 | diff --git a/tamil/utf8.py b/tamil/utf8.py
index <HASH>..<HASH> 100644
--- a/tamil/utf8.py
+++ b/tamil/utf8.py
@@ -341,6 +341,19 @@ def compare_words_lexicographic( word_a, word_b ):
# else result depends on if La is shorter than Lb
return cmp(La,Lb)
+# return a list of ordered-pairs containing positions
+# that are common in word_a, and word_b; e.g.
+# தேடுக x தடங்கல் -> one common letter க [(2,3)]
+# சொல் x தேடுக -> no common letters []
+def word_intersection( word_a, word_b ):
+ positions = []
+ word_a_letters = get_letters( word_a )
+ word_b_letters = get_letters( word_b )
+ for idx,wa in enumerate(word_a_letters):
+ for idy,wb in enumerate(word_b_letters):
+ if ( wa == wb ):
+ positions.append( (idx, idy) )
+ return positions
def splitMeiUyir(uyirmei_char):
"""
@@ -373,7 +386,6 @@ def splitMeiUyir(uyirmei_char):
return (mei_letters[meiidx], uyir_letters[uyiridx])
# end of def splitMeiUyir(uyirmei_char):
-
def joinMeiUyir(mei_char, uyir_char):
"""
This function join mei character and uyir character, and retuns as | upstream function added "word_intersection" after resolved merge | Ezhil-Language-Foundation_open-tamil | train |
bbda6b98db8e65bf52e15d9f52042e7e7ac3936c | diff --git a/lib/thinking_sphinx/active_record/property_query.rb b/lib/thinking_sphinx/active_record/property_query.rb
index <HASH>..<HASH> 100644
--- a/lib/thinking_sphinx/active_record/property_query.rb
+++ b/lib/thinking_sphinx/active_record/property_query.rb
@@ -27,6 +27,7 @@ primary key.
attr_reader :property, :source, :type
delegate :unscoped, :to => :base_association_class, :prefix => true
+ delegate :sql, :to => Arel
def base_association
reflections.first
@@ -135,7 +136,7 @@ primary key.
relation = relation.joins(joins) if joins.present?
relation = relation.where("#{quoted_foreign_key} BETWEEN $start AND $end") if ranged?
relation = relation.where("#{quoted_foreign_key} IS NOT NULL")
- relation = relation.order("#{quoted_foreign_key} ASC") if type.nil?
+ relation = relation.order(sql("#{quoted_foreign_key} ASC")) if type.nil?
relation.to_sql
end | Use SQL literals in ORDER construction.
ActiveRecord is printing warnings for this due to an upcoming change. | pat_thinking-sphinx | train |
b995ecf8dded074aa2f62c48653de6948de590ef | diff --git a/core/src/main/java/org/trimou/engine/locator/TemplateLocator.java b/core/src/main/java/org/trimou/engine/locator/TemplateLocator.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/trimou/engine/locator/TemplateLocator.java
+++ b/core/src/main/java/org/trimou/engine/locator/TemplateLocator.java
@@ -34,7 +34,7 @@ import org.trimou.engine.priority.WithPriority;
public interface TemplateLocator extends WithPriority, ConfigurationAware {
/**
- * The reader is always closed right after the template source is read.
+ * The reader is always closed by the engine right after the template source is read.
*
* @param templateId
* The template identifier
diff --git a/core/src/main/java/org/trimou/util/IOUtils.java b/core/src/main/java/org/trimou/util/IOUtils.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/trimou/util/IOUtils.java
+++ b/core/src/main/java/org/trimou/util/IOUtils.java
@@ -30,6 +30,9 @@ import com.google.common.io.CharStreams;
@Internal
public final class IOUtils {
+ private IOUtils() {
+ }
+
/**
* The reader is closed right after the input is read.
*
@@ -48,6 +51,7 @@ public final class IOUtils {
}
/**
+ * Does not close the {@code Reader}.
*
* @param input
* @param bufferSize
@@ -62,6 +66,7 @@ public final class IOUtils {
}
/**
+ * Does not close the {@code Reader}.
*
* @param input
* @param output
diff --git a/core/src/test/java/org/trimou/engine/MustacheEngineTest.java b/core/src/test/java/org/trimou/engine/MustacheEngineTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/org/trimou/engine/MustacheEngineTest.java
+++ b/core/src/test/java/org/trimou/engine/MustacheEngineTest.java
@@ -1,7 +1,9 @@
package org.trimou.engine;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
import java.io.Reader;
import java.io.StringReader;
@@ -13,16 +15,19 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Before;
import org.junit.Test;
import org.trimou.AbstractEngineTest;
import org.trimou.ArchiveType;
+import org.trimou.ExceptionAssert;
import org.trimou.Mustache;
import org.trimou.engine.config.EngineConfigurationKey;
import org.trimou.engine.locator.AbstractTemplateLocator;
import org.trimou.engine.locator.MapTemplateLocator;
import org.trimou.engine.locator.TemplateLocator;
+import org.trimou.exception.MustacheException;
import org.trimou.lambda.Lambda;
import org.trimou.lambda.SpecCompliantLambda;
@@ -113,7 +118,7 @@ public class MustacheEngineTest extends AbstractEngineTest {
Map<String, String> templates = new HashMap<String, String>();
templates.put("foo", "0");
- long timeout = 2;
+ long timeout = 1;
MustacheEngine engine = MustacheEngineBuilder
.newBuilder()
@@ -203,9 +208,71 @@ public class MustacheEngineTest extends AbstractEngineTest {
}
@Test
+ public void testTemplateLocatorReaderIsAlwaysClosed() {
+
+ final String template = "FOO";
+ final String illegalTemplate = "{{foo";
+ final AtomicBoolean isCloseInvoked = new AtomicBoolean(false);
+
+ TemplateLocator locator = new AbstractTemplateLocator(1) {
+ @SuppressWarnings("resource")
+ @Override
+ public Reader locate(String templateId) {
+ return "foo".equals(templateId) ? new MyStringReader(template,
+ isCloseInvoked) : new MyStringReader(illegalTemplate,
+ isCloseInvoked);
+ }
+
+ @Override
+ public Set<String> getAllIdentifiers() {
+ return null;
+ }
+ };
+
+ final MustacheEngine engine = MustacheEngineBuilder.newBuilder()
+ .addTemplateLocator(locator).build();
+
+ assertEquals(template, engine.getMustacheSource("foo"));
+ assertTrue(isCloseInvoked.get());
+
+ engine.invalidateTemplateCache();
+ isCloseInvoked.set(false);
+ assertFalse(isCloseInvoked.get());
+
+ assertEquals(template, engine.getMustache("foo").render(null));
+ assertTrue(isCloseInvoked.get());
+
+ isCloseInvoked.set(false);
+ assertFalse(isCloseInvoked.get());
+
+ ExceptionAssert.expect(MustacheException.class).check(new Runnable() {
+ public void run() {
+ engine.getMustache("whatever").render(null);
+ }
+ });
+ assertTrue(isCloseInvoked.get());
+ }
+
+ @Test
public void testHelloWorld() {
String data = "Hello world!";
assertEquals(data, MustacheEngineBuilder.newBuilder().build()
.compileMustache("myTemplateName", "{{this}}").render(data));
}
+
+ private static class MyStringReader extends StringReader {
+
+ final AtomicBoolean isCloseInvoked;
+
+ public MyStringReader(String s, AtomicBoolean isClosed) {
+ super(s);
+ this.isCloseInvoked = isClosed;
+ }
+
+ @Override
+ public void close() {
+ isCloseInvoked.set(true);
+ super.close();
+ }
+ }
} | Add test - TemplateLocator reader is always closed (#<I>) | trimou_trimou | train |
0b0f26f462a1fb717e181c428dec8997e33fc8bd | diff --git a/tests/test_kde.py b/tests/test_kde.py
index <HASH>..<HASH> 100644
--- a/tests/test_kde.py
+++ b/tests/test_kde.py
@@ -33,7 +33,7 @@ def test_kde_entropy():
somatic_base = ['C']
# check mutation info
- aa_info = pt.get_aa_mut_info(coding_pos, somatic_base, gs)
+ aa_info = utils.get_aa_mut_info(coding_pos, somatic_base, gs)
pos_array = np.array(aa_info['Codon Pos'], dtype=np.int)
entropy, bandwidth = cutils.kde_entropy(pos_array)
assert_msg = ('A single mutation should be 1.0 for entropy fraction '
@@ -47,7 +47,8 @@ def test_kde_entropy():
'context': 1,
'tsg_score': .05,
'processes': 1,
- 'num_permutations': 10000}
+ 'num_permutations': 10000,
+ 'kind': 'oncogene'}
mut_df = pd.read_csv(opts['mutations'], sep='\t')
# CTNNB1 should have few deleterious mutations, so check it
diff --git a/tests/test_permutation_test.py b/tests/test_permutation_test.py
index <HASH>..<HASH> 100644
--- a/tests/test_permutation_test.py
+++ b/tests/test_permutation_test.py
@@ -19,7 +19,8 @@ def test_ctnnb1_main():
'tsg_score': .05,
'processes': 1,
'num_permutations': 10000,
- 'bandwidth_permutations': 100}
+ 'bandwidth_permutations': 100,
+ 'kind': 'oncogene'}
# single nucleotide context
result = pt.main(opts)
assert result.ix[0, 'recurrent p-value'] < 0.001, 'CTNNB1 should have a very low p-value ({0}>.001)'.format(result[0][2])
@@ -57,7 +58,7 @@ def test_ctnnb1_get_aa_mut_info():
somatic_base = ['C']
# check mutation info
- aa_info = pt.get_aa_mut_info(coding_pos, somatic_base, gs)
+ aa_info = utils.get_aa_mut_info(coding_pos, somatic_base, gs)
ref_codon_msg = 'First codon should be start codon ({0})'.format(aa_info['Reference Codon'][0])
assert aa_info['Reference Codon'][0] == 'ATG', ref_codon_msg
assert aa_info['Somatic Codon'][0] == 'CTG', 'First "A" should be replaced with a "C"'
@@ -73,7 +74,8 @@ def test_100genes_main():
'tsg_score': .1,
'processes': 5,
'num_permutations': 10000,
- 'bandwidth_permutations': 100}
+ 'bandwidth_permutations': 100,
+ 'kind': 'oncogene'}
# single nucleotide context
result = pt.main(opts)
tested_result = result[result['Performed Recurrency Test']==1] | Updated unit tests to use the newly updated command line options | KarchinLab_probabilistic2020 | train |
6e3ad563f6bbfbdc1866cb802259dc7f53173e32 | diff --git a/documentation/samples/src/main/java/org/wisdom/samples/websockets/SimpleWebSocket.java b/documentation/samples/src/main/java/org/wisdom/samples/websockets/SimpleWebSocket.java
index <HASH>..<HASH> 100644
--- a/documentation/samples/src/main/java/org/wisdom/samples/websockets/SimpleWebSocket.java
+++ b/documentation/samples/src/main/java/org/wisdom/samples/websockets/SimpleWebSocket.java
@@ -37,17 +37,17 @@ public class SimpleWebSocket extends DefaultController {
@Requires
Json json;
- @Opened("ws/{name}")
+ @Opened("/ws/{name}")
public void open(@Parameter("name") String name) {
System.out.println("Web socket opened => " + name);
}
- @Closed("ws/{name}")
+ @Closed("/ws/{name}")
public void close(@Parameter("name") String name) {
System.out.println("Web socket closed => " + name);
}
- @OnMessage("ws/{name}")
+ @OnMessage("/ws/{name}")
public void onMessage(@Body Message message, @Parameter("name") String name) {
System.out.println("Receiving message on " + name + " : " + message.message);
publisher.publish("/ws/" + name, json.toJson(message.message.toUpperCase())); | Fix web socket uri in samples : The initial / is required. | wisdom-framework_wisdom | train |
c9272884035d4e70c036aa21f233bf4d033b5942 | diff --git a/src/main/com/mongodb/DBRef.java b/src/main/com/mongodb/DBRef.java
index <HASH>..<HASH> 100644
--- a/src/main/com/mongodb/DBRef.java
+++ b/src/main/com/mongodb/DBRef.java
@@ -2,12 +2,22 @@
package com.mongodb;
-import java.util.*;
-
public class DBRef {
static final boolean D = Boolean.getBoolean( "DEBUG.DBREF" );
+ /**
+ * CTOR used for testing BSON encoding. Otherwise
+ * non-functional due to a DBRef needing a parent db object,
+ * a fieldName and a db
+ *
+ * @param ns namespace to point to
+ * @param id value of _id
+ */
+ public DBRef(String ns, ObjectId id) {
+ this (null, null, null, ns, id);
+ }
+
DBRef( DBObject parent , String fieldName , DBBase db , String ns , ObjectId id ){
_parent = parent;
@@ -19,25 +29,25 @@ public class DBRef {
_id = id;
}
- private DBObject fetch(){
- if ( _loadedPointedTo )
- return _pointedTo;
-
- if ( _db == null )
- throw new RuntimeException( "no db" );
+ private DBObject fetch() {
+ if (_loadedPointedTo)
+ return _pointedTo;
+
+ if (_db == null)
+ throw new RuntimeException("no db");
- if ( D ){
- System.out.println( "following dbref. parent.field:" + _fieldName + " ref to ns:" + _ns );
+ if (D) {
+ System.out.println("following dbref. parent.field:" + _fieldName + " ref to ns:" + _ns);
Throwable t = new Throwable();
t.fillInStackTrace();
t.printStackTrace();
}
-
- final DBCollection coll = _db.getCollectionFromString( _ns );
-
- _pointedTo = coll.find( _id );
- _loadedPointedTo = true;
- return _pointedTo;
+
+ final DBCollection coll = _db.getCollectionFromString(_ns);
+
+ _pointedTo = coll.find(_id);
+ _loadedPointedTo = true;
+ return _pointedTo;
}
final DBObject _parent;
@@ -49,5 +59,4 @@ public class DBRef {
private boolean _loadedPointedTo = false;
private DBObject _pointedTo;
-
} | DBRef : add a constructor that does't require to know about
a parent object, a field name and a database (!). Not sure
if the resultant object is useful in general, but is certainly
serializable.
Also some reformatting of the code | mongodb_mongo-java-driver | train |
8a0dd932f43b5e5742308979ffea2256354a49e3 | diff --git a/scripts/make.js b/scripts/make.js
index <HASH>..<HASH> 100644
--- a/scripts/make.js
+++ b/scripts/make.js
@@ -9,7 +9,6 @@ const {
both,
prop,
replace,
- join,
omit,
merge,
forEach,
@@ -31,7 +30,6 @@ const resolvePath = (...paths) => path.resolve(__dirname, '..', ...paths)
const isDevelopment = process.env.NODE_ENV === 'development'
const SRC_MODULES = 'src'
-const ESM_MODULES = 'esm'
const CJS_MODULES = 'cjs'
const SOURCE_PATH = resolvePath('src')
@@ -65,90 +63,28 @@ const takeModules = pipe(
)
const removeSourcePath = replace(SOURCE_PATH, '')
-const toStringKeyValue = module => `'${module.key}': '${module.value}'`
-const indentLine = line => ` ${line},`
-const toStringObject = pipe(
- map(
- pipe(
- toStringKeyValue,
- indentLine,
- ),
- ),
- join('\n'),
-)
-
-const pathMappingTemplate = obj =>
- `
-"use strict"
-
-module.exports = function() {
- return {
-${toStringObject(obj)}
- }
-}
- `
const createModulePath = format => {
- const modulePath = resolvePath(DIR_PATH, format)
+ const formatPathSegment = format === CJS_MODULES ? [] : [format]
+ const modulePath = resolvePath(DIR_PATH, ...formatPathSegment)
return replace(SOURCE_PATH, modulePath)
}
-const createPathName = file => {
- const value = removeSourcePath(file)
- return endsWith('index.js', value) ? path.dirname(value) : replace('.js', '', value)
-}
-
-const createModuleName = name => `${pkg.name}${name}`
-
-const buildPathMapping = format =>
- pipe(
- map(file => {
- const name = createPathName(file)
-
- return {
- key: createModuleName(name),
- value: `${isDevelopment ? DEV_PATH : pkg.name}/${format}${name}`,
- }
- }),
- pathMappingTemplate,
- content => {
- try {
- mkdirp.sync(resolvePath(DIR_PATH, format))
- fs.writeFileSync(resolvePath(DIR_PATH, format, 'path-mapping.js'), content)
- } catch (err) {
- // eslint-disable-next-line
- console.error(err)
- }
- },
- )
-
const createFolder = dir => mkdirp.sync(resolvePath(dir))
-const configForFormat = format => ({
- overrides: [
- {
- plugins: format === CJS_MODULES ? ['@babel/plugin-transform-modules-commonjs'] : [],
- },
- ],
-})
-
const babelTransform = (format, file) => {
if (format === SRC_MODULES) {
// no transform, just return source
return fs.readFileSync(file)
}
- const config = configForFormat(format)
- const { code } = babel.transformFileSync(file, config)
+ const { code } = babel.transformFileSync(file, {})
return code
}
const paths = klaw(SOURCE_PATH)
const modules = takeModules(paths)
-const buildCjsPathMapping = buildPathMapping(CJS_MODULES)
-const buildEsmPathMapping = buildPathMapping(ESM_MODULES)
-
const buildModule = format => file => {
const modulePath = createModulePath(format)
const code = babelTransform(format, file)
@@ -161,8 +97,7 @@ const buildModule = format => file => {
const prepareJson = pipe(
omit(['scripts']),
merge({
- main: './cjs/index.js',
- module: './esm/index.js',
+ main: './index.js',
sideEffects: false,
}),
obj => prettyJson(obj),
@@ -194,20 +129,16 @@ const copyNonJavaScriptFiles = buildPath => {
if (isDevelopment) {
const buildCjsModule = buildModule(CJS_MODULES)
- const buildEsmModule = buildModule(ESM_MODULES)
const buildSrcModule = buildModule(SRC_MODULES)
const buildFile = file => {
buildSrcModule(file)
buildCjsModule(file)
- buildEsmModule(file)
}
cleanFolder(DEV_PATH)
createFolder(DEV_PATH)
copyNonJavaScriptFiles(DEV_PATH)
- buildCjsPathMapping(modules)
- buildEsmPathMapping(modules)
chokidar
.watch(resolvePath('src'), { ignored: DO_NOT_BUILD_PATHS })
@@ -227,16 +158,12 @@ if (isDevelopment) {
} else {
const buildModules = format => mapAsync(buildModule(format))
const buildCjsModules = buildModules(CJS_MODULES)
- const buildEsmModules = buildModules(ESM_MODULES)
const buildSrcModules = buildModules(SRC_MODULES)
cleanFolder(DIST_PATH)
createFolder(DIST_PATH)
copyNonJavaScriptFiles(DIST_PATH)
- buildSrcModules(modules)
- buildCjsPathMapping(modules)
- buildEsmPathMapping(modules)
- buildEsmModules(modules)
+ buildSrcModules(modules)
buildCjsModules(modules)
} | Simplify build script - don't build ESM, path mappings | Nozbe_WatermelonDB | train |
43506b78686c9e68eb1875cf428964a8644725dd | diff --git a/schemaregistry/schemaregistry_client.go b/schemaregistry/schemaregistry_client.go
index <HASH>..<HASH> 100644
--- a/schemaregistry/schemaregistry_client.go
+++ b/schemaregistry/schemaregistry_client.go
@@ -265,14 +265,21 @@ func (c *client) Register(subject string, schema SchemaInfo, normalize bool) (id
metadata := SchemaMetadata{
SchemaInfo: schema,
}
- err = c.restService.handleRequest(newRequest("POST", versionNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
- if err != nil {
- return -1, err
- }
c.schemaCacheLock.Lock()
- c.schemaCache.Put(cacheKey, metadata.ID)
+ // another goroutine could have already put it in cache
+ idValue, ok = c.schemaCache.Get(cacheKey)
+ if !ok {
+ err = c.restService.handleRequest(newRequest("POST", versionNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
+ if err == nil {
+ c.schemaCache.Put(cacheKey, metadata.ID)
+ } else {
+ metadata.ID = -1
+ }
+ } else {
+ metadata.ID = idValue.(int)
+ }
c.schemaCacheLock.Unlock()
- return metadata.ID, nil
+ return metadata.ID, err
}
// GetBySubjectAndID returns the schema identified by id
@@ -290,23 +297,29 @@ func (c *client) GetBySubjectAndID(subject string, id int) (schema SchemaInfo, e
}
metadata := SchemaMetadata{}
- if len(subject) > 0 {
- err = c.restService.handleRequest(newRequest("GET", schemasBySubject, nil, id, url.QueryEscape(subject)), &metadata)
+ newInfo := &SchemaInfo{}
+ c.idCacheLock.Lock()
+ // another goroutine could have already put it in cache
+ infoValue, ok = c.idCache.Get(cacheKey)
+ if !ok {
+ if len(subject) > 0 {
+ err = c.restService.handleRequest(newRequest("GET", schemasBySubject, nil, id, url.QueryEscape(subject)), &metadata)
+ } else {
+ err = c.restService.handleRequest(newRequest("GET", schemas, nil, id), &metadata)
+ }
+ if err == nil {
+ newInfo = &SchemaInfo{
+ Schema: metadata.Schema,
+ SchemaType: metadata.SchemaType,
+ References: metadata.References,
+ }
+ c.idCache.Put(cacheKey, newInfo)
+ }
} else {
- err = c.restService.handleRequest(newRequest("GET", schemas, nil, id), &metadata)
- }
- if err != nil {
- return SchemaInfo{}, err
+ newInfo = infoValue.(*SchemaInfo)
}
- newInfo := &SchemaInfo{
- Schema: metadata.Schema,
- SchemaType: metadata.SchemaType,
- References: metadata.References,
- }
- c.idCacheLock.Lock()
- c.idCache.Put(cacheKey, newInfo)
c.idCacheLock.Unlock()
- return *newInfo, nil
+ return *newInfo, err
}
// GetID checks if a schema has been registered with the subject. Returns ID if the registration can be found
@@ -325,18 +338,25 @@ func (c *client) GetID(subject string, schema SchemaInfo, normalize bool) (id in
if ok {
return idValue.(int), nil
}
+
metadata := SchemaMetadata{
SchemaInfo: schema,
}
-
- err = c.restService.handleRequest(newRequest("POST", subjectsNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
- if err != nil {
- return -1, err
- }
c.schemaCacheLock.Lock()
- c.schemaCache.Put(cacheKey, metadata.ID)
+ // another goroutine could have already put it in cache
+ idValue, ok = c.schemaCache.Get(cacheKey)
+ if !ok {
+ err = c.restService.handleRequest(newRequest("POST", subjectsNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
+ if err == nil {
+ c.schemaCache.Put(cacheKey, metadata.ID)
+ } else {
+ metadata.ID = -1
+ }
+ } else {
+ metadata.ID = idValue.(int)
+ }
c.schemaCacheLock.Unlock()
- return metadata.ID, nil
+ return metadata.ID, err
}
// GetLatestSchemaMetadata fetches latest version registered with the provided subject
@@ -381,18 +401,25 @@ func (c *client) GetVersion(subject string, schema SchemaInfo, normalize bool) (
if ok {
return versionValue.(int), nil
}
+
metadata := SchemaMetadata{
SchemaInfo: schema,
}
-
- err = c.restService.handleRequest(newRequest("POST", subjectsNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
- if err != nil {
- return -1, err
- }
c.versionCacheLock.Lock()
- c.versionCache.Put(cacheKey, metadata.Version)
+ // another goroutine could have already put it in cache
+ versionValue, ok = c.versionCache.Get(cacheKey)
+ if !ok {
+ err = c.restService.handleRequest(newRequest("POST", subjectsNormalize, &metadata, url.PathEscape(subject), normalize), &metadata)
+ if err == nil {
+ c.versionCache.Put(cacheKey, metadata.Version)
+ } else {
+ metadata.Version = -1
+ }
+ } else {
+ metadata.Version = versionValue.(int)
+ }
c.versionCacheLock.Unlock()
- return metadata.Version, nil
+ return metadata.Version, err
}
// Fetch all Subjects registered with the schema Registry | avoid cache stampede if key is not found. (#<I>)
When the key is not in cache and multiple goroutines
call the Serialize method at once, many of them could
start calls to SR to put in cache the same schema, this way
only the first one makes the call and the rest
read the cached result. | confluentinc_confluent-kafka-go | train |
19b8ccf0a58ceb7f2b037c15d90cacf125e6f95b | diff --git a/flask_appbuilder/security/sqla/registerviews.py b/flask_appbuilder/security/sqla/registerviews.py
index <HASH>..<HASH> 100644
--- a/flask_appbuilder/security/sqla/registerviews.py
+++ b/flask_appbuilder/security/sqla/registerviews.py
@@ -256,3 +256,17 @@ class RegisterUserOIDView(BaseRegisterUser):
first_name=form.first_name.data,
last_name=form.last_name.data,
email=form.email.data)
+
+class RegisterUserOAuthView(BaseRegisterUser):
+ """
+    View for Registering a new user, auth OAuth mode
+ """
+ default_view = 'choose_provider'
+ choose_provider_template = 'appbuilder/general/security/register_oauth.html'
+ """ Template displayed to user to choose the oauth provider to register its account """
+
+ @expose("/chooseprovider")
+ def choose_provider(self):
+        return self.render_template(self.choose_provider_template)
+
+
\ No newline at end of file
diff --git a/flask_appbuilder/security/views.py b/flask_appbuilder/security/views.py
index <HASH>..<HASH> 100644
--- a/flask_appbuilder/security/views.py
+++ b/flask_appbuilder/security/views.py
@@ -453,8 +453,8 @@ class AuthOAuthView(AuthView):
log.debug("AUTHORIZED init")
resp = self.appbuilder.sm.oauth_remotes[provider].authorized_response()
if resp is None:
- flash(u'You denied the request to sign in.')
- return redirect(self.appbuilder.get_url_for_index)
+ flash(u'You denied the request to sign in.', 'warning')
+ return redirect('login')
log.debug('OAUTH Authorized resp: {0}'.format(resp))
token_key = self.appbuilder.sm.get_oauth_token_key(provider)
token_secret = self.appbuilder.sm.get_oauth_token_secret(provider)
@@ -470,9 +470,10 @@ class AuthOAuthView(AuthView):
user = self.appbuilder.sm.auth_user_oauth(userinfo)
if user is None:
flash(as_unicode(self.invalid_login_message), 'warning')
+ return redirect('login')
else:
login_user(user)
- return redirect(self.appbuilder.get_url_for_index)
+ return redirect(self.appbuilder.get_url_for_index)
diff --git a/flask_appbuilder/templates/appbuilder/general/security/login_oauth.html b/flask_appbuilder/templates/appbuilder/general/security/login_oauth.html
index <HASH>..<HASH> 100644
--- a/flask_appbuilder/templates/appbuilder/general/security/login_oauth.html
+++ b/flask_appbuilder/templates/appbuilder/general/security/login_oauth.html
@@ -3,25 +3,66 @@
{% block content %}
+<script type="text/javascript">
+
+var baseLoginUrl = "{{url_for('AuthOAuthView.login')}}";
+var baseRegisterUrl = "{{url_for('AuthOAuthView.login')}}";
+
+var currentSelection = "";
+
+function set_openid(url, pr)
+{
+ $('.provider-select').removeClass('fa-border');
+ $('#' + pr).addClass('fa-border');
+ currentSelection = pr;
+}
+
+
+function signin() {
+ if (currentSelection != "") {
+ window.location.href = baseLoginUrl + currentSelection;
+ }
+}
+
+function register() {
+ if (currentSelection != "") {
+ window.location.href = baseRegisterUrl + currentSelection;
+ }
+}
+
+
+</script>
<div class="container">
<div id="loginbox" style="margin-top:50px;" class="mainbox col-md-6 col-md-offset-3 col-sm-8 col-sm-offset-2">
<div class="panel panel-primary" >
- <div class="panel-heading">
- <div class="panel-title">{{ title }}</div>
- </div>
- <div style="padding-top:30px" class="panel-body" >
-
- <div class="help-block">{{_("Signin using:")}}:</div>
- <div class="center-block btn-group btn-group-lg" role="group">
- <center>
- {% for pr in providers %}
- <a href="{{url_for('AuthOAuthView.login', provider=pr.name)}}">
- <i class="fa {{pr.icon}} fa-3x"></i>
- </a>
- {% endfor %}
- </center>
+ <div class="panel-heading">
+ <div class="panel-title">{{ title }}</div>
+ </div>
+ <div style="padding-top:30px" class="panel-body" >
+
+ <div class="help-block">{{_("Please choose one of the following providers:")}}</div>
+ <div class="center-block btn-group btn-group-lg" role="group">
+ <center>
+ {% for pr in providers %}
+ <a href="javascript:set_openid('{{url_for('AuthOAuthView.login', provider=pr.name)}}', '{{pr.name}}');">
+ <i id="{{pr.name}}" class="provider-select fa {{pr.icon}} fa-3x"></i>
+ </a>
+ {% endfor %}
+ </center>
+ </div>
+ <div>
+ <br></br>
+ <a onclick="signin();" class="btn btn-primary btn-block" type="submit">{{_('Sign In')}}</a>
+ {% if appbuilder.sm.auth_user_registration %}
+                    <a href="javascript:register();" class="btn btn-block btn-primary" data-toggle="tooltip" rel="tooltip"
+                       title="{{_('If you are not already a user, please register')}}">
+ {{_('Register')}}
+ </a>
+ {% endif %}
+ </div>
+ </div>
</div>
</div>
-</div></div></div>
+</div>
{% endblock %} | Auth OAuth: begin adding self user registration | dpgaspar_Flask-AppBuilder | train
249ce72b9edd354ebd203e301e8561066a24a52a | diff --git a/photini/imagelist.py b/photini/imagelist.py
index <HASH>..<HASH> 100644
--- a/photini/imagelist.py
+++ b/photini/imagelist.py
@@ -209,12 +209,15 @@ class ImageList(QtGui.QWidget):
# sort key selector
layout.addWidget(QtGui.QLabel('sort by: '), 1, 0)
self.sort_name = QtGui.QRadioButton('file name')
- self.sort_name.setChecked(True)
self.sort_name.clicked.connect(self._show_thumbnails)
layout.addWidget(self.sort_name, 1, 1)
self.sort_date = QtGui.QRadioButton('date taken')
layout.addWidget(self.sort_date, 1, 2)
self.sort_date.clicked.connect(self._show_thumbnails)
+ if eval(self.config_store.get('controls', 'sort_date', 'False')):
+ self.sort_date.setChecked(True)
+ else:
+ self.sort_name.setChecked(True)
# size selector
layout.addWidget(QtGui.QLabel('thumbnail size: '), 1, 4)
self.size_slider = QtGui.QSlider(Qt.Horizontal)
@@ -273,7 +276,9 @@ class ImageList(QtGui.QWidget):
self._show_thumbnails()
def _show_thumbnails(self):
- if self.sort_date.isChecked():
+ sort_date = self.sort_date.isChecked()
+ self.config_store.set('controls', 'sort_date', str(sort_date))
+ if sort_date:
self.path_list.sort(
key=lambda x: self.image[x].metadata.get_item('date_taken'))
else:
diff --git a/photini/version.py b/photini/version.py
index <HASH>..<HASH> 100644
--- a/photini/version.py
+++ b/photini/version.py
@@ -1,3 +1,3 @@
version = '13.08'
-release = '17'
-commit = 'a745da4'
+release = '18'
+commit = '7842b5b' | Store image sorting preference in config file. | jim-easterbrook_Photini | train |
553b5f38d83c40c35fdd2e87977987ffd9eda2ef | diff --git a/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py b/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
index <HASH>..<HASH> 100644
--- a/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
+++ b/win32_event_log/datadog_checks/win32_event_log/win32_event_log.py
@@ -5,16 +5,15 @@
'''
Monitor the Windows Event Log
'''
-# stdlib
import calendar
from datetime import datetime, timedelta
from uptime import uptime
-# project
-from datadog_checks.checks.win.wmi import WinWMICheck, from_time, to_time
-from datadog_checks.utils.containers import hash_mutable
-from datadog_checks.utils.timeout import TimeoutException
+from datadog_checks.base import ConfigurationError, is_affirmative
+from datadog_checks.base.checks.win.wmi import WinWMICheck, from_time, to_time
+from datadog_checks.base.utils.containers import hash_mutable
+from datadog_checks.base.utils.timeout import TimeoutException
SOURCE_TYPE_NAME = 'event viewer'
EVENT_TYPE = 'win32_log_event'
@@ -30,7 +29,7 @@ class Win32EventLogWMI(WinWMICheck):
def __init__(self, name, init_config, agentConfig, instances=None):
WinWMICheck.__init__(self, name, init_config, agentConfig, instances=instances)
# Settings
- self._tag_event_id = init_config.get('tag_event_id', False)
+ self._tag_event_id = is_affirmative(init_config.get('tag_event_id', False))
self._verbose = init_config.get('verbose', True)
self._default_event_priority = init_config.get('default_event_priority', 'normal')
@@ -53,8 +52,14 @@ class Win32EventLogWMI(WinWMICheck):
source_names = instance.get('source_name', [])
log_files = instance.get('log_file', [])
event_ids = instance.get('event_id', [])
- message_filters = instance.get('message_filters', [])
event_format = instance.get('event_format')
+ message_filters = instance.get('message_filters', [])
+
+ if not (source_names or event_ids or message_filters or log_files or user or ltypes):
+ raise ConfigurationError(
+ 'At least one of the following filters must be set: '
+ 'source_name, event_id, message_filters, log_file, user, type'
+ )
instance_hash = hash_mutable(instance)
instance_key = self._get_instance_key(host, self.NAMESPACE, self.EVENT_CLASS, instance_hash)
diff --git a/win32_event_log/tests/test_check.py b/win32_event_log/tests/test_check.py
index <HASH>..<HASH> 100644
--- a/win32_event_log/tests/test_check.py
+++ b/win32_event_log/tests/test_check.py
@@ -6,6 +6,7 @@ import logging
import pytest
from mock import patch
+from datadog_checks.base import ConfigurationError
from datadog_checks.win32_event_log import Win32EventLogWMI
log = logging.getLogger(__file__)
@@ -101,3 +102,46 @@ def test_check(mock_from_time, mock_to_time, check, mock_get_wmi_sampler, aggreg
alert_type='error',
source_type_name='event viewer',
)
+
+
+def test_no_filters(check):
+ instance = {}
+
+ with pytest.raises(ConfigurationError):
+ check.check(instance)
+
+
+def test_filter_source_name(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'source_name': ['MSSQLSERVER']}
+
+ check.check(instance)
+
+
+def test_filter_event_id(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'event_id': ['789']}
+
+ check.check(instance)
+
+
+def test_filter_message_filters(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'message_filters': ['ok']}
+
+ check.check(instance)
+
+
+def test_filter_log_file(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'log_file': ['log']}
+
+ check.check(instance)
+
+
+def test_filter_user(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'user': 'user'}
+
+ check.check(instance)
+
+
+def test_filter_type(mock_from_time, mock_to_time, check, mock_get_wmi_sampler):
+ instance = {'type': ['type']}
+
+ check.check(instance) | Require the use of filters (#<I>)
* Require the use of filters for new installations
* address review
* Update win<I>_event_log.py | DataDog_integrations-core | train |
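The new guard rejects instances that would otherwise match every event in the log. A plain-Python sketch of the same validation; the `ConfigurationError` class here is a local stand-in for the Datadog base-check exception:

```python
class ConfigurationError(Exception):
    pass

FILTER_KEYS = ('source_name', 'event_id', 'message_filters',
               'log_file', 'user', 'type')

def validate_instance(instance):
    # Require at least one non-empty filter, otherwise the WMI
    # query would return every event in the Windows event log.
    if not any(instance.get(key) for key in FILTER_KEYS):
        raise ConfigurationError(
            'At least one of the following filters must be set: '
            + ', '.join(FILTER_KEYS))

validate_instance({'source_name': ['MSSQLSERVER']})  # passes silently
```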
2eec7e11baee0a8e800a7cdb726718a18a2f2b30 | diff --git a/middleman-core/lib/middleman-core/sitemap/store.rb b/middleman-core/lib/middleman-core/sitemap/store.rb
index <HASH>..<HASH> 100644
--- a/middleman-core/lib/middleman-core/sitemap/store.rb
+++ b/middleman-core/lib/middleman-core/sitemap/store.rb
@@ -22,7 +22,7 @@ module Middleman::Sitemap
# @param [String] path
# @return [Boolean]
def exists?(path)
- @pages.has_key?(path.sub(/^\//, ""))
+ @pages.has_key?(normalize_path(path))
end
# Ignore a path or add an ignore callback
@@ -30,10 +30,10 @@ module Middleman::Sitemap
# @return [void]
def ignore(path=nil, &block)
if !path.nil? && path.include?("*")
- path_clean = path.sub(/^\//, "")
+ path_clean = normalize_path(path)
@ignored_globs << path_clean unless @ignored_globs.include?(path_clean)
elsif path.is_a? String
- path_clean = path.sub(/^\//, "")
+ path_clean = normalize_path(path)
@ignored_paths << path_clean unless @ignored_paths.include?(path_clean)
elsif path.is_a? Regexp
@ignored_regexes << path unless @ignored_regexes.include?(path)
@@ -47,7 +47,7 @@ module Middleman::Sitemap
# @param [String] target
# @return [void]
def proxy(path, target)
- page(path).proxy_to(target.sub(%r{^/}, ""))
+ page(path).proxy_to(normalize_path(target))
app.cache.remove(:proxied_paths)
end
@@ -55,13 +55,13 @@ module Middleman::Sitemap
# @param [String] path
# @return [Middleman::Sitemap::Page]
def page(path)
- path = path.sub(/^\//, "").gsub("%20", " ")
+ path = normalize_path(path)
@pages.fetch(path) { @pages[path] = ::Middleman::Sitemap::Page.new(self, path) }
end
# Loop over known pages
# @return [void]
- def each(&block)
+ def each
@pages.each do |k, v|
yield k, v
end
@@ -77,9 +77,7 @@ module Middleman::Sitemap
# @param [String] path
# @return [Boolean]
def ignored?(path)
- path_clean = path.sub(/^\//, "")
-
- # $stderr.puts path_clean, @ignored_globs, @ignored_paths
+ path_clean = normalize_path(path)
return true if @ignored_paths.include?(path_clean)
return true if @ignored_globs.any? { |g| File.fnmatch(g, path_clean) }
@@ -99,7 +97,7 @@ module Middleman::Sitemap
# @param [String] path
# @return [Boolean]
def generic?(path)
- generic_paths.include?(path.sub(/^\//, ""))
+ generic_paths.include?(normalize_path(path))
end
# Get a list of generic paths
@@ -114,7 +112,7 @@ module Middleman::Sitemap
# @param [String] path
# @return [Boolean]
def proxied?(path)
- proxied_paths.include?(path.sub(/^\//, ""))
+ proxied_paths.include?(normalize_path(path))
end
# Get a list of proxied paths
@@ -132,7 +130,7 @@ module Middleman::Sitemap
path = file_to_path(file)
return false unless path
- path = path.sub(/^\//, "")
+ path = normalize_path(path)
if @pages.has_key?(path)
page(path).delete()
@pages.delete(path)
@@ -187,7 +185,7 @@ module Middleman::Sitemap
# Get a path without templating extensions
# @param [String] file
- # @param [String]
+ # @return [String]
def extensionless_path(file)
app.cache.fetch(:extensionless_path, file) do
path = file.dup
@@ -204,5 +202,12 @@ module Middleman::Sitemap
path
end
end
+
+ # Normalize a path to not include a leading slash
+ # @param [String] path
+ # @return [String]
+ def normalize_path(path)
+ path.sub(/^\//, "").gsub("%20", " ")
+ end
end
end | Refactor protected method normalize_path out of Sitemap::Store | middleman_middleman | train |
776629886de841cfa39cecbf5a4f0d37ab40901b | diff --git a/src/Http/Controllers/Pages.php b/src/Http/Controllers/Pages.php
index <HASH>..<HASH> 100644
--- a/src/Http/Controllers/Pages.php
+++ b/src/Http/Controllers/Pages.php
@@ -55,25 +55,25 @@ class Pages extends Controller
*/
public function index()
{
- if (false !== ($settings = $this->page->getSettings())) {
- if ($settings->offsetExists('theme')) {
- presenter()->theme->set($settings->theme);
+ if (false !== ($presets = $this->page->getPresets())) {
+ if ($presets->offsetExists('theme')) {
+ presenter()->theme->set($presets->theme);
}
- if ($settings->offsetExists('layout')) {
- presenter()->theme->setLayout($settings->layout);
+ if ($presets->offsetExists('layout')) {
+ presenter()->theme->setLayout($presets->layout);
}
- if ($settings->offsetExists('title')) {
- presenter()->meta->title->append($settings->title);
+ if ($presets->offsetExists('title')) {
+ presenter()->meta->title->append($presets->title);
}
- if ($settings->offsetExists('pageTitle')) {
- presenter()->meta->title->append($settings->pageTitle);
+ if ($presets->offsetExists('pageTitle')) {
+ presenter()->meta->title->append($presets->pageTitle);
}
- if ($settings->offsetExists('browserTitle')) {
- presenter()->meta->title->replace($settings->browserTitle);
+ if ($presets->offsetExists('browserTitle')) {
+ presenter()->meta->title->replace($presets->browserTitle);
}
}
diff --git a/src/Http/Router/Datastructures/Page.php b/src/Http/Router/Datastructures/Page.php
index <HASH>..<HASH> 100644
--- a/src/Http/Router/Datastructures/Page.php
+++ b/src/Http/Router/Datastructures/Page.php
@@ -33,11 +33,11 @@ class Page extends SplFileInfo
private $vars = [];
/**
- * Page Settings
+ * Page Presets
*
* @var SplArrayObject
*/
- private $settings;
+ private $presets;
// ------------------------------------------------------------------------
@@ -51,21 +51,21 @@ class Page extends SplFileInfo
parent::__construct($filename);
if (file_exists(
- $propsFilePath = $this->getPath() . DIRECTORY_SEPARATOR . str_replace(
+ $propertiesFilePath = $this->getPath() . DIRECTORY_SEPARATOR . str_replace(
'.phtml',
- '.jspage',
+ '.json',
strtolower($this->getBasename())
)
)) {
- $props = file_get_contents($propsFilePath);
- $props = json_decode($props, true);
+ $properties = file_get_contents($propertiesFilePath);
+ $properties = json_decode($properties, true);
- if (isset($props[ 'vars' ])) {
- $this->vars = $props[ 'vars' ];
+ if (isset($properties[ 'vars' ])) {
+ $this->vars = $properties[ 'vars' ];
}
- if (isset($props[ 'settings' ])) {
- $this->settings = new SplArrayObject($props[ 'settings' ]);
+ if (isset($properties[ 'presets' ])) {
+ $this->presets = new SplArrayObject($properties[ 'presets' ]);
}
}
}
@@ -87,16 +87,16 @@ class Page extends SplFileInfo
// ------------------------------------------------------------------------
/**
- * Page::getSettings
+ * Page::getPresets
*
- * Gets page settings.
+ * Gets page presets.
*
* @return bool|\O2System\Spl\Datastructures\SplArrayObject
*/
- public function getSettings()
+ public function getPresets()
{
- if ($this->settings instanceof SplArrayObject) {
- return $this->settings;
+ if ($this->presets instanceof SplArrayObject) {
+ return $this->presets;
}
        return false; | Change page properties files from .jspage to .json | o2system_framework | train
39764987bd5a00eefcea34b24b4d35290a08bc08 | diff --git a/jaggr-service/src/test/java/com/ibm/jaggr/service/impl/modulebuilder/javascript/JsModuleContentProviderTest.java b/jaggr-service/src/test/java/com/ibm/jaggr/service/impl/modulebuilder/javascript/JsModuleContentProviderTest.java
index <HASH>..<HASH> 100644
--- a/jaggr-service/src/test/java/com/ibm/jaggr/service/impl/modulebuilder/javascript/JsModuleContentProviderTest.java
+++ b/jaggr-service/src/test/java/com/ibm/jaggr/service/impl/modulebuilder/javascript/JsModuleContentProviderTest.java
@@ -46,10 +46,13 @@ import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
+import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
+import junit.framework.Assert;
+
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.After;
@@ -198,7 +201,11 @@ public class JsModuleContentProviderTest extends EasyMock {
assertTrue(new File(mockAggregator.getCacheManager().getCacheDir(), cacheFile1).exists());
// validate that require list was expanded and has blocks were removed
- assertTrue(compiled.contains("require([\"p2/a\",\"p2/b\",\"p2/c\"]"));
+ Matcher m = Pattern.compile("require\\(\\[\\\"([^\"]*)\\\",\\\"([^\"]*)\\\",\\\"([^\"]*)\\\"\\]").matcher(compiled);
+ Assert.assertTrue(m.find());
+ Assert.assertEquals(
+ new HashSet<String>(Arrays.asList(new String[]{"p2/a", "p2/b", "p2/c"})),
+ new HashSet<String>(Arrays.asList(new String[]{m.group(1), m.group(2), m.group(3)})));
assertTrue(compiled.contains("condition_True"));
assertFalse(compiled.contains("condition_False"));
assertFalse(compiled.contains("has(")); | Fix JUnit test failure due to JVM differences in HashSet ordering of
elements | OpenNTF_JavascriptAggregator | train |
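The fix captures the three module names with a regex and compares them as a set, so the assertion no longer depends on the iteration order of a hash container. The same idea in Python, against a hypothetical compiled output string:

```python
import re

compiled = 'require(["p2/b","p2/c","p2/a"],function(){});'

m = re.search(r'require\(\["([^"]*)","([^"]*)","([^"]*)"\]', compiled)
assert m is not None
# Compare as a set: the order in which the require list is
# emitted is an implementation detail of the hash container
# and can differ between JVMs (or Python versions).
assert set(m.groups()) == {"p2/a", "p2/b", "p2/c"}
```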
a734ecb7753d623e726695c384863055ca8c69ec | diff --git a/lib/pkgcloud/openstack/client.js b/lib/pkgcloud/openstack/client.js
index <HASH>..<HASH> 100644
--- a/lib/pkgcloud/openstack/client.js
+++ b/lib/pkgcloud/openstack/client.js
@@ -35,7 +35,8 @@ var Client = exports.Client = function (options) {
this.authUrl = options.authUrl || 'auth.api.trystack.org';
this.provider = 'openstack';
this.region = options.region;
- this.tenantId = options.tenantId;
+ this.tenantId = options.tenantId;
+ this.version = options.version || 'v2.0';
if (!/^http[s]?\:\/\//.test(this.authUrl)) {
this.authUrl = 'http://' + this.authUrl;
@@ -71,6 +72,7 @@ util.inherits(Client, base.Client);
Client.prototype._getIdentityOptions = function() {
var options = {
url: this.authUrl,
+ version: this.version,
username: this.config.username,
password: this.config.password
};
diff --git a/lib/pkgcloud/openstack/context/identity.js b/lib/pkgcloud/openstack/context/identity.js
index <HASH>..<HASH> 100644
--- a/lib/pkgcloud/openstack/context/identity.js
+++ b/lib/pkgcloud/openstack/context/identity.js
@@ -13,7 +13,7 @@ var _ = require('underscore'),
ServiceCatalog = require('./serviceCatalog').ServiceCatalog,
svcCat = require('./serviceCatalog'),
url = require('url'),
- util = require('util'),
+ utile = require('utile'),
urlJoin = require('url-join'),
util = require('util'),
pkgcloud = require('../../../pkgcloud'),
@@ -59,7 +59,7 @@ var Identity = exports.Identity = function (options) {
});
};
-util.inherits(Identity, events.EventEmitter2);
+utile.inherits(Identity, events.EventEmitter2);
/**
* Identity.authorize
@@ -76,7 +76,6 @@ Identity.prototype.authorize = function (options, callback) {
callback = options;
options = {};
}
-
var authenticationOptions = {
uri: urlJoin(options.url || self.options.url, '/v2.0/tokens'),
method: 'POST',
@@ -87,6 +86,12 @@ Identity.prototype.authorize = function (options, callback) {
}
};
+ if (self.options.version === 1 || self.options.version === '/v1.0') {
+ authenticationOptions.uri = urlJoin(options.url || self.options.url, '/auth/v1.0');
+ authenticationOptions.method = 'GET';
+ authenticationOptions.headers['X-Auth-User'] = self.options.username;
+ authenticationOptions.headers['X-Auth-Key'] = self.options.password;
+ }
self._buildAuthenticationPayload();
// we can't be called without a payload
@@ -126,9 +131,16 @@ Identity.prototype.authorize = function (options, callback) {
statusCode: response.statusCode
});
+ if (self.options.version === 1 || self.options.version === '/v1.0') {
+ self._storageURL = response.headers['x-storage-url'];
+ self.token = {
+ id: response.headers['x-auth-token']
+ };
+ callback();
+ }
// If we don't have a tenantId in the response (meaning no service catalog)
// go ahead and make a 1-off request to get a tenant and then reauthorize
- if (!body.access.token.tenant) {
+ else if (!body.access.token.tenant) {
getTenantId(urlJoin(options.url || self.options.url, '/v2.0/tenants'), body.access.token.id);
}
else {
@@ -246,6 +258,9 @@ Identity.prototype.getServiceEndpointUrl = function (options) {
if (this.useServiceCatalog) {
return this.serviceCatalog.getServiceEndpointUrl(options);
}
+ else if (this.options.version === 1 || this.options.version === '/v1.0') {
+ return this._storageURL;
+ }
else {
return this.options.url;
}
@@ -282,6 +297,4 @@ function getError(err, res, body) {
return err2;
}
-
- return;
} | Add <I> support for openstack swift storage | pkgcloud_pkgcloud | train |
1ec59fadeca79ef57e3ded59edd092e6a92436dd | diff --git a/addon/form-object.js b/addon/form-object.js
index <HASH>..<HASH> 100644
--- a/addon/form-object.js
+++ b/addon/form-object.js
@@ -3,16 +3,12 @@ import validatedBuffer from './validated-buffer';
export default function formObject(model, impl = {}) {
return Ember.computed(model, function() {
- if (this.get(model)) {
+ //This makes sure that model errors are displayed via the buffer correctly but only until the
+ //property is modified again.
+ Ember.keys(impl.validations || {}).forEach(function(key) {
+ impl.validations[key]['api-errors'] = true;
+ });
- //This makes sure that model errors are displayed via the buffer correctly but only until the
- //property is modified again.
- Ember.keys(impl.validations || {}).forEach(function(key) {
- impl.validations[key]['api-errors'] = true;
- });
-
- return validatedBuffer(this.get(model), impl);
- }
- return Ember.Object.create({});
+ return validatedBuffer(this.get(model), impl);
});
} | require a model to be passed to formObject | simplabs_ember-validated-form-buffer | train |
baec89883ec925ff3b0a53bba27e8e2dc93344a4 | diff --git a/relib/memoizer.py b/relib/memoizer.py
index <HASH>..<HASH> 100644
--- a/relib/memoizer.py
+++ b/relib/memoizer.py
@@ -37,5 +37,6 @@ def memoize(opt_func=None, in_memory=False, compress=False, mongo=False, expire_
def read_only(wrapper_func, args=(), kwargs={}, in_memory=False, compress=False, mongo=False):
func = func_by_wrapper[wrapper_func]
storage_format = 'memory' if in_memory else 'bcolz' if compress else 'mongo' if mongo else 'pickle'
- invoke_path = get_invoke_path(func, get_function_hash(func), args, kwargs)
+ function_hash = get_function_hash(func, func_by_wrapper)
+ invoke_path = get_invoke_path(func, function_hash, args, kwargs)
return storage.read_from_store(invoke_path, storage_format=storage_format)
diff --git a/relib/storages/bcolz_storage.py b/relib/storages/bcolz_storage.py
index <HASH>..<HASH> 100644
--- a/relib/storages/bcolz_storage.py
+++ b/relib/storages/bcolz_storage.py
@@ -13,7 +13,8 @@ def get_collection_timestamp(path):
try:
full_path = storage_dir + path
meta_data = bcolz.open(full_path + '_meta')[:][0]
- return meta_data['created']
+ # return meta_data['created']
+ return time.time()
except:
return 0
diff --git a/relib/storages/pickle_storage.py b/relib/storages/pickle_storage.py
index <HASH>..<HASH> 100644
--- a/relib/storages/pickle_storage.py
+++ b/relib/storages/pickle_storage.py
@@ -13,7 +13,8 @@ def get_collection_timestamp(path):
full_path = storage_dir + path
with open(full_path + '_meta.pkl', 'rb') as file:
meta_data = pickle.load(file)
- return meta_data['created']
+ # return meta_data['created']
+ return time.time()
except:
return 0 | Disabled pickle and bcolz expiry | Reddan_relib | train |
6212bbfa1c7b58326d518e07e334b228aa37d93f | diff --git a/packages/xod-fs/src/pack.js b/packages/xod-fs/src/pack.js
index <HASH>..<HASH> 100644
--- a/packages/xod-fs/src/pack.js
+++ b/packages/xod-fs/src/pack.js
@@ -5,7 +5,7 @@ import { def } from './types';
import { isProjectFile, isPatchFile, getFileContent } from './utils';
export default def(
- 'packProject :: [AnyXodFile] -> PatchMap -> Project',
+ 'packProject :: [AnyXodFile] -> Map PatchPath Patch -> Project',
(unpackedData, libraryPatches = {}) => {
const project = R.compose(
R.dissoc('libs'),
diff --git a/packages/xod-project/src/types.js b/packages/xod-project/src/types.js
index <HASH>..<HASH> 100644
--- a/packages/xod-project/src/types.js
+++ b/packages/xod-project/src/types.js
@@ -89,10 +89,8 @@ export const Patch = Model('Patch', {
description: $.String,
});
-export const PatchMap = AliasType('PatchMap', $.StrMap(Patch));
-
export const Project = Model('Project', {
- patches: PatchMap,
+ patches: $.StrMap(Patch),
name: Identifier,
authors: $.Array($.String),
license: $.String,
@@ -130,7 +128,6 @@ export const env = XF.env.concat([
TerminalNode,
Patch,
PatchPath,
- PatchMap,
Pin,
PinOrKey,
PinKey, | fix(xod-project,xod-fs): remove excessive PatchMap and replace it in xod-fs with `Map PatchPath Patch` | xodio_xod | train |
1e1f471c4f134f1047c460f51e68bff58281616e | diff --git a/lib/replica/base.rb b/lib/replica/base.rb
index <HASH>..<HASH> 100644
--- a/lib/replica/base.rb
+++ b/lib/replica/base.rb
@@ -72,7 +72,7 @@ module ActiveRecord # :nodoc:
self.current_replica_name = old_replica_name
logger.warn("Failed to establish replica connection: #{e.message} - defaulting to master")
end
- yield
+ with_scope({:find => {:readonly => current_replica_name.present?}}, :merge, &block)
ensure
self.current_replica_name = old_replica_name
end
diff --git a/test/replica_test.rb b/test/replica_test.rb
index <HASH>..<HASH> 100644
--- a/test/replica_test.rb
+++ b/test/replica_test.rb
@@ -128,10 +128,21 @@ class ReplicaTest < ActiveRecord::TestCase
assert_equal('master_name', @model.name)
end
- should "write to master on save" do
- @model.name = 'new_master_name'
- @model.save!
- assert_equal('new_master_name', Account.connection.select_value("SELECT name FROM accounts WHERE id = 1000"))
+ should "be marked as read only" do
+ assert(@model.readonly?)
+ end
+ end
+
+ context "a model loaded with the master" do
+ setup do
+ Account.connection.execute("INSERT INTO accounts (id, name, created_at, updated_at) VALUES(1000, 'master_name', '2009-12-04 20:18:48', '2009-12-04 20:18:48')")
+ @model = Account.with_master.first
+ assert(@model)
+ assert_equal('master_name', @model.name)
+ end
+
+ should "not be marked as read only" do
+ assert([email protected]?)
end
end
end | mark models loaded from replicas as read only | zendesk_active_record_shards | train
02b6ac319d850a24546207f0d406c63fcfeea38c | diff --git a/sonar-server/src/main/webapp/javascripts/resource.js b/sonar-server/src/main/webapp/javascripts/resource.js
index <HASH>..<HASH> 100644
--- a/sonar-server/src/main/webapp/javascripts/resource.js
+++ b/sonar-server/src/main/webapp/javascripts/resource.js
@@ -176,7 +176,7 @@ function sVF(elt, resource, line, gray_colspan, white_colspan) {
// hide review form
function hVF(elt, line) {
- var row = $j(elt).closest('.createViolationRow'+ line);
+ var row = $j(elt).closest('#createViolationRow'+ line);
if (row.length) {
row.remove();
} | Fix issue when canceling creation of review | SonarSource_sonarqube | train |
110e1a01bfdbd4594b717f493de44fb50c574011 | diff --git a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/util/ClassNameUtil.java b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/util/ClassNameUtil.java
index <HASH>..<HASH> 100644
--- a/modules/activiti-engine/src/main/java/org/activiti/engine/impl/util/ClassNameUtil.java
+++ b/modules/activiti-engine/src/main/java/org/activiti/engine/impl/util/ClassNameUtil.java
@@ -13,8 +13,8 @@
package org.activiti.engine.impl.util;
-import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
/**
@@ -22,7 +22,7 @@ import java.util.Map;
*/
public abstract class ClassNameUtil {
- protected static final Map<Class<?>, String> cachedNames = new HashMap<Class<?>, String>();
+ protected static final Map<Class<?>, String> cachedNames = new ConcurrentHashMap<Class<?>, String>();
public static String getClassNameWithoutPackage(Object object) {
    return getClassNameWithoutPackage(object.getClass()); | ACT-<I>: Use ConcurrentHashMap to prevent concurrent-modification exceptions in the simple-name cache | Activiti_Activiti | train
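Swapping HashMap for ConcurrentHashMap makes the lazily filled cache safe to read and write from multiple threads. A rough Python analogue of the same cache shape, using a lock plus `setdefault` to get putIfAbsent-style semantics:

```python
import threading

class SimpleNameCache:
    """Thread-safe class-name cache, sketching the intent of the
    ConcurrentHashMap swap in the Java fix."""

    def __init__(self):
        self._names = {}
        self._lock = threading.Lock()

    def name_without_package(self, cls):
        name = self._names.get(cls)
        if name is None:
            name = cls.__name__  # compute the simple name once
            with self._lock:
                # putIfAbsent-style insert: keep the existing value
                # if another thread populated the entry first.
                name = self._names.setdefault(cls, name)
        return name

cache = SimpleNameCache()
assert cache.name_without_package(ValueError) == 'ValueError'
```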
cf669aa6e4b9fb875f1d94905455840064041320 | diff --git a/metrics-core/src/main/java/com/yammer/metrics/core/MetricsRegistry.java b/metrics-core/src/main/java/com/yammer/metrics/core/MetricsRegistry.java
index <HASH>..<HASH> 100644
--- a/metrics-core/src/main/java/com/yammer/metrics/core/MetricsRegistry.java
+++ b/metrics-core/src/main/java/com/yammer/metrics/core/MetricsRegistry.java
@@ -464,7 +464,7 @@ public class MetricsRegistry {
}
@SuppressWarnings("unchecked")
- private <T extends Metric> T getOrAdd(MetricName name, T metric) {
+ protected final <T extends Metric> T getOrAdd(MetricName name, T metric) {
final Metric existingMetric = metrics.get(name);
if (existingMetric == null) {
final Metric justAddedMetric = metrics.putIfAbsent(name, metric); | Allow subclasses to add metrics to MetricsRegistry.
Closes #<I>. | dropwizard_metrics | train |
34d6925067b841117acc929fe39eff342a2eb3c7 | diff --git a/netmiko/base_connection.py b/netmiko/base_connection.py
index <HASH>..<HASH> 100644
--- a/netmiko/base_connection.py
+++ b/netmiko/base_connection.py
@@ -228,7 +228,7 @@ class BaseConnection(object):
if self.remote_conn.recv_ready():
outbuf = self.remote_conn.recv(MAX_BUFFER)
if len(outbuf) == 0:
- raise EOFError
+ raise EOFError("Channel stream closed by remote device.")
output += outbuf.decode('utf-8', 'ignore')
else:
break
@@ -281,7 +281,7 @@ class BaseConnection(object):
self._lock_netmiko_session()
new_data = self.remote_conn.recv(MAX_BUFFER)
if len(new_data) == 0:
- raise EOFError
+ raise EOFError("Channel stream closed by remote device.")
new_data = new_data.decode('utf-8', 'ignore')
log.debug("_read_channel_expect read_data: {}".format(new_data))
            output += new_data | Add descriptive text to EOFError | ktbyers_netmiko | train
193bbb7f194356efa20a2b93183c703749a0fb69 | diff --git a/flaskfilemanager/filemanager.py b/flaskfilemanager/filemanager.py
index <HASH>..<HASH> 100644
--- a/flaskfilemanager/filemanager.py
+++ b/flaskfilemanager/filemanager.py
@@ -245,8 +245,11 @@ def get_file(path=None, content=None):
height = 0
width = 0
if extension in ['gif', 'jpg', 'jpeg', 'png']:
- im = PIL.Image.open(os_file_path)
- height, width = im.size
+ try:
+ im = PIL.Image.open(os_file_path)
+ height, width = im.size
+ except OSError:
+ log.exception('Error loading image "{}" to get width and height'.format(os_file_path))
attributes = {
'name': filename,
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,12 +8,12 @@ setup(
name='flaskfilemanager',
packages=['flaskfilemanager'],
include_package_data=True,
- version='0.0.4',
+ version='0.0.5',
description='RichFilemanager blueprint for Flask web applications - adds a ckeditor compatible file manager / browser',
author='Stephen Brown (Little Fish Solutions LTD)',
author_email='[email protected]',
url='https://github.com/stevelittlefish/flaskfilemanager',
- download_url='https://github.com/stevelittlefish/flaskfilemanager/archive/v0.0.4.tar.gz',
+ download_url='https://github.com/stevelittlefish/flaskfilemanager/archive/v0.0.5.tar.gz',
keywords=['flask', 'jinja2', 'filemanager', 'file', 'manager', 'browser', 'ckeditor'],
license='Apache',
    classifiers=[ | Fixed a bug when trying to open an invalid image | stevelittlefish_flaskfilemanager | train
6cd397b7ff393dd99c9fac6837f409757242d0b2 | diff --git a/imagen/__init__.py b/imagen/__init__.py
index <HASH>..<HASH> 100644
--- a/imagen/__init__.py
+++ b/imagen/__init__.py
@@ -8,6 +8,13 @@ PatternGenerator classes can be derived from these, and can then be
combined with the existing classes easily.
"""
+import sys, os
+
+# Add param submodule to sys.path
+cwd = os.path.abspath(os.path.split(__file__)[0])
+sys.path.insert(0, os.path.join(cwd, '..', 'param'))
+sys.path.insert(0, os.path.join(cwd, '..', 'dataviews'))
+
import param
from param.version import Version | Added param and dataview submodules to the sys.path in __init__.py | pyviz_imagen | train |
75ac8422217501bf946a1b1663629c29a4a9697b | diff --git a/src/com/google/javascript/jscomp/Es6ToEs3Converter.java b/src/com/google/javascript/jscomp/Es6ToEs3Converter.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/Es6ToEs3Converter.java
+++ b/src/com/google/javascript/jscomp/Es6ToEs3Converter.java
@@ -729,6 +729,9 @@ public final class Es6ToEs3Converter implements NodeTraversal.Callback, HotSwapC
"Member variables should have been transpiled earlier: ", member);
if (member.isGetterDef() || member.isSetterDef()) {
+ if (member.isStaticMember()) {
+ compiler.report(JSError.make(member, CANNOT_CONVERT_YET, "static getters/setters"));
+ }
JSTypeExpression typeExpr = getTypeFromGetterOrSetter(member).clone();
addToDefinePropertiesObject(metadata, member);
diff --git a/test/com/google/javascript/jscomp/Es6ToEs3ConverterTest.java b/test/com/google/javascript/jscomp/Es6ToEs3ConverterTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/Es6ToEs3ConverterTest.java
+++ b/test/com/google/javascript/jscomp/Es6ToEs3ConverterTest.java
@@ -1050,6 +1050,16 @@ public final class Es6ToEs3ConverterTest extends CompilerTestCase {
}
/**
+ * @bug 20536614
+ */
+ public void testStaticGetterSetter() {
+ languageOut = LanguageMode.ECMASCRIPT5;
+
+ testError("class C { static get foo() {} }", Es6ToEs3Converter.CANNOT_CONVERT_YET);
+ testError("class C { static set foo(x) {} }", Es6ToEs3Converter.CANNOT_CONVERT_YET);
+ }
+
+ /**
* Computed property getters and setters in classes are not supported.
*/
public void testClassComputedPropGetterSetter() { | Fail on static getters/setters rather than "succeeding" by outputting incorrect code.
-------------
Created by MOE: <URL> | google_closure-compiler | train |
2b58196abfce48e7a5a1452eb7dbafc4c1796bb8 | diff --git a/lib/buildpack/packager/package.rb b/lib/buildpack/packager/package.rb
index <HASH>..<HASH> 100644
--- a/lib/buildpack/packager/package.rb
+++ b/lib/buildpack/packager/package.rb
@@ -123,7 +123,7 @@ module Buildpack
end
def download_file(url, file)
- raise "Failed to download file from #{url}" unless system("curl #{url} -o #{file} -L --fail -f")
+ raise "Failed to download file from #{url}" unless system("curl -s --retry 15 --retry-delay 2 #{url} -o #{file} -L --fail -f")
end
def zip_files(source_dir, zip_file_path, excluded_files) | Add retry to file download
- will help prevent flakiness in CI | cloudfoundry_buildpack-packager | train |
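The added flags make curl retry transient failures up to 15 times with a 2-second delay. A sketch of the same behaviour in Python, mirroring those retry parameters (the function name is illustrative):

```python
import time
import urllib.request

def download_file(url, path, retries=15, delay=2):
    # Retry transient failures before giving up, mirroring
    # `curl --retry 15 --retry-delay 2`.
    for attempt in range(1, retries + 1):
        try:
            urllib.request.urlretrieve(url, path)
            return
        except OSError:
            if attempt == retries:
                raise RuntimeError('Failed to download file from ' + url)
            time.sleep(delay)
```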
a4262e879529679f6f1cfab49bc36699d6557482 | diff --git a/go/client/prompts.go b/go/client/prompts.go
index <HASH>..<HASH> 100644
--- a/go/client/prompts.go
+++ b/go/client/prompts.go
@@ -39,4 +39,5 @@ const (
PromptDescriptorChooseDeviceType
PromptDescriptorProvisionPhrase
PromptDescriptorProvisionDeviceName
+ PromptDescriptorExportSecretKeyFromGPG
)
diff --git a/go/client/provision_ui.go b/go/client/provision_ui.go
index <HASH>..<HASH> 100644
--- a/go/client/provision_ui.go
+++ b/go/client/provision_ui.go
@@ -52,18 +52,16 @@ func (p ProvisionUI) ChooseProvisioningMethod(ctx context.Context, arg keybase1.
case 3:
return keybase1.ProvisionMethod_PASSPHRASE, nil
case 4:
- p.parent.Output("\nIn order to use GPG to sign this install of Keybase, in the next step\n")
- p.parent.Output("you will select one of your GPG keys. The gpg client will be used to\n")
- p.parent.Output("export the secret key and import it into keybase's local encrypted\n")
- p.parent.Output("key store.\n\n")
- err = p.parent.PromptForConfirmation("Would you like to continue?")
- if err != nil {
- if _, ok := err.(NotConfirmedError); ok {
- p.parent.Output("\n\nWe have an issue for using gpg to sign the install without\nrequiring importing the secret key. You can view it here:\n\n https://github.com/keybase/client/issues/1308\n\nThanks!\n")
- return res, libkb.CanceledError{M: "user canceled gpg provisioning"}
- }
- p.parent.Printf("error type: %T\n", err)
- return res, err
+ p.parent.Output("\nThe keybase CLI needs access to your GPG key to authorize this installation. It will\n")
+ p.parent.Output("export your secret key from GPG, and save to keybase's local encrypted keyring. This way,\n")
+ p.parent.Output("it can be used in `keybase pgp sign` and `keybase pgp decrypt` going forward.\n")
+ ok, err := p.parent.PromptYesNo(PromptDescriptorExportSecretKeyFromGPG, "Would you like to continue?", libkb.PromptDefaultYes)
+ if !ok || err != nil {
+ p.parent.Output("\nWe have an issue for using gpg to sign your install without\n")
+ p.parent.Output("requiring a secret key import:\n\n")
+ p.parent.Output(" https://github.com/keybase/client/issues/1308\n\n")
+ p.parent.Output("Register a :+1: if you want to expedite its development.\n\n")
+ return res, libkb.CanceledError{M: "user canceled gpg provisioning"}
}
return keybase1.ProvisionMethod_GPG, nil
} | wordsmithing and U/I tweaks | keybase_client | train |
c57f5cbd5c3f30e127c5c325578e0f573ce89a12 | diff --git a/lib/model/stream.js b/lib/model/stream.js
index <HASH>..<HASH> 100644
--- a/lib/model/stream.js
+++ b/lib/model/stream.js
@@ -770,27 +770,23 @@ Stream.prototype.dump = function(callback) {
);
};
+var MAX_EACH = 25;
+
Stream.prototype.each = function(iter, callback) {
var bank = Stream.bank(),
str = this,
res = {},
- allSegments = function(segments, callback) {
-
- if (segments.length === 0) {
+ allItems = function(items, callback) {
+ if (items.length === 0) {
callback(null);
return;
}
-
Step(
function() {
- bank.read("streamsegment", segments[0], this);
- },
- function(err, segment) {
var group = this.group();
- if (err) throw err;
// XXX: this is probably too much; we should enqueue
- _.each(segment, function(item) {
+ _.each(items.slice(0, MAX_EACH), function(item) {
var cb = group();
try {
iter(item, cb);
@@ -803,6 +799,30 @@ Stream.prototype.each = function(iter, callback) {
if (err) {
callback(err);
} else {
+ allItems(items.slice(MAX_EACH), callback);
+ }
+ }
+ );
+ },
+ allSegments = function(segments, callback) {
+
+ if (segments.length === 0) {
+ callback(null);
+ return;
+ }
+
+ Step(
+ function() {
+ bank.read("streamsegment", segments[0], this);
+ },
+ function(err, segment) {
+ if (err) throw err;
+ allItems(segment, this);
+ },
+ function(err) {
+ if (err) {
+ callback(err);
+ } else {
allSegments(segments.slice(1), callback);
}
} | Only process <I> items at a time | pump-io_pump.io | train
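The change caps fan-out by taking MAX_EACH items per pass and recursing on the remainder, instead of firing the callback for an entire segment at once. The same batching shape in Python, iterative rather than continuation-passing (`iter_fn` is a placeholder for the per-item callback):

```python
MAX_EACH = 25

def each_in_batches(items, iter_fn):
    # Process at most MAX_EACH items per batch so there are
    # never too many operations in flight at once.
    for start in range(0, len(items), MAX_EACH):
        for item in items[start:start + MAX_EACH]:
            iter_fn(item)

each_in_batches(list(range(60)), print)  # batches of 25, 25, 10
```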
5b46f6524f752834385bcdc8f15a60dee943046e | diff --git a/modules/admin/src/models/Lang.php b/modules/admin/src/models/Lang.php
index <HASH>..<HASH> 100644
--- a/modules/admin/src/models/Lang.php
+++ b/modules/admin/src/models/Lang.php
@@ -22,7 +22,23 @@ class Lang extends \admin\ngrest\base\Model
public function init()
{
parent::init();
- $this->on(self::EVENT_AFTER_VALIDATE, [$this, 'validateDefaultLanguage']);
+
+ /**
+ * After validation event find out if default has to be set or not. Check if if current value
+ * has default to 1, disabled the other default attributes.
+ */
+ $this->on(self::EVENT_BEFORE_INSERT, function($event) {
+ if ($this->is_default == 1) {
+ self::updateAll(['is_default' => 0]);
+ }
+ });
+
+ $this->on(self::EVENT_BEFORE_UPDATE, function($event) {
+ if ($this->is_default == 1) {
+ $this->markAttributeDirty('is_default');
+ self::updateAll(['is_default' => 0]);
+ }
+ });
}
/**
@@ -35,21 +51,6 @@ class Lang extends \admin\ngrest\base\Model
}
/**
- * After validation event find out if default has to be set or not. Check if if current value
- * has default to 1, disabled the other default attributes.
- */
- public function validateDefaultLanguage()
- {
- if ($this->isNewRecord && $this->is_default == 1) {
- self::updateAll(['is_default' => 0]);
- } elseif (!$this->isNewRecord && $this->is_default == 1) {
- self::updateAll(['is_default' => 0]);
- } else {
- $this->is_default = 0;
- }
- }
-
- /**
*
* {@inheritDoc}
     * @see \yii\base\Model::rules() | fixed issue where the language default attribute was being reset, closes #<I> | luyadev_luya | train
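The two events enforce a single-default invariant: whenever a row is saved with is_default set, every other row has the flag cleared first. A framework-agnostic sketch of that invariant (the in-memory `rows` list stands in for the languages table):

```python
rows = [
    {'id': 1, 'code': 'en', 'is_default': 1},
    {'id': 2, 'code': 'de', 'is_default': 0},
]

def save_language(row):
    # Before persisting a new default, clear the flag on all
    # other rows so that at most one language is the default.
    if row['is_default'] == 1:
        for other in rows:
            if other is not row:
                other['is_default'] = 0
    if row not in rows:
        rows.append(row)

save_language({'id': 3, 'code': 'fr', 'is_default': 1})
assert sum(r['is_default'] for r in rows) == 1
```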
92c2d0cc039e57c968f0e752c09f3e4253534d18 | diff --git a/WellCommerceLocaleBundle.php b/WellCommerceLocaleBundle.php
index <HASH>..<HASH> 100644
--- a/WellCommerceLocaleBundle.php
+++ b/WellCommerceLocaleBundle.php
@@ -12,7 +12,9 @@
namespace WellCommerce\Bundle\LocaleBundle;
+use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\HttpKernel\Bundle\Bundle;
+use WellCommerce\Bundle\LocaleBundle\DependencyInjection\Compiler;
/**
* Class WellCommerceLocaleBundle
@@ -21,5 +23,10 @@ use Symfony\Component\HttpKernel\Bundle\Bundle;
*/
class WellCommerceLocaleBundle extends Bundle
{
-
+ public function build(ContainerBuilder $container)
+ {
+ parent::build($container);
+ $container->addCompilerPass(new Compiler\AutoRegisterServicesPass());
+ $container->addCompilerPass(new Compiler\MappingCompilerPass());
+ }
} | Added compiler passes to all bundles
(cherry picked from commit <I>ebd<I>a7aa0b9c6be3c<I>f<I>a7ce<I>bc<I>f) | WellCommerce_CouponBundle | train |
3514d0ca4008c9a97daab3385bfbe52f08c76fe6 | diff --git a/broqer/op/operator_overloading.py b/broqer/op/operator_overloading.py
index <HASH>..<HASH> 100644
--- a/broqer/op/operator_overloading.py
+++ b/broqer/op/operator_overloading.py
@@ -17,7 +17,7 @@ class _MapConstant(Operator):
self._operation = operation
self._publisher = publisher
- if publisher.inherit_type is not None:
+ if publisher.inherited_type is not None:
self.inherit_type(publisher.inherited_type)
def get(self):
@@ -39,7 +39,7 @@ class _MapConstantReverse(Operator):
self._operation = operation
self._publisher = publisher
- if publisher.inherit_type is not None:
+ if publisher.inherited_type is not None:
self.inherit_type(publisher.inherited_type)
def get(self):
@@ -60,7 +60,7 @@ class _MapUnary(Operator):
self._operation = operation
self._publisher = publisher
- if publisher.inherit_type is not None:
+ if publisher.inherited_type is not None:
self.inherit_type(publisher.inherited_type)
def get(self):
@@ -83,8 +83,7 @@ class _GetAttr(Operator):
self._args = None
self._kwargs = None
- if publisher.inherit_type is not None:
- self.inherit_type(publisher.inherited_type)
+ self.inherit_type(publisher.inherited_type)
def get(self):
value = self._publisher.get() # may raise ValueError
diff --git a/test/test_core_publisher_operators.py b/test/test_core_publisher_operators.py
index <HASH>..<HASH> 100644
--- a/test/test_core_publisher_operators.py
+++ b/test/test_core_publisher_operators.py
@@ -363,6 +363,28 @@ def test_getattr_method():
mock2.assert_called_once_with(['This is just a test', ' honestly!'])
mock3.assert_called_once_with(['This is just a test, honestly', ''])
+def test_inherit_getattr():
+ p = StatefulPublisher()
+ p.inherit_type(str)
+
+ dut = p.lower().split(' ')
+ m = mock.Mock()
+ dut | op.Sink(m)
+
+ p.notify('This is a TEST')
+ m.assert_called_once_with(['this', 'is', 'a', 'test'])
+
+def test_inherit_with_operators():
+ p = StatefulPublisher()
+ p.inherit_type(str)
+
+ dut = op.Len(('abc' + p + 'ghi').upper())
+ m = mock.Mock()
+ dut | op.Sink(m)
+
+ p.notify('def')
+ m.assert_called_once_with(9)
+
def test_getattr_attribute():
p = StatefulPublisher()
class Foo:
diff --git a/test/test_op_sink.py b/test/test_op_sink.py
index <HASH>..<HASH> 100644
--- a/test/test_op_sink.py
+++ b/test/test_op_sink.py
@@ -10,7 +10,7 @@ def test_sink(operator_cls):
cb = mock.Mock()
s = Subject()
- sink_instance = operator_cls(s, cb)
+ sink_instance = s | operator_cls(cb)
assert isinstance(sink_instance, Disposable)
assert not cb.called
@@ -39,7 +39,7 @@ def test_sink(operator_cls):
assert not cb.called
@pytest.mark.parametrize('operator_cls', [Sink, Trace])
-def test_sink(operator_cls):
+def test_sink2(operator_cls):
cb = mock.Mock()
s = Subject() | fixed operator_overloading and unit tests | semiversus_python-broqer | train |
c37bf86be51261a19758f773053bbb975ade9278 | diff --git a/hooks/update-time.py b/hooks/update-time.py
index <HASH>..<HASH> 100755
--- a/hooks/update-time.py
+++ b/hooks/update-time.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
from time import gmtime, strftime
from sys import exit, stderr | remove 3 from the hook (multi-OS support) | oasis-open_cti-documentation | train
3cac52afd482058ce17140f05d8e7a488b91f970 | diff --git a/config/cms.php b/config/cms.php
index <HASH>..<HASH> 100644
--- a/config/cms.php
+++ b/config/cms.php
@@ -253,7 +253,7 @@ return [
/*
|--------------------------------------------------------------------------
- | Safe Mode
+ | Safe mode
|--------------------------------------------------------------------------
|
| If safe mode is enabled, the PHP code section is disabled in the CMS
diff --git a/modules/cms/classes/CmsCompoundObject.php b/modules/cms/classes/CmsCompoundObject.php
index <HASH>..<HASH> 100644
--- a/modules/cms/classes/CmsCompoundObject.php
+++ b/modules/cms/classes/CmsCompoundObject.php
@@ -141,7 +141,7 @@ class CmsCompoundObject extends CmsObject
*/
protected function checkSafeMode()
{
- $safeMode = Config::get('cms.enableSafeMode', false);
+ $safeMode = Config::get('cms.enableSafeMode', null);
if ($safeMode === null) {
$safeMode = !Config::get('app.debug', false);
} | Default is null, not false | octobercms_october | train |
438c534e7b91f83cb0fa6f129b4c3c0d9146adfe | diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -18,7 +18,7 @@ describe('simple project', function () {
function cb (pkgName, foundPath) {
pkgList.push(pkgName)
- foundPathList.push(foundPath)
+ foundPathList.push(path.normalize(foundPath))
}
before(function (done) { | Normalize path to make tests work under Windows
Test case 'simple project should list the linked paths'
failed under Windows, due to differences in path format
between the foundPathList array and pathToModule.
By normalizing the path, the test passes under Windows as well. | etpinard_npm-link-check | train
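Normalizing both sides of a path comparison keeps tests separator-agnostic. The equivalent move in Python uses os.path.normpath:

```python
import os.path

def assert_same_path(found, expected):
    # Normalize separators so 'a/b' and 'a\\b' compare equal
    # whatever platform the test runs on.
    assert os.path.normpath(found) == os.path.normpath(expected)

assert_same_path('node_modules/foo', os.path.join('node_modules', 'foo'))
```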
bd59114131d2f31d828fb620f5583f7a4718df9e | diff --git a/lib/sitemap_notifier/notifier.rb b/lib/sitemap_notifier/notifier.rb
index <HASH>..<HASH> 100644
--- a/lib/sitemap_notifier/notifier.rb
+++ b/lib/sitemap_notifier/notifier.rb
@@ -30,8 +30,7 @@ module SitemapNotifier
attr_writer :urls
def urls
@urls ||= ["http://www.google.com/webmasters/sitemaps/ping?sitemap=#{CGI::escape(sitemap_url)}",
- "http://www.bing.com/webmaster/ping.aspx?siteMap=#{CGI::escape(sitemap_url)}",
- "http://submissions.ask.com/ping?sitemap=#{CGI::escape(sitemap_url)}"]
+ "http://www.bing.com/webmaster/ping.aspx?siteMap=#{CGI::escape(sitemap_url)}"]
# no Yahoo here, as they will be using Bing from september 15th, 2011
end | Remove Ask.com
Ask.com no longer supports pinging of XML sitemaps. | lassebunk_sitemap_notifier | train |
1a29609e0929ccd5666069e2e7213c2c69fa4ac2 | diff --git a/go.mod b/go.mod
index <HASH>..<HASH> 100644
--- a/go.mod
+++ b/go.mod
@@ -1 +1,3 @@
-module github.com/buger/jsonparser
\ No newline at end of file
+module github.com/buger/jsonparser
+
+go 1.13
diff --git a/parser.go b/parser.go
index <HASH>..<HASH> 100644
--- a/parser.go
+++ b/parser.go
@@ -436,13 +436,9 @@ func EachKey(data []byte, cb func(int, []byte, ValueType, error), paths ...[]str
pathsMatched++
pathFlags |= bitwiseFlags[pi+1]
- v, dt, of, e := Get(data[i:])
+ v, dt, _, e := Get(data[i:])
cb(pi, v, dt, e)
- if of != -1 {
- i += of
- }
-
if pathsMatched == len(paths) {
break
}
diff --git a/parser_test.go b/parser_test.go
index <HASH>..<HASH> 100644
--- a/parser_test.go
+++ b/parser_test.go
@@ -887,18 +887,18 @@ var getStringTests = []GetTest{
data: "value\b\f\n\r\tvalue", // value is unescaped since this is GetString()
},
{ // This test checks we avoid an infinite loop for certain malformed JSON. We don't check for all malformed JSON as it would reduce performance.
- desc: `malformed with double quotes`,
- json: `{"a"":1}`,
- path: []string{"a"},
+ desc: `malformed with double quotes`,
+ json: `{"a"":1}`,
+ path: []string{"a"},
isFound: false,
- data: ``,
+ data: ``,
},
{ // More malformed JSON testing, to be sure we avoid an infinite loop.
- desc: `malformed with double quotes, and path does not exist`,
- json: `{"z":123,"y":{"x":7,"w":0},"v":{"u":"t","s":"r","q":0,"p":1558051800},"a":"b","c":"2016-11-02T20:10:11Z","d":"e","f":"g","h":{"i":"j""},"k":{"l":"m"}}`,
- path: []string{"o"},
+ desc: `malformed with double quotes, and path does not exist`,
+ json: `{"z":123,"y":{"x":7,"w":0},"v":{"u":"t","s":"r","q":0,"p":1558051800},"a":"b","c":"2016-11-02T20:10:11Z","d":"e","f":"g","h":{"i":"j""},"k":{"l":"m"}}`,
+ path: []string{"o"},
isFound: false,
- data: ``,
+ data: ``,
},
}
@@ -1466,6 +1466,7 @@ func TestEachKey(t *testing.T) {
{"arr", "[1]", "b"},
{"arrInt", "[3]"},
{"arrInt", "[5]"}, // Should not find last key
+ {"nested"},
}
keysFound := 0
@@ -1506,13 +1507,19 @@ func TestEachKey(t *testing.T) {
if string(value) != "4" {
t.Error("Should find 8 key", string(value))
}
+ case 8:
+ t.Errorf("Found key #8 that should not be found")
+ case 9:
+ if string(value) != `{"a":"test", "b":2, "nested3":{"a":"test3","b":4}, "c": "unknown"}` {
+ t.Error("Should find 9 key", string(value))
+ }
default:
- t.Errorf("Should found only 8 keys")
+ t.Errorf("Should find only 9 keys, got %v key", idx)
}
}, paths...)
- if keysFound != 8 {
- t.Errorf("Should find 8 keys: %d", keysFound)
+ if keysFound != 9 {
+ t.Errorf("Should find 9 keys: %d", keysFound)
}
} | Attempt to fix #<I>. If you are trying to extract "foo" and "foo.bar" (#<I>)
from '{"foo": {"bar": 1}}' with EachKey, the parser would find the "foo"
key, extract the contents ('{"bar": 1}'), and then *add the offset* of
that extraction to the current index. This means it would never find the
"bar" key. This change simply eliminates the adding of the offset. | buger_jsonparser | train |
69a26aa8f2b148b9a6e1b5dab9c3307c6e1bc107 | diff --git a/modules/backend/behaviors/RelationController.php b/modules/backend/behaviors/RelationController.php
index <HASH>..<HASH> 100644
--- a/modules/backend/behaviors/RelationController.php
+++ b/modules/backend/behaviors/RelationController.php
@@ -1032,7 +1032,7 @@ class RelationController extends ControllerBehavior
* Has one relations will save as part of the add() call.
*/
if ($this->deferredBinding || $this->relationType != 'hasOne') {
- $newModel->save();
+ $newModel->save(null, $this->manageWidget->getSessionKey());
}
$this->relationObject->add($newModel, $sessionKey); | Commit deferred bindings on model created with RelationController (#<I>)
Fixes #<I>. Credit to @iotch. | octobercms_october | train |
321d4160991de7393d7029faceb1f7ecdd46ddc5 | diff --git a/lib/core/plugin_file.rb b/lib/core/plugin_file.rb
index <HASH>..<HASH> 100644
--- a/lib/core/plugin_file.rb
+++ b/lib/core/plugin_file.rb
@@ -11,7 +11,7 @@ class PluginFile
end
def instance_name
- plugin_name.gsub(plugin_type.capitalize, "")
+ plugin_name.gsub(StringHelper.camelize(@plugin_type), "")
end
def plugin_name
diff --git a/lib/core/plugin_manager.rb b/lib/core/plugin_manager.rb
index <HASH>..<HASH> 100644
--- a/lib/core/plugin_manager.rb
+++ b/lib/core/plugin_manager.rb
@@ -1,7 +1,15 @@
require_relative "file_helper"
+require_relative "string_helper"
# Manages the detection of plugins
module PluginManager
+ def plugin(name, type)
+ c_name = StringHelper.camelize name
+ class_module = "#{c_name}#{StringHelper.camelize type}::#{c_name}"
+ result = plugins(type).each { |file| return file if file.module_class_name == class_module }
+ result.first
+ end
+
def plugins(type)
plugin_files(type, "plugins/", "**/*_#{type}.rb")
end
diff --git a/spec/core/plugin_manager_spec.rb b/spec/core/plugin_manager_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/core/plugin_manager_spec.rb
+++ b/spec/core/plugin_manager_spec.rb
@@ -24,7 +24,7 @@ describe PluginManager do
end
end
- describe "#plugin" do
+ describe "#plugins" do
context "when plugin files exist in default directories" do
subject { plugin_manager.plugins("test") }
it "return an array of plugins of the same type" do
@@ -34,5 +34,15 @@ describe PluginManager do
end
end
end
+
+ describe "#plugin" do
+ context "when plugin exists that matches exactly" do
+ subject { plugin_manager.plugin("my_plugin", "test") }
+ it "return an array of plugins of the same type" do
+ expect(subject.module_class_name).to eq("MyPluginTest::MyPlugin")
+ expect(subject.plugin_type).to eq("test")
+ end
+ end
+ end
end
end | CX #8: add ability to load a single specific plugin | imaginatelabs_radial | train
8c91f7dd02dc0b5b21c4c6ab1af539e698ec6d99 | diff --git a/Hook/FrontHook.php b/Hook/FrontHook.php
index <HASH>..<HASH> 100644
--- a/Hook/FrontHook.php
+++ b/Hook/FrontHook.php
@@ -27,12 +27,12 @@ class FrontHook extends BaseHook
$content = trim($this->render("main-footer-body.html"));
if ("" != $content) {
$event->add(
- array(
+ [
"id" => "contact-footer-body",
"class" => "contact",
- "title" => $this->trans("Contact", array(), "hookcontact"),
+ "title" => $this->trans("Contact", [], "hookcontact"),
"content" => $content
- )
+ ]
);
}
}
diff --git a/HookContact.php b/HookContact.php
index <HASH>..<HASH> 100644
--- a/HookContact.php
+++ b/HookContact.php
@@ -16,5 +16,4 @@ use Thelia\Module\BaseModule;
class HookContact extends BaseModule
{
-
}
diff --git a/I18n/en_US.php b/I18n/en_US.php
index <HASH>..<HASH> 100644
--- a/I18n/en_US.php
+++ b/I18n/en_US.php
@@ -1,5 +1,5 @@
<?php
-return array(
+return [
'Contact' => 'Contact',
-);
+];
diff --git a/I18n/ru_RU.php b/I18n/ru_RU.php
index <HASH>..<HASH> 100644
--- a/I18n/ru_RU.php
+++ b/I18n/ru_RU.php
@@ -1,5 +1,5 @@
<?php
-return array(
+return [
'Contact' => 'Контакты',
-);
+]; | Feature cs fixer (#6)
* Feature CS fixer
* fix composer script
* fix cs fixer config cache directory
* apply cs fixer | thelia-modules_HookContact | train
af0198c39f279b7cf49d79fab2c7fa3b7212fb05 | diff --git a/egoio/db_tables/calc_ego_substation.py b/egoio/db_tables/calc_ego_substation.py
index <HASH>..<HASH> 100644
--- a/egoio/db_tables/calc_ego_substation.py
+++ b/egoio/db_tables/calc_ego_substation.py
@@ -1,6 +1,6 @@
# coding: utf-8
from sqlalchemy import BigInteger, Column, Float, Integer, SmallInteger, String, \
- Table, Text, text
+ Table, Text, text, Boolean
from geoalchemy2.types import Geometry
from sqlalchemy.ext.declarative import declarative_base | adjust calc_ego_substation
to make refactor branch work in dingo | openego_ego.io | train |
a5ae4b9e93dd15fbb11136d19350c260deb012bf | diff --git a/src/server/pfs/s3/bucket.go b/src/server/pfs/s3/bucket.go
index <HASH>..<HASH> 100644
--- a/src/server/pfs/s3/bucket.go
+++ b/src/server/pfs/s3/bucket.go
@@ -10,6 +10,7 @@ import (
glob "github.com/pachyderm/ohmyglob"
pfsClient "github.com/pachyderm/pachyderm/src/client/pfs"
pfsServer "github.com/pachyderm/pachyderm/src/server/pfs"
+ "github.com/pachyderm/pachyderm/src/server/pkg/ancestry"
"github.com/pachyderm/pachyderm/src/server/pkg/errutil"
"github.com/pachyderm/s2"
)
@@ -166,7 +167,7 @@ func (c controller) CreateBucket(r *http.Request, bucket string) error {
} else {
return s2.BucketAlreadyOwnedByYouError(r)
}
- } else if errutil.IsInvalidNameError(err) {
+ } else if ancestry.IsInvalidNameError(err) {
return s2.InvalidBucketNameError(r)
} else {
return s2.InternalError(r, err)
@@ -175,7 +176,7 @@ func (c controller) CreateBucket(r *http.Request, bucket string) error {
err = pc.CreateBranch(repo, branch, "", nil)
if err != nil {
- if errutil.IsInvalidNameError(err) {
+ if ancestry.IsInvalidNameError(err) {
return s2.InvalidBucketNameError(r)
}
return s2.InternalError(r, err)
diff --git a/src/server/pkg/ancestry/ancestry.go b/src/server/pkg/ancestry/ancestry.go
index <HASH>..<HASH> 100644
--- a/src/server/pkg/ancestry/ancestry.go
+++ b/src/server/pkg/ancestry/ancestry.go
@@ -71,9 +71,11 @@ func Add(s string, ancestors int) string {
}
var (
- valid = regexp.MustCompile("^[a-zA-Z0-9_-]+$") // Matches a valid name
- invalid = regexp.MustCompile("[^a-zA-Z0-9_-]") // matches an invalid character
- repl = []byte("_")
+ valid = regexp.MustCompile("^[a-zA-Z0-9_-]+$") // Matches a valid name
+ invalid = regexp.MustCompile("[^a-zA-Z0-9_-]") // matches an invalid character
+ invalidNameErrorRe = regexp.MustCompile(`name \(.+\) invalid: only alphanumeric characters, underscores, and dashes are allowed`)
+
+ repl = []byte("_")
)
// ValidateName validates a name to make sure that it can be used unambiguously
@@ -90,3 +92,11 @@ func ValidateName(name string) error {
func SanitizeName(name string) string {
return invalid.ReplaceAllString(name, "_")
}
+
+// IsInvalidNameError returns true if err is due to an invalid name
+func IsInvalidNameError(err error) bool {
+ if err == nil {
+ return false
+ }
+ return invalidNameErrorRe.MatchString(err.Error())
+}
diff --git a/src/server/pkg/errutil/errutil.go b/src/server/pkg/errutil/errutil.go
index <HASH>..<HASH> 100644
--- a/src/server/pkg/errutil/errutil.go
+++ b/src/server/pkg/errutil/errutil.go
@@ -30,11 +30,3 @@ func IsNotFoundError(err error) bool {
}
return strings.Contains(err.Error(), "not found")
}
-
-// IsInvalidNameError returns true if err is due to an invalid name
-func IsInvalidNameError(err error) bool {
- if err == nil {
- return false
- }
- return strings.Contains(err.Error(), "only alphanumeric characters, underscores, and dashes are allowed")
-} | Moved error checker to be in the same package where the error is defined | pachyderm_pachyderm | train |
834081c41a43c5d2ddb52bf1ffbcd99e27300099 | diff --git a/tests/test_bulk.py b/tests/test_bulk.py
index <HASH>..<HASH> 100644
--- a/tests/test_bulk.py
+++ b/tests/test_bulk.py
@@ -56,7 +56,7 @@ class TestBulk(test.TruncationTestCase):
async def test_bulk_create_mix_specified(self):
await UniqueName.bulk_create(
[UniqueName(id=id_) for id_ in range(10000, 11000)]
- + [UniqueName() for _ in range(1000)]
+ + [UniqueName(id=id_) for id_ in range(1000)]
)
all_ = await UniqueName.all().values("id", "name") | test: fix test_bulk_create_mix_specified | tortoise_tortoise-orm | train |
dc9c402b2993d9576d652018659fbef37db12898 | diff --git a/dedupe/api.py b/dedupe/api.py
index <HASH>..<HASH> 100644
--- a/dedupe/api.py
+++ b/dedupe/api.py
@@ -640,7 +640,7 @@ class ActiveMatching(Matching) :
self._addTrainingData(training_pairs)
- self._trainClassifier(0.1)
+ self._trainClassifier()
def train(self, ppc=.1, uncovered_dupes=1, index_predicates=True) : # pragma : no cover
"""Keyword arguments:
@@ -675,7 +675,7 @@ class ActiveMatching(Matching) :
self._trainClassifier()
self._trainBlocker(ppc, uncovered_dupes, index_predicates)
- def _trainClassifier(self, alpha=None) : # pragma : no cover
+ def _trainClassifier(self) : # pragma : no cover
labels = numpy.array(self.training_data['label'] == b'match',
dtype='i4')
examples = self.training_data['distances']
@@ -754,7 +754,7 @@ class ActiveMatching(Matching) :
u'distinct':[random_pair]})
- self._trainClassifier(0.1)
+ self._trainClassifier()
bias = len(self.training_pairs[u'match'])
if bias : | remove vestiges of coupled classifier and data model | dedupeio_dedupe | train |
7e00f1efd2d56031d13b1ae41c9be8c6f731256a | diff --git a/billy/models/bills.py b/billy/models/bills.py
index <HASH>..<HASH> 100644
--- a/billy/models/bills.py
+++ b/billy/models/bills.py
@@ -489,7 +489,7 @@ class Bill(Document):
if re.findall('\d+', query):
_id_filter = dict(_filter)
_id_filter['bill_id'] = fix_bill_id(query).upper()
- result = db.bills.find(_id_filter)
+ result = db.bills.find(_id_filter, fields=bill_fields)
if result:
return result
@@ -529,4 +529,4 @@ class Bill(Document):
_filter['title'] = {'$regex': query, '$options': 'i'}
# return query
- return db.bills.find(_filter, bill_fields)
+ return db.bills.find(_filter, fields=bill_fields)
diff --git a/billy/web/api/handlers.py b/billy/web/api/handlers.py
index <HASH>..<HASH> 100644
--- a/billy/web/api/handlers.py
+++ b/billy/web/api/handlers.py
@@ -237,7 +237,7 @@ class BillSearchHandler(BillyHandler):
# attach votes if necessary
bills = list(query)
bill_ids = [bill['_id'] for bill in bills]
- vote_fields = _get_vote_fields(bill_fields)
+ vote_fields = _get_vote_fields(bill_fields) or []
if 'votes' in bill_fields or vote_fields:
# add bill_id to vote_fields for relating back
votes = list(db.votes.find({'bill_id': {'$in': bill_ids}}, | fix a couple of subtle bugs in fields handling | openstates_billy | train |
d3c023e9a7ff05f9ec057cf7020058ce7edc025c | diff --git a/pysat/instruments/de2_rpa.py b/pysat/instruments/de2_rpa.py
index <HASH>..<HASH> 100644
--- a/pysat/instruments/de2_rpa.py
+++ b/pysat/instruments/de2_rpa.py
@@ -47,7 +47,7 @@ platform : string
name : string
Supports 'rpa'
sat_id : string
- Supports '' and 'duct'
+ Supports ''
tag : string
None Supported
@@ -77,16 +77,12 @@ from .methods import nasa_cdaweb as cdw
platform = 'de2'
name = 'rpa'
-tags = {'': '2 sec cadence RPA data', # this is the default
- 'duct': '16 ms cadence DUCT data'}
-sat_ids = {'': ['', 'duct']}
-_test_dates = {'': {'': pysat.datetime(1983, 1, 1),
- 'duct': pysat.datetime(1983, 1, 1)}}
+tags = {'': '2 sec cadence RPA data'} # this is the default
+sat_ids = {'': ['']}
+_test_dates = {'': {'': pysat.datetime(1983, 1, 1)}}
fname = 'de2_ion2s_rpa_{year:04d}{month:02d}{day:02d}_v01.cdf'
-fname_duct = 'de2_duct16ms_rpa_{year:04d}{month:02d}{day:02d}_v01.cdf'
-supported_tags = {'': {'': fname,
- 'duct': fname_duct}}
+supported_tags = {'': {'': fname}}
# use the CDAWeb methods list files routine
list_files = functools.partial(cdw.list_files,
@@ -99,11 +95,7 @@ load = cdw.load
basic_tag = {'dir': '/pub/data/de/de2/plasma_rpa/ion2s_cdaweb',
'remote_fname': '{year:4d}/' + fname,
'local_fname': fname}
-duct_tag = {'dir': '/pub/data/de/de2/plasma_rpa/rpa16ms_cdaweb',
- 'remote_fname': '{year:4d}/' + fname_duct,
- 'local_fname': fname_duct}
-supported_tags = {'': {'': basic_tag,
- 'duct': duct_tag}}
+supported_tags = {'': {'': basic_tag}}
download = functools.partial(cdw.download, supported_tags)
# support listing files currently on CDAWeb | BUG: removing duct for now | rstoneback_pysat | train |
34a5239aec8c275aad8feb5a75fe44e404c40a8b | diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/ExecutionEntity.java b/engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/ExecutionEntity.java
index <HASH>..<HASH> 100644
--- a/engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/ExecutionEntity.java
+++ b/engine/src/main/java/org/camunda/bpm/engine/impl/persistence/entity/ExecutionEntity.java
@@ -274,6 +274,10 @@ public class ExecutionEntity extends PvmExecutionImpl implements Execution, Proc
if (tenantId != null) {
subProcessInstance.setTenantId(tenantId);
}
+ else {
+      // if process definition has no tenant id, inherit this process instance's tenant id
+ subProcessInstance.setTenantId(this.tenantId);
+ }
fireHistoricActivityInstanceUpdate();
diff --git a/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/TenantIdProviderTest.java b/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/TenantIdProviderTest.java
index <HASH>..<HASH> 100644
--- a/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/TenantIdProviderTest.java
+++ b/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/TenantIdProviderTest.java
@@ -264,6 +264,23 @@ public class TenantIdProviderTest extends ResourceProcessEngineTestCase {
assertThat(processInstance.getTenantId(), is(nullValue()));
}
+ public void testTenantIdInheritedFromSuperProcessInstance() {
+
+ String tenantId = "tenant1";
+ SetValueOnRootProcessInstanceTenantIdProvider tenantIdProvider = new SetValueOnRootProcessInstanceTenantIdProvider(tenantId);
+ TestTenantIdProvider.delegate = tenantIdProvider;
+
+ deployment(Bpmn.createExecutableProcess("testProcess").startEvent().userTask().done(),
+ Bpmn.createExecutableProcess("superProcess").startEvent().callActivity().calledElement("testProcess").done());
+
+ // if a process instance is started
+ runtimeService.startProcessInstanceByKey("superProcess");
+
+        // then the tenant id is inherited to the sub process instance even though it is not set by the provider
+ ProcessInstance processInstance = runtimeService.createProcessInstanceQuery().processDefinitionKey("testProcess").singleResult();
+ assertThat(processInstance.getTenantId(), is(tenantId));
+ }
+
// helpers //////////////////////////////////////////
public static class TestTenantIdProvider implements TenantIdProvider { | feat(engine): propagate instance tenant id to sub process instance
Propagate the tenant id of the current process instance to a sub process
instance if the sub process instance's process definition has no tenant
id (is a shared resource)
related to #CAM-<I> | camunda_camunda-bpm-platform | train |
fd4fc3279881dfc7c42ad922cd4d2ea9d24d041d | diff --git a/UITable/README.md b/UITable/README.md
index <HASH>..<HASH> 100644
--- a/UITable/README.md
+++ b/UITable/README.md
@@ -157,6 +157,9 @@ __touchEnd__ | column header cell drag handle | recalculate column width with en
- __onRowInteract__ `Function`
invoked when a cell in a row is interacted with
+- __name__ `String`
+ a unique name for the dataset being consumed; pass a different name to cause the table to fully reset and pull brand new data
+
- __totalRows__ `Number`
the total length of the data set, necessary for smart scrollbar calculations
diff --git a/UITable/__tests__/unit.js b/UITable/__tests__/unit.js
index <HASH>..<HASH> 100644
--- a/UITable/__tests__/unit.js
+++ b/UITable/__tests__/unit.js
@@ -20,6 +20,10 @@ const rows = [{"id":1,"first_name":"Louise","last_name":"Fernandez","job_title":
// index 3 is for the ui-row-loading css hook test
const rowGetter = index => index === 3 ? new Promise(noop) : rows[index];
+const rowsAlt = [{"id":1,"first_name":"Lana","last_name":"Fernandez","job_title":"Database Administrator I","phone":"6-(697)972-8601","email":"[email protected]","address1":"5049 Barnett Road","city":"Nglengkir","country":"Indonesia","country_code":"ID"}];
+
+const altRowGetter = index => rowsAlt[index];
+
const columns = [{title:'FirstName',mapping:'first_name',resizable:true},{title:'LastName',mapping:'last_name',resizable:true},{defaultWidth:100,title:'JobTitle',mapping:'job_title',resizable:true},{title:'Phone',mapping:'phone',resizable:true},{title:'EmailAddress',mapping:'email',resizable:true},{title:'StreetAddress',mapping:'address1',resizable:true},{title:'City',mapping:'city',resizable:true},{title:'Country',mapping:'country',resizable:true},{title:'CountryCode',mapping:'country_code',resizable:true}];
const baseProps = {
@@ -302,4 +306,18 @@ describe('UITable', () => {
expect(xnub.style.width).toBe('500px');
});
});
+
+ describe('props.name', () => {
+ it('should fully reset the table when changed', () => {
+ let element = render(<UITable {...baseProps} />);
+ let firstCell = element.refs.body.querySelector('.ui-table-cell');
+
+ expect(firstCell.textContent).toBe('Louise');
+
+ element = render(<UITable {...baseProps} name='alternate' totalRows={rowsAlt.length} getRow={altRowGetter} />);
+ firstCell = element.refs.body.querySelector('.ui-table-cell');
+
+ expect(firstCell.textContent).toBe('Lana');
+ });
+ });
});
diff --git a/UITable/index.js b/UITable/index.js
index <HASH>..<HASH> 100644
--- a/UITable/index.js
+++ b/UITable/index.js
@@ -107,6 +107,10 @@ class UITable extends UIView {
this.captureDimensions();
}
+ componentWillReceiveProps() {
+ this.setState(this.initialState(), () => this.captureDimensions());
+ }
+
shouldComponentUpdate() {
/* so we can reuse state.rows to avoid extra array allocations in the scroll handlers - in this case a few more CPU cycles are far cheaper than running up against the GC */
return true;
@@ -725,6 +729,7 @@ UITable.propTypes = {
offscreenClass: React.PropTypes.string,
onCellInteract: React.PropTypes.func,
onRowInteract: React.PropTypes.func,
+ name: React.PropTypes.string,
totalRows: React.PropTypes.number,
}; | UITable: add new prop 'name'
Every dataset should have a unique "name". Changing the name passed to
UITable will fully reset the table automatically. | enigma-io_boundless | train |
c6ad1980a2eb2994940bdf7f79835ffdbed2b44d | diff --git a/api/server/router/image/image.go b/api/server/router/image/image.go
index <HASH>..<HASH> 100644
--- a/api/server/router/image/image.go
+++ b/api/server/router/image/image.go
@@ -34,9 +34,9 @@ func (r *imageRouter) initRoutes() {
router.NewGetRoute("/images/{name:.*}/json", r.getImagesByName),
// POST
router.NewPostRoute("/commit", r.postCommit),
- router.NewPostRoute("/images/create", r.postImagesCreate),
router.NewPostRoute("/images/load", r.postImagesLoad),
- router.NewPostRoute("/images/{name:.*}/push", r.postImagesPush),
+ router.Cancellable(router.NewPostRoute("/images/create", r.postImagesCreate)),
+ router.Cancellable(router.NewPostRoute("/images/{name:.*}/push", r.postImagesPush)),
router.NewPostRoute("/images/{name:.*}/tag", r.postImagesTag),
// DELETE
router.NewDeleteRoute("/images/{name:.*}", r.deleteImages), | use router.Cancellable for pull and push | containers_storage | train |
7bc4eeb60d93fd593e3bf0ca380eacde200bbef6 | diff --git a/store/etcdv3/service.go b/store/etcdv3/service.go
index <HASH>..<HASH> 100644
--- a/store/etcdv3/service.go
+++ b/store/etcdv3/service.go
@@ -41,6 +41,10 @@ func (m *Mercury) ServiceStatusStream(ctx context.Context) (chan []string, error
ch := make(chan []string)
go func() {
defer close(ch)
+
+ // must watch prior to get
+ watchChan := m.Watch(ctx, fmt.Sprintf(serviceStatusKey, ""), clientv3.WithPrefix())
+
resp, err := m.Get(ctx, fmt.Sprintf(serviceStatusKey, ""), clientv3.WithPrefix())
if err != nil {
log.Errorf(ctx, "[ServiceStatusStream] failed to get current services: %v", err)
@@ -52,7 +56,7 @@ func (m *Mercury) ServiceStatusStream(ctx context.Context) (chan []string, error
}
ch <- eps.ToSlice()
- for resp := range m.Watch(ctx, fmt.Sprintf(serviceStatusKey, ""), clientv3.WithPrefix()) {
+ for resp := range watchChan {
if resp.Err() != nil {
if !resp.Canceled {
log.Errorf(ctx, "[ServiceStatusStream] watch failed %v", resp.Err()) | bugfix: watch service prior to get service to avoid race (#<I>) | projecteru2_core | train |
3f1012563bff962f2c9e02d36a6db3a29363758f | diff --git a/lib/sprockets/base.rb b/lib/sprockets/base.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/base.rb
+++ b/lib/sprockets/base.rb
@@ -307,11 +307,8 @@ module Sprockets
end
# Find asset by logical path or expanded path.
- def find_asset(path, options = {})
- if filename = resolve(path)
- build_asset(filename, options)
- end
- end
+ # def find_asset(path, options = {})
+ # end
# Preferred `find_asset` shorthand.
#
diff --git a/lib/sprockets/environment.rb b/lib/sprockets/environment.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/environment.rb
+++ b/lib/sprockets/environment.rb
@@ -68,17 +68,22 @@ module Sprockets
def find_asset(path, options = {})
options[:bundle] = true unless options.key?(:bundle)
- # Ensure inmemory cached assets are still fresh on every lookup
- if asset = @assets[asset_cache_key_for(path, options)]
- paths, digest = asset.send(:dependency_paths), asset.send(:dependency_digest)
- if dependencies_hexdigest(paths) == digest
- return asset
+ index = self.index
+
+ if filename = index.resolve(path)
+ # Ensure inmemory cached assets are still fresh on every lookup
+ key = asset_cache_key_for(filename, options)
+ if asset = @assets[key]
+ paths, digest = asset.send(:dependency_paths), asset.send(:dependency_digest)
+ if dependencies_hexdigest(paths) == digest
+ return asset
+ end
end
- end
- if asset = index.find_asset(path, options)
- # Cache is pushed upstream by Index#find_asset
- return asset
+ if asset = index.find_asset(filename, options)
+ # Cache is pushed upstream by Index#find_asset
+ return asset
+ end
end
nil
diff --git a/lib/sprockets/index.rb b/lib/sprockets/index.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/index.rb
+++ b/lib/sprockets/index.rb
@@ -44,21 +44,20 @@ module Sprockets
# Cache `find_asset` calls
def find_asset(path, options = {})
options[:bundle] = true unless options.key?(:bundle)
- if asset = @assets[asset_cache_key_for(path, options)]
- asset
- elsif asset = super
- logical_path_cache_key = asset_cache_key_for(path, options)
- full_path_cache_key = asset_cache_key_for(asset.pathname, options)
- # Cache on Index
- @assets[logical_path_cache_key] = @assets[full_path_cache_key] = asset
+ if filename = resolve(path)
+ key = asset_cache_key_for(filename, options)
+ if asset = @assets[key]
+ asset
+ elsif asset = build_asset(filename, options)
+ # Cache on Index
+ @assets[key] = asset
- # Push cache upstream to Environment
- @environment.instance_eval do
- @assets[logical_path_cache_key] = @assets[full_path_cache_key] = asset
+ # Push cache upstream to Environment
+ @environment.instance_eval do
+ @assets[key] = asset
+ end
end
-
- asset
end
end | Always resolve logical path before caching | rails_sprockets | train |
f26623279ad777f4ef73dd2ecd57ea4800140303 | diff --git a/backbone.obscura.js b/backbone.obscura.js
index <HASH>..<HASH> 100644
--- a/backbone.obscura.js
+++ b/backbone.obscura.js
@@ -746,17 +746,20 @@ function lookupIterator(value) {
return _.isFunction(value) ? value : function(obj){ return obj.get(value); };
}
-function onAdd(model) {
- var index;
+function modelInsertIndex(model) {
if (!this._comparator) {
- index = this._superset.indexOf(model);
+ return this._superset.indexOf(model);
} else {
if (!this._reverse) {
- index = _.sortedIndex(this._collection.toArray(), model, lookupIterator(this._comparator));
+ return _.sortedIndex(this._collection.toArray(), model, lookupIterator(this._comparator));
} else {
- index = reverseSortedIndex(this._collection.toArray(), model, lookupIterator(this._comparator));
+ return reverseSortedIndex(this._collection.toArray(), model, lookupIterator(this._comparator));
}
}
+}
+
+function onAdd(model) {
+ var index = modelInsertIndex.call(this, model);
this._collection.add(model, { at: index });
}
@@ -767,7 +770,7 @@ function onRemove(model) {
}
function onChange(model) {
- if (this.contains(model)) {
+ if (this.contains(model) && this._collection.indexOf(model) !== modelInsertIndex.call(this, model)) {
this._collection.remove(model);
onAdd.call(this, model);
}
diff --git a/test/sorted.js b/test/sorted.js
index <HASH>..<HASH> 100644
--- a/test/sorted.js
+++ b/test/sorted.js
@@ -315,6 +315,48 @@ describe('sorted collection', function() {
assert(firstModel === sorted.last());
});
+ it('should not remove and add it back when the location doesnt change', function() {
+ sorted.removeSort();
+
+ var added = false;
+ var removed = false;
+
+ sorted.on('remove', function(eventName) {
+ removed = true;
+ });
+
+ sorted.on('add', function(eventName) {
+ added = true;
+ });
+
+ var firstModel = sorted.first();
+
+ firstModel.set({ b: 100 });
+
+ assert(added === false);
+ assert(removed === false);
+ });
+
+ it('should not remove and add it back when the location doesnt change', function() {
+ var added = false;
+ var removed = false;
+
+ sorted.on('remove', function(eventName) {
+ removed = true;
+ });
+
+ sorted.on('add', function(eventName) {
+ added = true;
+ });
+
+ var firstModel = sorted.first();
+
+ firstModel.set({ foo: 100 });
+
+ assert(added === false);
+ assert(removed === false);
+ });
+
});
describe('removing a model in the superset', function() { | Brought in the latest sorted-collection changes and updated build. | jmorrell_backbone.obscura | train |
67423b1d33d4015935af72d4e3ebb8bdcf512270 | diff --git a/test/unit/VStep.spec.js b/test/unit/VStep.spec.js
index <HASH>..<HASH> 100644
--- a/test/unit/VStep.spec.js
+++ b/test/unit/VStep.spec.js
@@ -21,39 +21,4 @@ describe('VStep.vue', () => {
expect(wrapper.text()).to.include(step.content)
})
-
- it('calls skip and finish without value', () => {
- const step = {
- target: 'v-step-0',
- content: 'This is a demo step!'
- }
-
- let i = 0
- const mockstop = () => {
- i++
- }
-
- // We don't provide skip and finish function
- const wrapper = shallowMount(VStep, {
- propsData: {
- step,
- stop: mockstop,
- labels
- }
- })
-
- expect(i).to.equal(0)
-
- // When call stop, the value of i changes
- wrapper.vm.stop()
- expect(i).to.equal(1)
-
- // When call skip, the stop function is called
- wrapper.vm.skip()
- expect(i).to.equal(2)
-
- // When call finish, the stop function is called
- wrapper.vm.finish()
- expect(i).to.equal(3)
- })
}) | test: temporary remove unit test of skip/finish callbacks | pulsardev_vue-tour | train |
deeb6b8ed2435dbb2561dcacc7df4391febc0e46 | diff --git a/lib/annotate_models/model_annotation_generator.rb b/lib/annotate_models/model_annotation_generator.rb
index <HASH>..<HASH> 100644
--- a/lib/annotate_models/model_annotation_generator.rb
+++ b/lib/annotate_models/model_annotation_generator.rb
@@ -60,7 +60,7 @@ module AnnotateModels
Dir["app/models/*.rb"].each do |path|
result = File.basename(path).scan(/^(.+)\.rb/)[0][0]
model = eval(ActiveSupport::Inflector.camelize(result))
- next if model.respond_to? :abstract_class && model.abstract_class
+ next if model.respond_to?(:abstract_class) && model.abstract_class
next unless model < ActiveRecord::Base
@annotations[model] = generate_annotation(model) unless @annotations.keys.include?(model)
end
diff --git a/lib/annotate_models/version.rb b/lib/annotate_models/version.rb
index <HASH>..<HASH> 100644
--- a/lib/annotate_models/version.rb
+++ b/lib/annotate_models/version.rb
@@ -1,3 +1,3 @@
module AnnotateModels
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
end | Fix bug w/ Rails 5 support. | bitaxis_annotate_models | train |
183dfc726bf72136d539b18386b8e85ba4472629 | diff --git a/uportal-war/src/main/java/org/jasig/portal/security/provider/cas/CasAssertionSecurityContext.java b/uportal-war/src/main/java/org/jasig/portal/security/provider/cas/CasAssertionSecurityContext.java
index <HASH>..<HASH> 100644
--- a/uportal-war/src/main/java/org/jasig/portal/security/provider/cas/CasAssertionSecurityContext.java
+++ b/uportal-war/src/main/java/org/jasig/portal/security/provider/cas/CasAssertionSecurityContext.java
@@ -46,6 +46,7 @@ public class CasAssertionSecurityContext extends ChainingSecurityContext impleme
private static final String SESSION_ADDITIONAL_DESCRIPTORS_BEAN = "sessionScopeAdditionalDescriptors";
private static final String CAS_COPY_ASSERT_ATTR_TO_USER_ATTR_BEAN = "casCopyAssertionAttributesToUserAttributes";
+ // UP-4212 Transient because security contexts are serialized into HTTP Session (and webflow).
private transient ApplicationContext applicationContext;
private Assertion assertion;
private boolean copyAssertionAttributesToUserAttributes = false; | UP-<I> Added comments | Jasig_uPortal | train |
222a6f44016451dcbd2da0003e64521c06e88ba9 | diff --git a/daemon/execdriver/utils.go b/daemon/execdriver/utils.go
index <HASH>..<HASH> 100644
--- a/daemon/execdriver/utils.go
+++ b/daemon/execdriver/utils.go
@@ -1,17 +1,28 @@
package execdriver
-import "github.com/dotcloud/docker/utils"
+import (
+ "strings"
+
+ "github.com/docker/libcontainer/security/capabilities"
+ "github.com/dotcloud/docker/utils"
+)
func TweakCapabilities(basics, adds, drops []string) []string {
var caps []string
- for _, cap := range basics {
- if !utils.StringsContains(drops, cap) {
- caps = append(caps, cap)
+ if !utils.StringsContainsNoCase(drops, "all") {
+ for _, cap := range basics {
+ if !utils.StringsContainsNoCase(drops, cap) {
+ caps = append(caps, cap)
+ }
}
}
for _, cap := range adds {
- if !utils.StringsContains(caps, cap) {
+ if strings.ToLower(cap) == "all" {
+ caps = capabilities.GetAllCapabilities()
+ break
+ }
+ if !utils.StringsContainsNoCase(caps, cap) {
caps = append(caps, cap)
}
}
diff --git a/integration-cli/docker_cli_run_test.go b/integration-cli/docker_cli_run_test.go
index <HASH>..<HASH> 100644
--- a/integration-cli/docker_cli_run_test.go
+++ b/integration-cli/docker_cli_run_test.go
@@ -798,6 +798,21 @@ func TestCapDropCannotMknod(t *testing.T) {
logDone("run - test --cap-drop=MKNOD cannot mknod")
}
+func TestCapDropALLCannotMknod(t *testing.T) {
+ cmd := exec.Command(dockerBinary, "run", "--cap-drop=ALL", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok")
+ out, _, err := runCommandWithOutput(cmd)
+ if err == nil {
+ t.Fatal(err, out)
+ }
+
+ if actual := strings.Trim(out, "\r\n"); actual == "ok" {
+ t.Fatalf("expected output not ok received %s", actual)
+ }
+ deleteAllContainers()
+
+ logDone("run - test --cap-drop=ALL cannot mknod")
+}
+
func TestCapAddCanDownInterface(t *testing.T) {
cmd := exec.Command(dockerBinary, "run", "--cap-add=NET_ADMIN", "busybox", "sh", "-c", "ip link set eth0 down && echo ok")
out, _, err := runCommandWithOutput(cmd)
@@ -813,6 +828,21 @@ func TestCapAddCanDownInterface(t *testing.T) {
logDone("run - test --cap-add=NET_ADMIN can set eth0 down")
}
+func TestCapAddALLCanDownInterface(t *testing.T) {
+ cmd := exec.Command(dockerBinary, "run", "--cap-add=ALL", "busybox", "sh", "-c", "ip link set eth0 down && echo ok")
+ out, _, err := runCommandWithOutput(cmd)
+ if err != nil {
+ t.Fatal(err, out)
+ }
+
+ if actual := strings.Trim(out, "\r\n"); actual != "ok" {
+ t.Fatalf("expected output ok received %s", actual)
+ }
+ deleteAllContainers()
+
+ logDone("run - test --cap-add=ALL can set eth0 down")
+}
+
func TestPrivilegedCanMount(t *testing.T) {
cmd := exec.Command(dockerBinary, "run", "--privileged", "busybox", "sh", "-c", "mount -t tmpfs none /tmp && echo ok")
diff --git a/utils/utils.go b/utils/utils.go
index <HASH>..<HASH> 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -908,9 +908,9 @@ func ValidateContextDirectory(srcPath string) error {
return finalError
}
-func StringsContains(slice []string, s string) bool {
+func StringsContainsNoCase(slice []string, s string) bool {
for _, ss := range slice {
- if s == ss {
+ if strings.ToLower(s) == strings.ToLower(ss) {
return true
}
} | add basic support for 'all'
Docker-DCO-<I>- | containers_storage | train |
b209ae1dc5d5a256a7c184722a5ec524ba3c7f13 | diff --git a/aeron-archiver/src/main/java/io/aeron/archiver/RecordingFragmentReader.java b/aeron-archiver/src/main/java/io/aeron/archiver/RecordingFragmentReader.java
index <HASH>..<HASH> 100644
--- a/aeron-archiver/src/main/java/io/aeron/archiver/RecordingFragmentReader.java
+++ b/aeron-archiver/src/main/java/io/aeron/archiver/RecordingFragmentReader.java
@@ -216,6 +216,11 @@ class RecordingFragmentReader implements AutoCloseable
interface SimplifiedControlledPoll
{
+ /**
+ * Called by the {@link RecordingFragmentReader}. Implementors need only process DATA fragments.
+ *
+ * @return true if fragment processed, false to abort.
+ */
boolean onFragment(
DirectBuffer fragmentBuffer,
int fragmentOffset, | [Java] Document only data frames are passed to SimplifiedControlledPoll | real-logic_aeron | train |
93c74c8d9f6d2b2fd0218b2a37e1ae2b6ac0f9b7 | diff --git a/lib/acts_as_api/base.rb b/lib/acts_as_api/base.rb
index <HASH>..<HASH> 100644
--- a/lib/acts_as_api/base.rb
+++ b/lib/acts_as_api/base.rb
@@ -34,7 +34,7 @@ module ActsAsApi
# be contained in the api responses.
def api_accessible(api_template, options = {}, &block)
- attributes = api_accessible_attributes(api_template) || ApiTemplate.new(api_template)
+ attributes = api_accessible_attributes(api_template).try(:dup) || ApiTemplate.create(api_template)
attributes.merge!(api_accessible_attributes(options[:extend])) if options[:extend] | fix bug that causes RuntimeError (can't add a new key into hash during iteration)
as described here: <URL> | fabrik42_acts_as_api | train |
9a8c70c0213e5d2032427e73182964cd966d7af1 | diff --git a/generators/generator-base.js b/generators/generator-base.js
index <HASH>..<HASH> 100644
--- a/generators/generator-base.js
+++ b/generators/generator-base.js
@@ -1199,7 +1199,7 @@ Generator.prototype.getAngularAppName = function () {
};
Generator.prototype.getMainClassName = function () {
- return _.capitalize(this.getAngularAppName());
+ return _.upperFirst(_.camelCase(this.getAngularAppName()));
};
Generator.prototype.askModuleName = function (generator, currentQuestion, totalQuestions) { | Replacing capitalize with upperfirst and camelcase | jhipster_generator-jhipster | train |
9d197efe19568708b7e628642388f166d85a6e63 | diff --git a/esptool.py b/esptool.py
index <HASH>..<HASH> 100755
--- a/esptool.py
+++ b/esptool.py
@@ -43,7 +43,7 @@ START_FLASH_TIMEOUT = 20 # timeout for starting flash (may perform
CHIP_ERASE_TIMEOUT = 120 # timeout for full chip erase
MAX_TIMEOUT = CHIP_ERASE_TIMEOUT * 2 # longest any command can run
SYNC_TIMEOUT = 0.1 # timeout for syncing with bootloader
-MD5_TIMEOUT_PER_MB = 5 # timeout (per megabyte) for calculating md5sum
+MD5_TIMEOUT_PER_MB = 8 # timeout (per megabyte) for calculating md5sum
ERASE_REGION_TIMEOUT_PER_MB = 30 # timeout (per megabyte) for erasing a region | md5sum: Increase md5sum timeout to 8 seconds/megabyte
This is probably extreme, I think it's only the case if the last block of a compressed written image
is a large number of zeroes, meaning the md5sum has to wait until the last write finishes. | espressif_esptool | train |
9be8338b870b4fc537c2a5809a9e9efef2201521 | diff --git a/eZ/Publish/API/Repository/Tests/SetupFactory/LegacySolr.php b/eZ/Publish/API/Repository/Tests/SetupFactory/LegacySolr.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/API/Repository/Tests/SetupFactory/LegacySolr.php
+++ b/eZ/Publish/API/Repository/Tests/SetupFactory/LegacySolr.php
@@ -18,8 +18,6 @@ use eZ\Publish\Core\FieldType;
*/
class LegacySolr extends Legacy
{
- protected static $indexed = false;
-
/**
* Returns a configured repository for testing.
*
@@ -39,9 +37,14 @@ class LegacySolr extends Legacy
$searchProperty->setAccessible( true );
$searchProperty->setValue(
$persistenceHandler,
- $this->getSearchHandler( $persistenceHandler )
+ $searchHandler = $this->getSearchHandler( $persistenceHandler )
);
+ if ( $initializeFromScratch )
+ {
+ $this->indexAll( $persistenceHandler, $searchHandler );
+ }
+
return $repository;
}
@@ -81,7 +84,7 @@ class LegacySolr extends Legacy
)
);
- $searchHandler = new Solr\Content\Search\Handler(
+ return new Solr\Content\Search\Handler(
new Solr\Content\Search\Gateway\Native(
new Solr\Content\Search\Gateway\HttpClient\Stream( getenv( "solrServer" ) ),
new Solr\Content\Search\CriterionVisitor\Aggregate(
@@ -149,19 +152,10 @@ class LegacySolr extends Legacy
$persistenceHandler->contentTypeHandler(),
$persistenceHandler->objectStateHandler()
);
-
- $this->indexAll( $persistenceHandler, $searchHandler );
-
- return $searchHandler;
}
protected function indexAll( $persistenceHandler, $searchHandler )
{
- if ( self::$indexed )
- {
- return;
- }
-
// @todo: Is there a nicer way to get access to all content objects? We
// require this to run a full index here.
$dbHandlerProperty = new \ReflectionProperty( $persistenceHandler, 'dbHandler' );
@@ -182,7 +176,5 @@ class LegacySolr extends Legacy
$persistenceHandler->contentHandler()->load( $row['id'], $row['current_version'] )
);
}
-
- self::$indexed = true;
}
} | Changed: reindex everything from db between tests
Since DB is recreated between tests, search engine should correspond to
the content as well. | ezsystems_ezpublish-kernel | train |
dda668daeacc85976af24666909650a6b786c2a7 | diff --git a/config/routes.rb b/config/routes.rb
index <HASH>..<HASH> 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -1,2 +1,4 @@
Rscratch::Engine.routes.draw do
+ get "dashboard/index"
+ root to: "dashboard#index"
end | Dashboard route added and gem root path set | avishekjana_rscratch | train |
c0231a1a4fb41d8abf5b36da6efd7371eaba9c22 | diff --git a/Game.js b/Game.js
index <HASH>..<HASH> 100644
--- a/Game.js
+++ b/Game.js
@@ -277,14 +277,14 @@
Game.prototype.isGameReady = function() {
- console.log('STAAAAAAAAAAAAAAAAATES: ' + this.gameState.is);
+ //console.log('STAAAAAAAAAAAAAAAAATES: ' + this.gameState.is);
if (this.gameState.is < GameState.iss.LOADED) return false;
// Check if there is a gameWindow obj and whether it is loading
if (node.window) {
- console.log('W ' + node.window.state);
+ //console.log('W ' + node.window.state);
return (node.window.state >= GameState.iss.LOADED) ? true : false;
}
diff --git a/GameSocketClient.js b/GameSocketClient.js
index <HASH>..<HASH> 100644
--- a/GameSocketClient.js
+++ b/GameSocketClient.js
@@ -21,13 +21,13 @@
this.servername = null;
this.game = null;
- this.io = this.connect();
-
+ this.io = null; // will be created only after the game is loaded;
this.buffer = [];
}
GameSocketClient.prototype.setGame = function(game) {
this.game = game;
+ this.io = this.connect();
};
GameSocketClient.prototype.connect = function() {
@@ -71,7 +71,7 @@
for (var i=0; i < nelem; i++) {
var msg = this.buffer.shift();
node.emit(msg.toInEvent(), msg);
- console.log('Debuffered ' + msg);
+ //console.log('Debuffered ' + msg);
}
};
@@ -130,13 +130,13 @@
if (msg) { // Parsing successful
//console.log('GM is: ' + that.game.gameState.is);
// Wait to fire the msgs if the game state is loading
- if (that.game.isGameReady()) {
+ if (that.game && that.game.isGameReady()) {
//console.log('GM is now: ' + that.game.gameState.is);
node.emit(msg.toInEvent(), msg);
}
else {
- console.log(that.game.gameState.is + ' < ' + GameState.iss.PLAYING);
- console.log('Buffering: ' + msg);
+ //console.log(that.game.gameState.is + ' < ' + GameState.iss.PLAYING);
+ //console.log('Buffering: ' + msg);
that.buffer.push(msg);
}
} | Fixed bug with loadFrame. Now onload event of frame is properly captured | nodeGame_nodegame-client | train |
ee819fac4e92a32f93c547b9c334edc6ac37be01 | diff --git a/lib/riddle/client.rb b/lib/riddle/client.rb
index <HASH>..<HASH> 100644
--- a/lib/riddle/client.rb
+++ b/lib/riddle/client.rb
@@ -132,7 +132,7 @@ module Riddle
Riddle.version_warning
@server = server || "localhost"
- @port = port || 3312
+ @port = port || 9312
@socket = nil
reset
@@ -474,9 +474,10 @@ module Riddle
# Connects to the Sphinx daemon, and yields a socket to use. The socket is
# closed at the end of the block.
def connect(&block)
- unless @socket.nil?
+    if @socket && !@socket.closed?
yield @socket
else
+ @socket = nil
open_socket
begin
yield @socket
diff --git a/spec/fixtures/sphinx/configuration.erb b/spec/fixtures/sphinx/configuration.erb
index <HASH>..<HASH> 100644
--- a/spec/fixtures/sphinx/configuration.erb
+++ b/spec/fixtures/sphinx/configuration.erb
@@ -5,7 +5,7 @@ indexer
searchd
{
- port = 3313
+ port = 9313
log = <%= @path %>/fixtures/sphinx/searchd.log
query_log = <%= @path %>/fixtures/sphinx/searchd.query.log
read_timeout = 5
diff --git a/spec/riddle/controller_spec.rb b/spec/riddle/controller_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/riddle/controller_spec.rb
+++ b/spec/riddle/controller_spec.rb
@@ -6,6 +6,11 @@ describe Riddle::Controller do
@controller = Riddle::Controller.new stub('controller'), 'sphinx.conf'
end
+ it "should return 0.9.9 if using 0.9.9" do
+ @controller.stub!(:` => 'Sphinx 0.9.9-release (r2117)')
+ @controller.sphinx_version.should == '0.9.9'
+ end
+
it "should return 0.9.9 if using 0.9.9 rc2" do
@controller.stub!(:` => 'Sphinx 0.9.9-rc2 (r1785)')
@controller.sphinx_version.should == '0.9.9'
diff --git a/spec/unit/client_spec.rb b/spec/unit/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/client_spec.rb
+++ b/spec/unit/client_spec.rb
@@ -10,8 +10,8 @@ describe Riddle::Client do
Riddle::Client.new.server.should == "localhost"
end
- it "should default to port 3312" do
- Riddle::Client.new.port.should == 3312
+ it "should default to port 9312" do
+ Riddle::Client.new.port.should == 9312
end
it "should translate anchor arguments correctly" do | Tweaks to match the proper <I> release. | pat_riddle | train |
31f58f1137cf473f6f1b650f00467e5e7ae0f867 | diff --git a/lib/spidercrawl/request.rb b/lib/spidercrawl/request.rb
index <HASH>..<HASH> 100644
--- a/lib/spidercrawl/request.rb
+++ b/lib/spidercrawl/request.rb
@@ -114,7 +114,7 @@ module Spidercrawl
page, pages = nil, []
@urls.each do |url|
- request = Typhoeus::Request.new(url, :timeout => @timeout, :followlocation => false, :headers => {"User-Agent" => UserAgents.random})
+ request = Typhoeus::Request.new(url, :timeout => @timeout, :followlocation => false, :headers => {"Accept" => "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Cache-Control" => "no-cache", "Pragma" => "no-cache", "User-Agent" => UserAgents.random})
request.on_complete do |response|
uri = URI(url)
if response.success? | include request headers for typhoeus | belsonheng_spidercrawl | train |
edf439fe2304b6dc207ac19f48d1433b70186bff | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -16,6 +16,10 @@ const internals = {
name: 'limit',
default: 25
},
+ pagination: {
+ name: 'pagination',
+ default: true
+ },
invalid: 'defaults'
},
meta: {
@@ -84,16 +88,18 @@ const internals = {
exports.register = function (server, options, next) {
-
- Hoek.assert(server.connections.length === 1,
- 'You cannot register this plugin for two connections at once. Register it for each connection on your server.');
-
- internals.uri = server.info.uri
+
+ Hoek.assert(server.connections.length === 1,
+ 'You cannot register this plugin for two connections at once. ' +
+ 'Register it for each connection on your server.');
+
+ internals.uri = server.info.uri
const config = Hoek.applyToDefaults(internals.defaults, options);
- Hoek.assert(config.query.invalid === 'defaults' || config.query.invalid === 'badRequest', 'options.query.invalid can only be: \'defaults\' or \'badRequest\' ');
+ Hoek.assert(config.query.invalid === 'defaults' || config.query.invalid === 'badRequest',
+ 'options.query.invalid can only be: \'defaults\' or \'badRequest\' ');
server.decorate('reply', config.reply.paginate, function(results, totalCount) {
Hoek.assert(Array.isArray(results), '#reply.' + config.reply.paginate + ' results must be an array.');
@@ -107,6 +113,17 @@ exports.register = function (server, options, next) {
const exclude = config.routes.exclude;
const path = request.route.path;
+ let pagination = request.query[config.query.pagination.name];
+
+ if (typeof pagination === 'undefined') {
+ pagination = config.query.pagination.default;
+ }
+
+ if (pagination === 'false') {
+ request.query[config.query.pagination.name] = pagination;
+ return reply.continue();
+ }
+
// If the route does not match, just skip this part
if (request.route.method === 'get' && (include[0] === '*' || _.includes(include, path)) &&
!_.includes(exclude, path)) {
@@ -180,6 +197,10 @@ exports.register = function (server, options, next) {
return reply.continue();
}
+ if (request.query[config.query.pagination.name] === 'false') {
+ return reply.continue();
+ }
+
const include = config.routes.include;
const exclude = config.routes.exclude;
const path = request.route.path;
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -797,7 +797,7 @@ describe('Testing pageCount', () => {
});
});
-describe('Post request', done => {
+describe('Post request', () => {
it('Should work with a post request', done => {
let server = register();
server.register(require('../'), (err) => {
@@ -815,3 +815,21 @@ describe('Post request', done => {
});
});
+describe('Pagination to false', () => {
+ it ('Should return the results with no pagination', done => {
+ let server = register();
+ server.register(require('../'), (err) => {
+ expect(err).to.be.undefined();
+ server.inject({
+ method: 'GET',
+ url: '/?pagination=false',
+ }, res => {
+ const response = res.request.response.source;
+ expect(response).to.be.an.array();
+ done();
+ });
+
+ });
+ });
+});
+ | Update to be able to disable pagination with query parameter | fknop_hapi-pagination | train |
b14b3078129a39464179b94e82dfd9d4c3edf08c | diff --git a/angr/analyses/backward_slice.py b/angr/analyses/backward_slice.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/backward_slice.py
+++ b/angr/analyses/backward_slice.py
@@ -272,7 +272,7 @@ class BackwardSlice(Analysis):
taints |= new_taints
else:
- cl = CodeLocation(cfg_node, stmt_id)
+ cl = CodeLocation(cfg_node.addr, stmt_id)
taints.add(cl)
while taints:
@@ -288,7 +288,7 @@ class BackwardSlice(Analysis):
accessed_taints.add(tainted_cl)
# Pick all its data dependencies from data dependency graph
- if tainted_cl in self._ddg:
+ if self._ddg is not None and tainted_cl in self._ddg:
predecessors = self._ddg.get_predecessors(tainted_cl)
l.debug("Returned %d predecessors for %s from data dependence graph", len(predecessors), tainted_cl)
@@ -296,6 +296,8 @@ class BackwardSlice(Analysis):
if p not in accessed_taints:
taints.add(p)
+ self.taint_graph.add_edge(p, tainted_cl)
+
# Handle the control dependence
for n in self._cfg.get_all_nodes(tainted_cl.simrun_addr):
new_taints = self._handle_control_dependence(n)
@@ -306,6 +308,8 @@ class BackwardSlice(Analysis):
if taint not in accessed_taints:
taints.add(taint)
+ self.taint_graph.add_edge(taint, tainted_cl)
+
# In the end, map the taint graph onto CFG
self._map_to_cfg()
@@ -347,8 +351,10 @@ class BackwardSlice(Analysis):
exit_stmt_ids['default'] = None
# Find all paths from src_block to target_block
- all_simple_paths = networkx.all_simple_paths(self._cfg.graph, src_block, target_block)
- for simple_path in all_simple_paths:
+
+ all_shortest_paths = list(networkx.all_shortest_paths(self._cfg.graph, src_block, target_block))
+
+ for simple_path in all_shortest_paths:
if len(simple_path) <= 1:
# Oops, it looks that src_block and target_block are the same guy?
continue
@@ -357,6 +363,9 @@ class BackwardSlice(Analysis):
a, b = simple_path[0], simple_path[1]
# Get the exit statement ID from CFG
exit_stmt_id = self._cfg.get_exit_stmt_idx(a, b)
+ if exit_stmt_id is None:
+ continue
+
# Mark it!
if exit_stmt_ids[exit_stmt_id] is None:
exit_stmt_ids[exit_stmt_id] = [ b.addr ] | Resurrect BackwardSlice.
- Fill data into taint_graph. It might be renamed in the future though.
- Add proper sanity checks to some return values from other methods.
- Instead of getting all simple paths, now we only get shortest paths.
Getting all simple paths is not feasible sometimes (especially when
the program is big and loops exist). | angr_angr | train |
f8a0cc4a573ca8f023f0edc52d25a1fe2e530fd4 | diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "react-responsive-ui",
- "version": "0.8.4",
+ "version": "0.8.5",
"description": "Slick and accurate React UI components",
"main": "index.common.js",
"module": "index.es6.js",
diff --git a/source/date picker.js b/source/date picker.js
index <HASH>..<HASH> 100644
--- a/source/date picker.js
+++ b/source/date picker.js
@@ -329,8 +329,8 @@ export default class DatePicker extends PureComponent
// `event.target.value` in its `onBlur` handler.
if (onBlur)
{
- onBlur
- ({
+ const _event =
+ {
...event,
target:
{
@@ -338,6 +338,13 @@ export default class DatePicker extends PureComponent
value
}
-      })
+      }
+
+ // For `redux-form` event detection.
+ // https://github.com/erikras/redux-form/blob/v5/src/events/isEvent.js
+ _event.stopPropagation = event.stopPropagation
+ _event.preventDefault = event.preventDefault
+
+ onBlur(_event)
}
}
diff --git a/source/select.js b/source/select.js
index <HASH>..<HASH> 100644
--- a/source/select.js
+++ b/source/select.js
@@ -1312,8 +1312,8 @@ export default class Select extends PureComponent
// `event.target.value` in its `onBlur` handler.
if (onBlur)
{
- onBlur
- ({
+ const _event =
+ {
...event,
target:
{
@@ -1321,6 +1321,13 @@ export default class Select extends PureComponent
value
}
-      })
+      }
+
+ // For `redux-form` event detection.
+ // https://github.com/erikras/redux-form/blob/v5/src/events/isEvent.js
+ _event.stopPropagation = event.stopPropagation
+ _event.preventDefault = event.preventDefault
+
+ onBlur(_event)
}
} | Fixed `redux-form` event detection | catamphetamine_react-responsive-ui | train |
59b9aa7d9e92119d87f53d0ad8e5cf579992bf3d | diff --git a/src/OpenPlatform/Components/Authorizer.php b/src/OpenPlatform/Components/Authorizer.php
index <HASH>..<HASH> 100644
--- a/src/OpenPlatform/Components/Authorizer.php
+++ b/src/OpenPlatform/Components/Authorizer.php
@@ -46,12 +46,12 @@ class Authorizer extends AbstractComponent
/**
* Get authorizer options api.
*/
- const GET_AUTHORIZER_OPTION = 'https://api.weixin.qq.com/cgi-bin/component/ api_get_authorizer_option';
+ const GET_AUTHORIZER_OPTION = 'https://api.weixin.qq.com/cgi-bin/component/api_get_authorizer_option';
/**
* Set authorizer options api.
*/
- const SET_AUTHORIZER_OPTION = 'https://api.weixin.qq.com/cgi-bin/component/ api_set_authorizer_option';
+ const SET_AUTHORIZER_OPTION = 'https://api.weixin.qq.com/cgi-bin/component/api_set_authorizer_option';
/**
* Get authorizer info. | Update Authorizer.php (#<I>)
remove url spaces | overtrue_wechat | train |
1345950de2a59eff79b26a5dec2daafa5c03814f | diff --git a/src/PhpImap/DataPartInfo.php b/src/PhpImap/DataPartInfo.php
index <HASH>..<HASH> 100644
--- a/src/PhpImap/DataPartInfo.php
+++ b/src/PhpImap/DataPartInfo.php
@@ -114,6 +114,12 @@ class DataPartInfo
$this->data = $this->mail->decodeMimeStr(
(string) $this->data // Data to convert
);
+
+ $this->data = $this->mail->convertToUtf8(
+ $this->data,
+ $this->charset
+ );
+ $this->charset = 'utf-8';
}
return (null === $this->data) ? '' : $this->data; | Issue #<I>: Fix encoding issues | barbushin_php-imap | train |
a89688ecc8c2cb8b44d5c1c015bc1cf1a42b4e87 | diff --git a/niworkflows/anat/ants.py b/niworkflows/anat/ants.py
index <HASH>..<HASH> 100644
--- a/niworkflows/anat/ants.py
+++ b/niworkflows/anat/ants.py
@@ -352,7 +352,22 @@ N4BiasFieldCorrection."""
map_wmmask = pe.Node(
ApplyTransforms(interpolation="Gaussian"), name="map_wmmask", mem_gb=1,
)
- map_wmmask.inputs.input_image = str(wm_tpm)
+
+ # Add the brain stem if it is found.
+ bstem_tpm = (
+ get_template(in_template, label="BS", suffix="probseg", **common_spec) or None
+ )
+ if bstem_tpm:
+ full_wm = pe.Node(niu.Function(function=_imsum), name="full_wm")
+ full_wm.inputs.op1 = str(wm_tpm)
+ full_wm.inputs.op2 = str(bstem_tpm)
+ # fmt: off
+ wf.connect([
+ (full_wm, map_wmmask, [("out", "input_image")])
+ ])
+ # fmt: on
+ else:
+ map_wmmask.inputs.input_image = str(wm_tpm)
# fmt: off
wf.disconnect([
(map_brainmask, inu_n4_final, [("output_image", "weight_image")]),
@@ -783,26 +798,22 @@ N4BiasFieldCorrection."""
run_without_submitting=True)
overlap = pe.Node(FuzzyOverlap(), name="overlap", run_without_submitting=True)
- apply_wm_prior = pe.Node(
- MultiplyImages(
- dimension=3,
- output_product_image="regularized_wm.nii.gz",
- ),
- name="apply_wm_prior",
- )
+ apply_wm_prior = pe.Node(niu.Function(function=_improd), name="apply_wm_prior")
+
# fmt: off
wf.disconnect([
(copy_xform_wm, inu_n4_final, [("wm_map", "weight_image")]),
])
wf.connect([
- (inputnode, apply_wm_prior, [("wm_prior", "second_input")]),
+ (inputnode, apply_wm_prior, [("in_mask", "in_mask"),
+ ("wm_prior", "op2")]),
(inputnode, match_wm, [("wm_prior", "value")]),
(atropos, match_wm, [("posteriors", "reference")]),
(atropos, overlap, [("posteriors", "in_ref")]),
(match_wm, overlap, [("out", "in_tst")]),
(overlap, sel_wm, [(("class_fdi", _argmax), "index")]),
- (copy_xform_wm, apply_wm_prior, [("wm_map", "first_input")]),
- (apply_wm_prior, inu_n4_final, [("output_product_image", "weight_image")]),
+ (copy_xform_wm, apply_wm_prior, [("wm_map", "op1")]),
+ (apply_wm_prior, inu_n4_final, [("out", "weight_image")]),
])
# fmt: on
return wf
@@ -1034,3 +1045,39 @@ def _conform_mask(in_mask, in_reference):
def _matchlen(value, reference):
return [value] * len(reference)
+
+
+def _imsum(op1, op2, out_file=None):
+ import nibabel as nb
+ im1 = nb.load(op1)
+
+ data = im1.get_fdata() + nb.load(op2).get_fdata()
+ data /= data.max()
+ nii = nb.Nifti1Image(data, im1.affine, im1.header)
+
+ if out_file is None:
+ from pathlib import Path
+ out_file = str((Path() / "summap.nii.gz").absolute())
+
+ nii.to_filename(out_file)
+ return out_file
+
+
+def _improd(op1, op2, in_mask, out_file=None):
+ import nibabel as nb
+ im1 = nb.load(op1)
+
+ data = im1.get_fdata() * nb.load(op2).get_fdata()
+ mskdata = nb.load(in_mask).get_fdata() > 0
+ data[~mskdata] = 0
+ data[data < 0] = 0
+ data /= data.max()
+ data = 0.5 * (data + mskdata)
+ nii = nb.Nifti1Image(data, im1.affine, im1.header)
+
+ if out_file is None:
+ from pathlib import Path
+ out_file = str((Path() / "prodmap.nii.gz").absolute())
+
+ nii.to_filename(out_file)
+ return out_file | enh: add brainstem as part of the prior & brainmask for final N4
Addresses issues like <URL> | poldracklab_niworkflows | train |
bd0f7346757ea765c31e56605a01d5e65f3adebd | diff --git a/test/getgroupmembershipforgroup.js b/test/getgroupmembershipforgroup.js
index <HASH>..<HASH> 100644
--- a/test/getgroupmembershipforgroup.js
+++ b/test/getgroupmembershipforgroup.js
@@ -13,6 +13,7 @@ describe('ActiveDirectory', function() {
describe('#getGroupMembershipForGroup()', function() {
it('should return groups if groupName (distinguishedName) is valid', function(done) {
+ var verified = 0;
settings.groups.forEach(function(group) {
ad.getGroupMembershipForGroup(group.dn, function(err, groups) {
if (err) return(done(err));
@@ -24,11 +25,12 @@ describe('ActiveDirectory', function() {
return((result.cn || '').toLowerCase()=== lowerCaseSource);
}));
});
+ if (++verified === settings.groups.length) done();
});
});
- done();
});
it('should return groups if groupName (commonName) exists', function(done) {
+ var verified = 0;
settings.groups.forEach(function(group) {
ad.getGroupMembershipForGroup(group.cn, function(err, groups) {
if (err) return(done(err));
@@ -40,9 +42,9 @@ describe('ActiveDirectory', function() {
return((result.cn || '').toLowerCase()=== lowerCaseSource);
}));
});
+ if (++verified === settings.groups.length) done();
});
});
- done();
});
it('should return empty groups if groupName doesn\'t exist', function(done) {
ad.getGroupMembershipForGroup('!!!NON-EXISTENT GROUP!!!', function(err, groups) {
diff --git a/test/getgroupmembershipforuser.js b/test/getgroupmembershipforuser.js
index <HASH>..<HASH> 100644
--- a/test/getgroupmembershipforuser.js
+++ b/test/getgroupmembershipforuser.js
@@ -13,6 +13,7 @@ describe('ActiveDirectory', function() {
describe('#getGroupMembershipForUser()', function() {
it('should return groups if username (distinguishedName) is valid', function(done) {
+ var verified = 0;
settings.users.forEach(function(user) {
ad.getGroupMembershipForUser(user.dn, function(err, groups) {
if (err) return(done(err));
@@ -24,11 +25,12 @@ describe('ActiveDirectory', function() {
return((result.cn || '').toLowerCase()=== lowerCaseSource);
}));
});
+ if (++verified === settings.users.length) done();
});
});
- done();
});
it('should return groups if username (sAMAccountName) exists', function(done) {
+ var verified = 0;
settings.users.forEach(function(user) {
ad.getGroupMembershipForUser(user.sAMAccountName, function(err, groups) {
if (err) return(done(err));
@@ -40,11 +42,12 @@ describe('ActiveDirectory', function() {
return((result.cn || '').toLowerCase()=== lowerCaseSource);
}));
});
+ if (++verified === settings.users.length) done();
});
});
- done();
});
it('should return groups if username (userPrincipalName) exists', function(done) {
+ var verified = 0;
settings.users.forEach(function(user) {
ad.getGroupMembershipForUser(user.userPrincipalName, function(err, groups) {
if (err) return(done(err));
@@ -56,9 +59,9 @@ describe('ActiveDirectory', function() {
return((result.cn || '').toLowerCase()=== lowerCaseSource);
}));
});
+ if (++verified === settings.users.length) done();
});
});
- done();
});
it('should return empty groups if groupName doesn\'t exist', function(done) {
ad.getGroupMembershipForUser('!!!NON-EXISTENT GROUP!!!', function(err, groups) { | fix: Ensure done() is only called after all groups have been tested. | gheeres_node-activedirectory | train |
5fab1e2e38ec3948c42e6986bdb9a9bb473ab158 | diff --git a/idxmap/mem/doc.go b/idxmap/mem/doc.go
index <HASH>..<HASH> 100644
--- a/idxmap/mem/doc.go
+++ b/idxmap/mem/doc.go
@@ -22,23 +22,23 @@
// Owner and title are used for identification of the mapping. IndexFunc extracts secondary
// indexes from the stored item.
//
-// To insert a new item into the mapping execute:
+// To insert a new item into the mapping, execute:
//
// mapping.RegisterName(name, metadata)
//
-// RegisterName can be used also to overwrite existing item associated with the name.
+// RegisterName can also be used to overwrite an existing item associated with the name.
//
// To retrieve a particular item identified by name run:
//
// meta, found := mapping.Lookup(name)
//
-// To lookup items by secondary indexes execute:
+// To lookup items by secondary indexes, execute:
//
// names := mapping.LookupByMetadata(indexName, indexValue)
//
// names of all matching items are returned.
//
-// To retrieve all currently registered names run:
+// To retrieve all currently registered names, run:
//
// names := mapping.ListNames()
//
@@ -47,14 +47,14 @@
//
// mapping.UnregisterName(name)
//
-// To monitor changes run:
+// To monitor changes, run:
// callback := func(notif idxmap.NamedMappingDto) {
// // process notification
// }
//
// mapping.Watch("NameOfWatcher", callback)
//
-// If you prefer processing of changes through channels:
+// If you prefer processing changes through channels:
//
// ch := make(chan idxmap.NamedMappingDto)
// mapping.Watch("NameOfWatcher", ToChan(ch)) | Update doc.go
Proposed minor grammar edits | ligato_cn-infra | train |
86660dd7cb76c8c4c7dd5a73c091bb8eda2a08ba | diff --git a/src/android/AudioPlayer.java b/src/android/AudioPlayer.java
index <HASH>..<HASH> 100644
--- a/src/android/AudioPlayer.java
+++ b/src/android/AudioPlayer.java
@@ -130,7 +130,9 @@ public class AudioPlayer implements OnCompletionListener, OnPreparedListener, On
this.player = null;
}
if (this.recorder != null) {
- this.stopRecording(true);
+ if (this.state != STATE.MEDIA_STOPPED) {
+ this.stopRecording(true);
+ }
this.recorder.release();
this.recorder = null;
}
@@ -197,8 +199,44 @@ public class AudioPlayer implements OnCompletionListener, OnPreparedListener, On
if (size == 1) {
String logMsg = "renaming " + this.tempFile + " to " + file;
LOG.d(LOG_TAG, logMsg);
+
File f = new File(this.tempFile);
- if (!f.renameTo(new File(file))) LOG.e(LOG_TAG, "FAILED " + logMsg);
+ if (!f.renameTo(new File(file))) {
+
+ FileOutputStream outputStream = null;
+ File outputFile = null;
+ try {
+ outputFile = new File(file);
+ outputStream = new FileOutputStream(outputFile);
+ FileInputStream inputStream = null;
+ File inputFile = null;
+ try {
+ inputFile = new File(this.tempFile);
+ LOG.d(LOG_TAG, "INPUT FILE LENGTH: " + String.valueOf(inputFile.length()) );
+ inputStream = new FileInputStream(inputFile);
+ copy(inputStream, outputStream, false);
+ } catch (Exception e) {
+ LOG.e(LOG_TAG, e.getLocalizedMessage(), e);
+ } finally {
+ if (inputStream != null) try {
+ inputStream.close();
+ inputFile.delete();
+ inputFile = null;
+ } catch (Exception e) {
+ LOG.e(LOG_TAG, e.getLocalizedMessage(), e);
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ if (outputStream != null) try {
+ outputStream.close();
+ LOG.d(LOG_TAG, "OUTPUT FILE LENGTH: " + String.valueOf(outputFile.length()) );
+ } catch (Exception e) {
+ LOG.e(LOG_TAG, e.getLocalizedMessage(), e);
+ }
+ }
+ }
}
// more than one file so the user must have pause recording. We'll need to concat files.
else { | CB-<I>: checking mediaState in destroy method, and moving file by stream when renameTo failing (#<I>) | apache_cordova-plugin-media | train |
34c18877e07a46b58441055c9846de69db0152d5 | diff --git a/pyedflib/edfwriter.py b/pyedflib/edfwriter.py
index <HASH>..<HASH> 100644
--- a/pyedflib/edfwriter.py
+++ b/pyedflib/edfwriter.py
@@ -35,6 +35,7 @@ from ._extensions._pyedflib import set_birthdate, set_digital_minimum, set_techn
from ._extensions._pyedflib import set_patientcode, set_equipment, set_admincode, set_gender, set_datarecord_duration
from ._extensions._pyedflib import set_startdatetime, set_samplefrequency, set_physical_minimum, set_label, set_physical_dimension
from ._extensions._pyedflib import set_transducer, set_prefilter, write_physical_samples, close_file, write_annotation_latin1, write_annotation_utf8
+from ._extensions._pyedflib import blockwrite_physical_samples
__all__ = ['EdfWriter']
@@ -569,6 +570,28 @@ class EdfWriter(object):
"""
return write_physical_samples(self.handle, data)
+ def blockWritePhysicalSamples(self, data):
+ """
+        Writes physical samples (uV, mA, Ohm).
+        data must be filled with samples from all signals,
+        where each signal contributes n samples, n being the samplefrequency of that signal.
+
+        data covers one second of recording, concatenating all signals in order; its size must equal the sum of the samplefrequencies of all signals.
+
+ Notes
+ -----
+ buf must be filled with samples from all signals, starting with signal 0, 1, 2, etc.
+ one block equals one second
+ The physical samples will be converted to digital samples using the
+ values of physical maximum, physical minimum, digital maximum and digital minimum
+ The number of samples written is equal to the sum of the samplefrequencies of all signals
+ Size of buf should be equal to or bigger than sizeof(double) multiplied by the sum of the samplefrequencies of all signals
+ Returns 0 on success, otherwise -1
+
+ All parameters must be already written into the bdf/edf-file.
+ """
+ return blockwrite_physical_samples(self.handle, data)
+
def writeSamples(self, data_list):
"""
Writes physical samples (uV, mA, Ohm) from data belonging to all signals
@@ -591,26 +614,37 @@ class EdfWriter(object):
for i in np.arange(len(data_list)):
ind.append(0)
+ sampleLength = 0
for i in np.arange(len(data_list)):
if (np.size(data_list[i]) < ind[i] + self.channels[i]['sample_rate']):
notAtEnd = False
+ sampleLength += self.channels[i]['sample_rate']
+ dataOfOneSecond = np.zeros(sampleLength)
+
while notAtEnd:
+ dataOfOneSecondInd = 0
for i in np.arange(len(data_list)):
- self.writePhysicalSamples(data_list[i].flatten()[int(ind[i]):int(ind[i]+self.channels[i]['sample_rate'])])
+ dataOfOneSecond[dataOfOneSecondInd:dataOfOneSecondInd+self.channels[i]['sample_rate']] = data_list[i].flatten()[int(ind[i]):int(ind[i]+self.channels[i]['sample_rate'])]
+ # self.writePhysicalSamples(data_list[i].flatten()[int(ind[i]):int(ind[i]+self.channels[i]['sample_rate'])])
ind[i] += self.channels[i]['sample_rate']
-
+ dataOfOneSecondInd += self.channels[i]['sample_rate']
+ self.blockWritePhysicalSamples(dataOfOneSecond)
for i in np.arange(len(data_list)):
if (np.size(data_list[i]) < ind[i] + self.channels[i]['sample_rate']):
notAtEnd = False
+ dataOfOneSecondInd = 0
for i in np.arange(len(data_list)):
lastSamples = np.zeros(int(self.channels[i]['sample_rate']))
lastSampleInd = int(np.max(data_list[i].shape) - ind[i])
lastSampleInd = int(np.min((lastSampleInd,int(self.channels[i]['sample_rate']))))
if lastSampleInd > 0:
lastSamples[:lastSampleInd] = data_list[i].flatten()[-lastSampleInd:]
+ # dataOfOneSecond[dataOfOneSecondInd:dataOfOneSecondInd+self.channels[i]['sample_rate']] = lastSamples
+ # dataOfOneSecondInd += self.channels[i]['sample_rate']
self.writePhysicalSamples(lastSamples)
+ # self.blockWritePhysicalSamples(dataOfOneSecond)
def writeAnnotation(self, onset_in_seconds, duration_in_seconds, description, str_format='utf-8'):
use blockwrite in order to speed up edf/bdf writing with several channels | holgern_pyedflib | train
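To make the interleaving in the patched `writeSamples` concrete: each one-second block concatenates `sample_rate` samples per channel, in channel order, so a single blockwrite replaces one write call per channel. A runnable numpy-only illustration — the sample rates are made up, and the writer call is commented out because it needs an open EDF file:

```python
import numpy as np

sample_rates = [256, 128]                         # hypothetical per-channel rates
data_list = [np.arange(512.0), np.arange(256.0)]  # two seconds of data per channel

for second in range(2):
    block = np.concatenate([
        channel[second * rate:(second + 1) * rate]
        for channel, rate in zip(data_list, sample_rates)
    ])
    assert block.size == sum(sample_rates)  # one block == one second
    # writer.blockWritePhysicalSamples(block)  # one call instead of len(data_list)
```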
bdda700cbe2361fb5ae11cb4899c571650dfd121 | diff --git a/nurbs/__init__.py b/nurbs/__init__.py
index <HASH>..<HASH> 100644
--- a/nurbs/__init__.py
+++ b/nurbs/__init__.py
@@ -15,4 +15,4 @@ The NURBS-Python package follows an object-oriented design as much as possible.
"""
-__version__ = "2.3.3"
+__version__ = "2.3.4" | Version bumped to <I> | orbingol_NURBS-Python | train |
6d3eda4ac5f88e081933af42661324d33dde06df | diff --git a/plugins/io.sarl.eclipse/src/io/sarl/eclipse/runtime/AbstractSREInstall.java b/plugins/io.sarl.eclipse/src/io/sarl/eclipse/runtime/AbstractSREInstall.java
index <HASH>..<HASH> 100644
--- a/plugins/io.sarl.eclipse/src/io/sarl/eclipse/runtime/AbstractSREInstall.java
+++ b/plugins/io.sarl.eclipse/src/io/sarl/eclipse/runtime/AbstractSREInstall.java
@@ -108,7 +108,15 @@ public abstract class AbstractSREInstall implements ISREInstall {
@Override
public boolean equals(Object obj) {
- if (obj instanceof ISREInstall) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null) {
+ return false;
+ }
+
+ if (this.getClass() == obj.getClass()) {
return getId().equals(((ISREInstall) obj).getId());
}
return false;
diff --git a/plugins/io.sarl.lang.core/src/io/sarl/lang/core/Address.java b/plugins/io.sarl.lang.core/src/io/sarl/lang/core/Address.java
index <HASH>..<HASH> 100644
--- a/plugins/io.sarl.lang.core/src/io/sarl/lang/core/Address.java
+++ b/plugins/io.sarl.lang.core/src/io/sarl/lang/core/Address.java
@@ -89,7 +89,15 @@ public class Address implements Serializable, Comparable<Address> {
@Override
@Pure
public boolean equals(Object obj) {
- if (obj instanceof Address) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null) {
+ return false;
+ }
+
+ if (this.getClass() == obj.getClass()) {
return equals((Address) obj);
}
return false;
diff --git a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionParameterTypes.java b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionParameterTypes.java
index <HASH>..<HASH> 100644
--- a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionParameterTypes.java
+++ b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionParameterTypes.java
@@ -74,7 +74,15 @@ public class ActionParameterTypes extends BasicEList<String> implements Comparab
@Override
public boolean equals(Object object) {
- if (super.equals(object) && object instanceof ActionParameterTypes) {
+ if (this == object) {
+ return true;
+ }
+
+ if (object == null) {
+ return false;
+ }
+
+ if (super.equals(object) && this.getClass() == object.getClass()) {
ActionParameterTypes types = (ActionParameterTypes) object;
return this.isVarargs == types.isVarargs;
}
diff --git a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionPrototype.java b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionPrototype.java
index <HASH>..<HASH> 100644
--- a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionPrototype.java
+++ b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/ActionPrototype.java
@@ -81,7 +81,12 @@ public class ActionPrototype implements Cloneable, Serializable, Comparable<Acti
if (obj == this) {
return true;
}
- if (obj instanceof ActionPrototype) {
+
+ if (obj == null) {
+ return false;
+ }
+
+ if (this.getClass() == obj.getClass()) {
ActionPrototype k = (ActionPrototype) obj;
return this.function.equals(k.function)
&& this.signature.equals(k.signature);
diff --git a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/QualifiedActionName.java b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/QualifiedActionName.java
index <HASH>..<HASH> 100644
--- a/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/QualifiedActionName.java
+++ b/plugins/io.sarl.lang/src/io/sarl/lang/actionprototype/QualifiedActionName.java
@@ -94,7 +94,12 @@ public class QualifiedActionName implements Cloneable, Serializable, Comparable<
if (obj == this) {
return true;
}
- if (obj instanceof QualifiedActionName) {
+
+ if (obj == null) {
+ return false;
+ }
+
+ if (this.getClass() == obj.getClass()) {
QualifiedActionName k = (QualifiedActionName) obj;
return Objects.equal(this.resourceID, k.resourceID)
&& Objects.equal( | squid:S<I> - equals methods should be symmetric and work for subclasses. | sarl_sarl | train |
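The refactoring applies the same template in every class. Distilled into a self-contained example — the class and field are illustrative, not taken from the SARL code base:

```java
final class Key {
    private final String id;

    Key(String id) {
        this.id = id;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // getClass() keeps equals symmetric under subclassing, unlike
        // instanceof, where sub.equals(base) and base.equals(sub) can disagree.
        if (obj == null || this.getClass() != obj.getClass()) {
            return false;
        }
        return this.id.equals(((Key) obj).id);
    }

    @Override
    public int hashCode() {
        return this.id.hashCode(); // must stay consistent with equals
    }
}
```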
223d182cf0070a606da247eac0f9d4ebe56f0a4d | diff --git a/src/main/java/com/twilio/sdk/verbs/Conference.java b/src/main/java/com/twilio/sdk/verbs/Conference.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/twilio/sdk/verbs/Conference.java
+++ b/src/main/java/com/twilio/sdk/verbs/Conference.java
@@ -126,6 +126,19 @@ public class Conference extends Verb {
}
/**
+ * Sets the beep behavior (the legacy way)
+ * <ul>
+ * <li>true - beep on enter and exit</li>
+ * <li>false - no beep</li>
+ * </ul>
+ *
+ * @param bool the new beep behavior
+ */
+ public void setBeep(Boolean bool) {
+ this.set("beep", bool.toString());
+ }
+
+ /**
* Sets the start conference on enter.
*
* @param bool the new start conference on enter
diff --git a/src/test/java/com/twilio/sdk/verbs/ConferenceTest.java b/src/test/java/com/twilio/sdk/verbs/ConferenceTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/twilio/sdk/verbs/ConferenceTest.java
+++ b/src/test/java/com/twilio/sdk/verbs/ConferenceTest.java
@@ -35,6 +35,16 @@ public class ConferenceTest {
c.setBeep(Conference.BEEP_FALSE);
assertEquals("<Conference beep=\"false\">foo_room</Conference>", c.toXML());
}
+
+ /**
+ * Test the Conference noun with beep false (legacy behavior)
+ */
+ @Test
+ public void testConferenceBeepFalseLegacy() {
+ Conference c = new Conference("foo_room");
+ c.setBeep(false);
+ assertEquals("<Conference beep=\"false\">foo_room</Conference>", c.toXML());
+ }
/**
* Test the Conference noun with beep true
@@ -47,6 +57,16 @@ public class ConferenceTest {
}
/**
+ * Test the Conference noun with beep true (legacy behavior)
+ */
+ @Test
+ public void testConferenceBeepTrueLegacy() {
+ Conference c = new Conference("foo_room");
+ c.setBeep(true);
+ assertEquals("<Conference beep=\"true\">foo_room</Conference>", c.toXML());
+ }
+
+ /**
* Test the conference record attribute.
*/
@Test | Resolve issue #<I> by adding support for legacy ```setBeep``` behavior for the ```Conference``` verb | twilio_twilio-java | train |
fbcedefccf3001deebad366170bd15e2bd29c27f | diff --git a/src/SwipeDirectionMixin.js b/src/SwipeDirectionMixin.js
index <HASH>..<HASH> 100644
--- a/src/SwipeDirectionMixin.js
+++ b/src/SwipeDirectionMixin.js
@@ -12,6 +12,13 @@ export default function SwipeDirectionMixin(Base) {
return class SwipeDirection extends Base {
/**
+ * Invokes the [symbols.goDown](symbols#goDown) method.
+ */
+ [symbols.swipeDown]() {
+ this[symbols.goDown]();
+ }
+
+ /**
* Invokes the [symbols.goRight](symbols#goRight) method.
*/
[symbols.swipeLeft]() {
@@ -25,5 +32,12 @@ export default function SwipeDirectionMixin(Base) {
this[symbols.goLeft]();
}
+ /**
+ * Invokes the [symbols.goUp](symbols#goUp) method.
+ */
+ [symbols.swipeUp]() {
+ this[symbols.goUp]();
+ }
+
}
} | SwipeDirectionMixin handles vertical swipes. | elix_elix | train |
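A sketch of a consumer now that vertical swipes are mapped: a component defines the `symbols.goUp`/`symbols.goDown` methods and the mixin routes swipe gestures to them. The import paths are assumptions, and in practice a touch-tracking mixin is also needed to generate the swipe calls:

```js
import symbols from './symbols.js';                       // path assumed
import SwipeDirectionMixin from './SwipeDirectionMixin.js';

class VerticalPager extends SwipeDirectionMixin(HTMLElement) {
  [symbols.goDown]() {
    this.dispatchEvent(new CustomEvent('page-next'));     // illustrative
  }
  [symbols.goUp]() {
    this.dispatchEvent(new CustomEvent('page-previous'));
  }
}

customElements.define('vertical-pager', VerticalPager);
```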