hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses: 1 value)
---|---|---|---|---|
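Each row below stores an entire unified diff as a single string, with `<HASH>`, `<I>`, and `<URL>` as redaction placeholders in the raw data. A minimal sketch of pulling the added and removed lines back out of such a string; the helper name and the sample fragment are illustrative, not part of the dataset:

```python
def split_diff(diff: str):
    """Split a unified diff string into (added, removed) content lines.

    Metadata lines ('diff --git', 'index', '---'/'+++' file headers,
    '@@' hunk headers) are skipped so they are not mistaken for
    added or removed content.
    """
    added, removed = [], []
    for line in diff.splitlines():
        if line.startswith(('diff --git', 'index ', '+++', '---', '@@')):
            continue  # diff metadata, not content
        if line.startswith('+'):
            added.append(line[1:])
        elif line.startswith('-'):
            removed.append(line[1:])
    return added, removed

# Usage with a fragment shaped like the rows in this table:
sample = (
    "diff --git a/foo.py b/foo.py\n"
    "index <HASH>..<HASH> 100644\n"
    "--- a/foo.py\n"
    "+++ b/foo.py\n"
    "@@ -1,2 +1,2 @@\n"
    "-old = 1\n"
    "+new = 2\n"
)
added, removed = split_diff(sample)
assert added == ["new = 2"] and removed == ["old = 1"]
```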
f76c5251f7b1a83b1b72013543d2f79ed96a3ce9 | diff --git a/scripts/checkUnminifiedVariables.php b/scripts/checkUnminifiedVariables.php
index <HASH>..<HASH> 100755
--- a/scripts/checkUnminifiedVariables.php
+++ b/scripts/checkUnminifiedVariables.php
@@ -12,8 +12,8 @@ $usedVars = [];
foreach (glob(__DIR__ . '/../tests/.cache/minifier.*.js') as $filepath)
{
$file = file_get_contents($filepath);
- $file = preg_replace('(<script>.*?(?:<|\\\\x3c)/script>)s', '', $file);
- $file = preg_replace('(data-s9e-livepreview-on\\w+="[^"]++")', '', $file);
+ $file = preg_replace('(<script>.*?(?:<|\\\\x3c)/script>)s', '', $file);
+ $file = preg_replace('((?:data-s9e-livepreview-)?on\\w+="[^"]++")', '', $file);
$file = preg_replace('(\\$.)', ' ', $file);
foreach ($regexps as $regexp) | Updated script [ci skip] | s9e_TextFormatter | train |
dfe27f8a8da105b9966963b9f02eea3b5965808f | diff --git a/malcolm/core/block.py b/malcolm/core/block.py
index <HASH>..<HASH> 100644
--- a/malcolm/core/block.py
+++ b/malcolm/core/block.py
@@ -1,9 +1,9 @@
from collections import OrderedDict
-from malcolm.core.loggable import Loggable
+from malcolm.core.monitorable import Monitorable
-class Block(Loggable):
+class Block(Monitorable):
"""Object consisting of a number of Attributes and Methods"""
def __init__(self, name):
@@ -11,7 +11,7 @@ class Block(Loggable):
Args:
name (str): Block name e.g. "BL18I:ZEBRA1"
"""
- super(Block, self).__init__(logger_name=name)
+ super(Block, self).__init__(name=name)
self.name = name
self._methods = OrderedDict()
self._attributes = OrderedDict() | Change Block to derive Monitorable
Follows the design change to move subscription handling to the Process
level, instead of the Block level. | dls-controls_pymalcolm | train |
bb3b587d0ab1123728d5abed1f6f20a2cf92178a | diff --git a/src/check.js b/src/check.js
index <HASH>..<HASH> 100644
--- a/src/check.js
+++ b/src/check.js
@@ -230,16 +230,23 @@ const manifestAndPackageJsonSameVersion = {
}
const connectorExistsInRegistry = {
- fn: (info, assert) => {
+ fn: async (info, assert) => {
+ let result = false
+ try {
+ await request(
+ `https://apps-registry.cozycloud.cc/registry/${info.manifest.slug}`
+ )
+ result = true
+ } catch (err) {
+ result = false
+ }
assert(
- info.applicationsInRegistry.data.find(
- app => app.slug === info.manifest.slug
- ) !== undefined,
+ result,
`The connector with the slug ${
info.manifest.slug
} should exist in the registry`
)
- return true
+ return result
},
nickname: 'registry',
message: 'The connector must exist in the registry'
@@ -342,10 +349,6 @@ const prepareInfo = async repository => {
'https://raw.githubusercontent.com/konnectors/cozy-konnector-template/master/.travis.yml'
)
- const applicationsInRegistry = await request({
- json: true,
- url: 'https://apps-registry.cozycloud.cc/registry'
- })
return {
eslintrc,
repository,
@@ -355,7 +358,6 @@ const prepareInfo = async repository => {
webpackCopyConfig,
templateTravisConfig,
git: await prepareGitInfo(repository),
- applicationsInRegistry,
read
}
} | fix: check that connector exists in the registry in a different way to avoid pagination | konnectors_konitor | train |
7537cd9827862798e72cc74b0697188f40892cc4 | diff --git a/src/main/java/water/exec/AST.java b/src/main/java/water/exec/AST.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/exec/AST.java
+++ b/src/main/java/water/exec/AST.java
@@ -6,6 +6,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import water.*;
+import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
@@ -473,9 +474,13 @@ class ASTAssign extends AST {
// Typed as a double ==> the row & col selectors are simple constants
if( slice._t == Type.DBL ) { // Typed as a double?
assert ary_rhs==null;
- long row = (long)((ASTNum)slice._rows)._d;
- int col = (int )((ASTNum)slice._cols)._d;
- ary.vecs()[col-1].set(row-1,d);
+ long row = (long)((ASTNum)slice._rows)._d-1;
+ int col = (int )((ASTNum)slice._cols)._d-1;
+ Chunk c = ary.vecs()[col].chunkForRow(row);
+ c.set(row,d);
+ Futures fs = new Futures();
+ c.close(c.cidx(),fs);
+ fs.blockForPending();
env.push(d);
return;
}
diff --git a/src/main/java/water/fvec/Vec.java b/src/main/java/water/fvec/Vec.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/fvec/Vec.java
+++ b/src/main/java/water/fvec/Vec.java
@@ -554,58 +554,19 @@ public class Vec extends Iced {
* 2^52), AND float values are stored in Vector. In this case, there is no
* common compatible data representation.
*
- * Will close the underlying chunk immediately after writing this value.
- * DO NOT use for bulk loads to the same chunk, otherwise there would be constant inflate-compress-DKV.put.
* */
- public final long set( long i, long l) {
- Chunk c = chunkForRow(i);
- long res = c.set(i, l);
- // immediately close the chunk, since caller has no chunk to close after done with bulk update.
- Futures fs = new Futures();
- c.close(c.cidx(),fs);
- fs.blockForPending();
- return res;
- }
+ public final long set( long i, long l) {return chunkForRow(i).set(i,l);}
+
/** Write element the slow way, as a double. Double.NaN will be treated as
* a set of a missing element.
- *
- * Will close the underlying chunk immediately after writing this value.
- * DO NOT use for bulk loads to the same chunk, otherwise there would be constant inflate-compress-DKV.put
* */
- public final double set( long i, double d) {
- Chunk c = chunkForRow(i);
- double res = c.set(i, d);
- // immediately close the chunk, since caller has no chunk to close after done with bulk update.
- Futures fs = new Futures();
- c.close(c.cidx(),fs);
- fs.blockForPending();
- return res;
- }
+ public final double set( long i, double d) {return chunkForRow(i).set(i,d);}
/** Write element the slow way, as a float. Float.NaN will be treated as
* a set of a missing element.
- *
- * Will close the underlying chunk immediately after writing this value.
- * DO NOT use for bulk loads to the same chunk, otherwise there would be constant inflate-compress-DKV.put
* */
- public final float set( long i, float f) {
- Chunk c = chunkForRow(i);
- float res = c.set(i, f);
- // immediately close the chunk, since caller has no chunk to close after done with bulk update.
- Futures fs = new Futures();
- c.close(c.cidx(),fs);
- fs.blockForPending();
- return res;
- }
+ public final float set( long i, float f) {return chunkForRow(i).set(i,f);}
/** Set the element as missing the slow way. */
- public final boolean setNA( long i ) {
- Chunk c = chunkForRow(i);
- boolean res = c.setNA(i);
- // immediately close the chunk, since caller has no chunk to close after done with bulk update.
- Futures fs = new Futures();
- c.close(c.cidx(),fs);
- fs.blockForPending();
- return res;
- }
+ public final boolean setNA( long i ) { return chunkForRow(i).setNA(i);}
/** Pretty print the Vec: [#elems, min/mean/max]{chunks,...} */
  @Override public String toString() { | Reverted my previous change in Vec, Vec.set now does not close the chunk; the caller is responsible for making sure it will be closed in the end.
Changed ASTAssign to explicitly close the updated chunk. | h2oai_h2o-2 | train |
d9fdcda90955215fd1e43e660887bf87174e400f | diff --git a/salt/states/file.py b/salt/states/file.py
index <HASH>..<HASH> 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -534,8 +534,8 @@ def symlink(
target,
force=False,
makedirs=False,
- user='root',
- group='root',
+ user=None,
+ group=None,
mode=None,
**kwargs):
'''
@@ -572,6 +572,14 @@ def symlink(
'result': True,
'comment': ''}
+ if user is None:
+ user = __opts__['user']
+
+ if group is None:
+ group = __salt__['file.gid_to_group'](
+ __salt__['user.info'](user).get('gid', 0)
+ )
+
preflight_errors = []
uid = __salt__['file.user_to_uid'](user)
gid = __salt__['file.group_to_gid'](group) | Use the running user and default group for symlink ownership
This commit modifies the work done in <I>b<I>d. Instead of using
root:root for the default symlink ownership if no user/group is
specified, the user under which the salt-minion is running is used when
'user' is not specified, and the default group of whatever is assigned
to 'user' is used (whether specified directly or derived using the
aforementioned method). | saltstack_salt | train |
2ab41ec1f71f405f7e718db2f11acbc010862b0d | diff --git a/packages/babel/src/transformation/transformers/internal/block-hoist.js b/packages/babel/src/transformation/transformers/internal/block-hoist.js
index <HASH>..<HASH> 100644
--- a/packages/babel/src/transformation/transformers/internal/block-hoist.js
+++ b/packages/babel/src/transformation/transformers/internal/block-hoist.js
@@ -26,7 +26,10 @@ export var visitor = {
var hasChange = false;
for (var i = 0; i < node.body.length; i++) {
var bodyNode = node.body[i];
- if (bodyNode && bodyNode._blockHoist != null) hasChange = true;
+ if (bodyNode && bodyNode._blockHoist != null) {
+ hasChange = true;
+ break;
+ }
}
if (!hasChange) return; | Break loop as soon as change detected. | babel_babel | train |
a85451addc4e7170673b450c33ddf4c76de7c836 | diff --git a/pkg/volume/gce_pd/gce_util.go b/pkg/volume/gce_pd/gce_util.go
index <HASH>..<HASH> 100644
--- a/pkg/volume/gce_pd/gce_util.go
+++ b/pkg/volume/gce_pd/gce_util.go
@@ -42,19 +42,34 @@ func (util *GCEDiskUtil) AttachAndMountDisk(pd *gcePersistentDisk, globalPDPath
if err := gce.(*gce_cloud.GCECloud).AttachDisk(pd.pdName, pd.readOnly); err != nil {
return err
}
- devicePath := path.Join("/dev/disk/by-id/", "google-"+pd.pdName)
+
+ devicePaths := []string{
+ path.Join("/dev/disk/by-id/", "google-"+pd.pdName),
+ path.Join("/dev/disk/by-id/", "scsi-0Google_PersistentDisk_"+pd.pdName),
+ }
+
if pd.partition != "" {
- devicePath = devicePath + "-part" + pd.partition
+ for i, path := range devicePaths {
+ devicePaths[i] = path + "-part" + pd.partition
+ }
}
//TODO(jonesdl) There should probably be better method than busy-waiting here.
numTries := 0
+ devicePath := ""
+ // Wait for the disk device to be created
for {
- _, err := os.Stat(devicePath)
- if err == nil {
- break
+ for _, path := range devicePaths {
+ _, err := os.Stat(path)
+ if err == nil {
+ devicePath = path
+ break
+ }
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
}
- if err != nil && !os.IsNotExist(err) {
- return err
+ if devicePath != "" {
+ break
}
numTries++
if numTries == 10 { | Support default udev GCE PD device path
The expected GCE PD device name is google-{name of disk}. This is
because standard GCE images contain a udev rules file which renames
the GCE PD device to google-{name of disk} <URL> | kubernetes_kubernetes | train |
be5a6f7e7dde77ac46ea35b78f4478672326142c | diff --git a/dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java b/dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java
index <HASH>..<HASH> 100644
--- a/dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java
+++ b/dependency-check-core/src/main/java/org/owasp/dependencycheck/analyzer/ArchiveAnalyzer.java
@@ -295,7 +295,7 @@ public class ArchiveAnalyzer extends AbstractAnalyzer implements Analyzer {
final File d = new File(destination, entry.getName());
if (!d.exists()) {
if (!d.mkdirs()) {
- final String msg = String.format("Unable to create '%s'.", d.getAbsolutePath());
+ final String msg = String.format("Unable to create directory '%s'.", d.getAbsolutePath());
throw new AnalysisException(msg);
}
}
@@ -306,6 +306,13 @@ public class ArchiveAnalyzer extends AbstractAnalyzer implements Analyzer {
BufferedOutputStream bos = null;
FileOutputStream fos;
try {
+ File parent = file.getParentFile();
+ if (!parent.isDirectory()) {
+ if (!parent.mkdirs()) {
+ final String msg = String.format("Unable to build directory '%s'.", parent.getAbsolutePath());
+ throw new AnalysisException(msg);
+ }
+ }
fos = new FileOutputStream(file);
bos = new BufferedOutputStream(fos, BUFFER_SIZE);
int count; | ensured subdirectories are built while extracting tar files - issue #<I>
Former-commit-id: af8b<I>ed9be<I>e2aad<I>e<I>cc5d3e | jeremylong_DependencyCheck | train |
310406d6a29adf70b9f837eba9465838834c2d91 | diff --git a/src/main/java/act/job/AppJobManager.java b/src/main/java/act/job/AppJobManager.java
index <HASH>..<HASH> 100644
--- a/src/main/java/act/job/AppJobManager.java
+++ b/src/main/java/act/job/AppJobManager.java
@@ -33,6 +33,8 @@ import org.joda.time.DateTime;
import org.joda.time.Seconds;
import org.osgl.$;
import org.osgl.exception.NotAppliedException;
+import org.osgl.logging.LogManager;
+import org.osgl.logging.Logger;
import org.osgl.util.C;
import org.osgl.util.E;
import org.osgl.util.S;
@@ -44,6 +46,8 @@ import java.util.concurrent.*;
public class AppJobManager extends AppServiceBase<AppJobManager> {
+ private static final Logger LOGGER = LogManager.get(AppJobManager.class);
+
private ScheduledThreadPoolExecutor executor;
private ConcurrentMap<String, _Job> jobs = new ConcurrentHashMap<String, _Job>();
private ConcurrentMap<String, ScheduledFuture> scheduled = new ConcurrentHashMap<>();
@@ -62,6 +66,7 @@ public class AppJobManager extends AppServiceBase<AppJobManager> {
@Override
protected void releaseResources() {
+ LOGGER.trace("release job manager resources");
for (_Job job : jobs.values()) {
job.destroy();
}
@@ -180,10 +185,20 @@ public class AppJobManager extends AppServiceBase<AppJobManager> {
}
public void on(AppEventId appEvent, String jobId, final Runnable runnable, boolean runImmediatelyIfEventDispatched) {
+ boolean traceEnabled = LOGGER.isTraceEnabled();
+ if (traceEnabled) {
+ LOGGER.trace("binding job[%s] to app event: %s, run immediately if event dispatched: %s", jobId, appEvent, runImmediatelyIfEventDispatched);
+ }
_Job job = jobById(appEventJobId(appEvent));
if (null == job) {
+ if (traceEnabled) {
+ LOGGER.trace("process delayed job: %s", jobId);
+ }
processDelayedJob(wrap(runnable), runImmediatelyIfEventDispatched);
} else {
+ if (traceEnabled) {
+ LOGGER.trace("schedule job: %s", jobId);
+ }
job.addPrecedenceJob(_Job.once(jobId, runnable, this));
}
}
@@ -291,7 +306,10 @@ public class AppJobManager extends AppServiceBase<AppJobManager> {
private void initExecutor(App app) {
int poolSize = app.config().jobPoolSize();
executor = new ScheduledThreadPoolExecutor(poolSize, new AppThreadFactory("jobs"), new ThreadPoolExecutor.AbortPolicy());
- //JDK1.7 API: executor.setRemoveOnCancelPolicy(true);
+ executor.setRemoveOnCancelPolicy(true);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("init executor with thread pool: %s", poolSize);
+ }
}
private void createAppEventListener(AppEventId appEventId) { | add trace statements to AppJobManager | actframework_actframework | train |
ed8952cc648cb470171543b8b5ab833ef85528f2 | diff --git a/couch.js b/couch.js
index <HASH>..<HASH> 100644
--- a/couch.js
+++ b/couch.js
@@ -148,8 +148,9 @@ function Database (opts) {
if(typeof opts.couch !== 'string')
throw new Error('Required "couch" option with URL of CouchDB');
+ opts.db = opts.db || "";
if(typeof opts.db !== 'string')
- throw new Error('Required "db" option with db name for queues');
+ throw new Error('Optional "db" option must be string');
self.name = opts.db;
self.couch = new Couch({'url':opts.couch});
@@ -186,7 +187,7 @@ Database.prototype.confirmed = function(cb) {
self.couch.confirmed(function() {
var state = self.couch.known_dbs[self.name];
if(state && state.secObj) {
- self.log.debug('Confirmation was cached: ' + self.name);
+ self.log.debug('Confirmation was cached: ' + lib.JS(self.name));
self.secObj = state.secObj;
return cb();
} | I'm pretty sure an empty db is okay | jhs_cqs | train |
b756b98a435adc067cc5f4fe15263dfc79acc0c6 | diff --git a/search.php b/search.php
index <HASH>..<HASH> 100644
--- a/search.php
+++ b/search.php
@@ -234,7 +234,7 @@ echo '<div id="search-page">
echo '</div>';
}
echo '<div class="label">' , WT_I18N::translate('Family trees'), '</div>
-    <div class="value">';
+ <div id="search_trees" class="value">';
//-- sorting menu by gedcom filename
asort($all_gedcoms);
foreach ($all_gedcoms as $ged_id=>$gedcom) { | General Search, Select ALL/None, SVN <I> - Bug #<I> | fisharebest_webtrees | train |
0b8ca11b678816f24166b903cfb636382d8f5ebc | diff --git a/src/Context/Website/ConfigurableUrlToWebsiteMap.php b/src/Context/Website/ConfigurableUrlToWebsiteMap.php
index <HASH>..<HASH> 100644
--- a/src/Context/Website/ConfigurableUrlToWebsiteMap.php
+++ b/src/Context/Website/ConfigurableUrlToWebsiteMap.php
@@ -67,7 +67,7 @@ class ConfigurableUrlToWebsiteMap implements UrlToWebsiteMap
*/
private static function splitConfigRecord(string $mapping): array
{
- if (!preg_match('/^([^=]+)=(.+)/', $mapping, $matches)) {
+ if (! preg_match('/^([^=]+)=(.+)/', $mapping, $matches)) {
$message = sprintf('Unable to parse the website to code mapping record "%s"', $mapping);
throw new InvalidWebsiteMapConfigRecordException($message);
}
diff --git a/src/ProductRelations/ContentDelivery/ProductRelationsApiV1GetRequestHandler.php b/src/ProductRelations/ContentDelivery/ProductRelationsApiV1GetRequestHandler.php
index <HASH>..<HASH> 100644
--- a/src/ProductRelations/ContentDelivery/ProductRelationsApiV1GetRequestHandler.php
+++ b/src/ProductRelations/ContentDelivery/ProductRelationsApiV1GetRequestHandler.php
@@ -53,7 +53,7 @@ class ProductRelationsApiV1GetRequestHandler extends ApiRequestHandler
final protected function getResponse(HttpRequest $request): HttpResponse
{
- if (!$this->canProcess($request)) {
+ if (! $this->canProcess($request)) {
throw $this->getUnableToProcessRequestException($request);
} | Issue #<I>: Add one space after unary not operator | lizards-and-pumpkins_catalog | train |
bf862896f938b9ec646dfc27ad8561796240e12f | diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -12,8 +12,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import jpype.imports
import jpype
+import jpype.imports
from unittest import mock
import sys
import os | Move imports down (how did this ever work???) | jpype-project_jpype | train |
5122bbb3c20cbc77b9a324c88852a9fcdcdbb333 | diff --git a/actionpack/test/dispatch/static_test.rb b/actionpack/test/dispatch/static_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/dispatch/static_test.rb
+++ b/actionpack/test/dispatch/static_test.rb
@@ -37,6 +37,10 @@ module StaticTests
end
def test_served_static_file_with_non_english_filename
+    if RUBY_ENGINE == 'jruby '
+      skip "Stop skiping if following bug gets fixed: " \
+        "http://jira.codehaus.org/browse/JRUBY-7192"
+    end
assert_html "means hello in Japanese\n", get("/foo/#{Rack::Utils.escape("こんにちは.html")}")
end | Skip test which is broken on jruby
This test has been broken for quite a while & is expected to remain broken as
encoding issues are hardest to fix in JRuby. So let's skip this test for
now | rails_rails | train |
63d8cbb87ce685a2ab29dd0f0b84bb7c5a3fe0fe | diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -3,10 +3,12 @@ var path = require('path'),
semver = require("semver"),
fs = require("fs"),
path = require("path"),
+ _ = require('underscore'),
spawn = require("child_process").spawn,
buildDirectory = path.resolve(process.cwd(), '.build'),
distDirectory = path.resolve(process.cwd(), '.dist'),
- _ = require('underscore'),
+ configLoader = require('./core/config-loader.js'),
+
configureGrunt = function (grunt) {
// load all grunt tasks
@@ -351,6 +353,13 @@ var path = require('path'),
process.env.NODE_ENV = process.env.TRAVIS ? 'travis' : 'testing';
});
+ grunt.registerTask('loadConfig', function () {
+ var done = this.async();
+ configLoader.loadConfig().then(function () {
+ done();
+ });
+ });
+
// Update the package information after changes
grunt.registerTask('updateCurrentPackageInfo', function () {
cfg.pkg = grunt.file.readJSON('package.json');
@@ -681,13 +690,12 @@ var path = require('path'),
"watch"
]);
-
// Prepare the project for development
// TODO: Git submodule init/update (https://github.com/jaubourg/grunt-update-submodules)?
grunt.registerTask("init", ["shell:bourbon", "sass:admin", 'handlebars']);
- // Run unit tests
- grunt.registerTask("test-unit", ['setTestEnv', "mochacli:all"]);
+ // Run unit tests
+ grunt.registerTask("test-unit", ['setTestEnv', 'loadConfig', "mochacli:all"]);
// Run casperjs tests only
grunt.registerTask('test-functional', ['setTestEnv', 'express:test', 'spawn-casperjs']);
diff --git a/core/test/unit/admin_spec.js b/core/test/unit/admin_spec.js
index <HASH>..<HASH> 100644
--- a/core/test/unit/admin_spec.js
+++ b/core/test/unit/admin_spec.js
@@ -38,7 +38,6 @@ describe('Admin Controller', function() {
});
});
-
describe('valid file', function() {
var clock; | Unit tests require config file to be present.
Conflicts:
Gruntfile.js
core/test/unit/admin_spec.js | TryGhost_Ghost | train |
26f5bcd725894926d0d839514cc1b69556f505a6 | diff --git a/discord/channel.py b/discord/channel.py
index <HASH>..<HASH> 100644
--- a/discord/channel.py
+++ b/discord/channel.py
@@ -245,16 +245,22 @@ class Channel(Hashable):
return Permissions.all()
member_role_ids = set(map(lambda r: r.id, member.roles))
+ denies = 0
+ allows = 0
# Apply channel specific role permission overwrites
for overwrite in self._permission_overwrites:
if overwrite.type == 'role' and overwrite.id in member_role_ids:
- base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny)
+ denies |= overwrite.deny
+ allows |= overwrite.allow
+
+ base.handle_overwrite(allow=allows, deny=denies)
# Apply member specific permission overwrites
for overwrite in self._permission_overwrites:
if overwrite.type == 'member' and overwrite.id == member.id:
base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny)
+ break
# default channels can always be read
if self.is_default: | Fix bug when permission overwrites could be applied out of order.
The bug was due to the fact that the way overwrites work is by doing
a loop of all the values and then applying deny first and then allow.
That way the overwrite is defined if role A and role B deny a
permission but role C allows it (and said member has A, B, C roles)
then the resolution should allow it rather than deny it regardless of
the order of the data it is received in. | Rapptz_discord.py | train |
1988c957e02311cb4f0f91af529b33b27a1a0a7e | diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index <HASH>..<HASH> 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -372,7 +372,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
elif method == 'PUT':
return self._key_response_put(request, body, bucket_name, query, key_name, headers)
elif method == 'HEAD':
- return self._key_response_head(bucket_name, key_name, headers)
+ return self._key_response_head(bucket_name, query, key_name, headers)
elif method == 'DELETE':
return self._key_response_delete(bucket_name, query, key_name, headers)
elif method == 'POST':
@@ -468,8 +468,9 @@ class ResponseObject(_TemplateEnvironmentMixin):
headers.update(new_key.response_dict)
return 200, headers, template.render(key=new_key)
- def _key_response_head(self, bucket_name, key_name, headers):
- key = self.backend.get_key(bucket_name, key_name)
+ def _key_response_head(self, bucket_name, query, key_name, headers):
+ version_id = query.get('versionId', [None])[0]
+ key = self.backend.get_key(bucket_name, key_name, version_id=version_id)
if key:
headers.update(key.metadata)
headers.update(key.response_dict) | Pass query to _key_response_head for versioning support. | spulec_moto | train |
7e63a51cc3bc8a23ddb3a43fc80197bffe5f5f49 | diff --git a/scripts/lint-dependencies.js b/scripts/lint-dependencies.js
index <HASH>..<HASH> 100644
--- a/scripts/lint-dependencies.js
+++ b/scripts/lint-dependencies.js
@@ -89,6 +89,7 @@ const getCodeContent = async (rawPkgPath) => {
`${pkgPath}/**/*.tsx`,
`${pkgPath}/**/*.js`,
`${pkgPath}/**/*.jsx`,
+ `!${pkgPath}/webhint.js`,
`!${pkgPath}/dist/**/*`,
`!${pkgPath}/node_modules/**/*` // needed when we are inside a package like extension-vscode
], { gitignore: false }); | Chore: Exclude worker bundle from dependency linting | webhintio_hint | train |
d5e3fc20994f338358a18b33bd8818c75bf1484b | diff --git a/scapy/sendrecv.py b/scapy/sendrecv.py
index <HASH>..<HASH> 100644
--- a/scapy/sendrecv.py
+++ b/scapy/sendrecv.py
@@ -734,76 +734,48 @@ Examples:
@conf.commands.register
-def bridge_and_sniff(if1, if2, count=0, store=1, prn=None, lfilter=None,
- L2socket=None, timeout=None, stop_filter=None, *args,
- **kargs):
- """Forward traffic between two interfaces and sniff packets exchanged
-bridge_and_sniff([count=0,] [prn=None,] [store=1,] [offline=None,]
-[lfilter=None,] + L2Socket args) -> list of packets
-
- count: number of packets to capture. 0 means infinity
- store: whether to store sniffed packets or discard them
- prn: function to apply to each packet. If something is returned,
- it is displayed. Ex:
- ex: prn = lambda x: x.summary()
-lfilter: python function applied to each packet to determine
- if further action may be done
- ex: lfilter = lambda x: x.haslayer(Padding)
-timeout: stop sniffing after a given time (default: None)
-L2socket: use the provided L2socket
-stop_filter: python function applied to each packet to determine
- if we have to stop the capture after this packet
- ex: stop_filter = lambda x: x.haslayer(TCP)
+def bridge_and_sniff(if1, if2, prn=None, L2socket=None, *args, **kargs):
+ """Forward traffic between interfaces if1 and if2, sniff and return the
+exchanged packets.
+
+Arguments:
+
+ if1, if2: the interfaces to use
+
+ The other arguments are the same than for the function sniff(),
+ except for opened_socket, offline and iface that are ignored.
+ See help(sniff) for more.
+
"""
- c = 0
+ for arg in ['opened_socket', 'offline', 'iface']:
+ if arg in kargs:
+ log_runtime.warning("Argument %s cannot be used in "
+ "bridge_and_sniff() -- ignoring it.", arg)
+ del kargs[arg]
if L2socket is None:
L2socket = conf.L2socket
s1 = L2socket(iface=if1)
s2 = L2socket(iface=if2)
- peerof={s1:s2,s2:s1}
- label={s1:if1, s2:if2}
-
- lst = []
- if timeout is not None:
- stoptime = time.time()+timeout
- remain = None
- try:
- stop_event = False
- while not stop_event:
- if timeout is not None:
- remain = stoptime-time.time()
- if remain <= 0:
- break
- if conf.use_bpf:
- from scapy.arch.bpf.supersocket import bpf_select
- ins = bpf_select([s1, s2], remain)
- else:
- ins, _, _ = select([s1, s2], [], [], remain)
+ peers = {if1: s2, if2: s1}
+ def prn_send(pkt):
+ try:
+ sendsock = peers[pkt.sniffed_on]
+ except KeyError:
+ return
+ try:
+ sendsock.send(pkt.original)
+ except:
+ log_runtime.warning('Cannot forward packet [%s] received from %s',
+ pkt.summary(), pkt.sniffed_on, exc_info=True)
+ if prn is None:
+ prn = prn_send
+ else:
+ prn_orig = prn
+ def prn(pkt):
+ prn_send(pkt)
+ return prn_orig(pkt)
- for s in ins:
- p = s.recv()
- if p is not None:
- peerof[s].send(p.original)
- if lfilter and not lfilter(p):
- continue
- if store:
- p.sniffed_on = label[s]
- lst.append(p)
- c += 1
- if prn:
- r = prn(p)
- if r is not None:
- print(r)
- if stop_filter and stop_filter(p):
- stop_event = True
- break
- if 0 < count <= c:
- stop_event = True
- break
- except KeyboardInterrupt:
- pass
- finally:
- return plist.PacketList(lst,"Sniffed")
+ return sniff(opened_socket={s1: if1, s2: if2}, prn=prn, *args, **kargs)
@conf.commands.register | Implement bridge_and_sniff() using sniff()
This removes duplicated code and should make bridge_and_sniff()
function work under Windows. | secdev_scapy | train |
52b2af49281260f4940a0022446d83d828208e7f | diff --git a/lib/commands/quick.js b/lib/commands/quick.js
index <HASH>..<HASH> 100644
--- a/lib/commands/quick.js
+++ b/lib/commands/quick.js
@@ -61,7 +61,7 @@ function quick(url, options) {
});
} else if (options.count) {
script.config.phases.push({
- duration: options.duration * 1000 || Math.ceil(options.count / 50),
+ duration: options.duration || Math.ceil(options.count / 50),
arrivalCount: options.count || 1
});
} else { | Set correct duration for "quick" when used with the "-c" option | artilleryio_artillery | train |
043f62882073fe0c687987a53b192e4ddb22f0ad | diff --git a/demcoreg/compute_diff.py b/demcoreg/compute_diff.py
index <HASH>..<HASH> 100755
--- a/demcoreg/compute_diff.py
+++ b/demcoreg/compute_diff.py
@@ -94,7 +94,7 @@ def main():
print("Warping rasters to same res/extent/proj")
#This will check input param for validity, could do beforehand
- ds_list = warplib.memwarp_multi_fn(fn_list, extent=args.te, res=args.tr, t_srs=args.t_srs)
+ ds_list = warplib.memwarp_multi_fn(fn_list, extent=args.te, res=args.tr, t_srs=args.t_srs, r='cubic')
r1_ds = ds_list[0]
r2_ds = ds_list[1] | compute_diff: default resampling should be cubic | dshean_demcoreg | train |
d73e711b65a6bf2a07c97f696e276ee8d89fbc74 | diff --git a/config/rollup.config.js b/config/rollup.config.js
index <HASH>..<HASH> 100644
--- a/config/rollup.config.js
+++ b/config/rollup.config.js
@@ -43,7 +43,7 @@ export default {
autoprefixer(),
postcssModules({
globalModulePaths: [/src\/scss/, /src\/components\/(?!Button)/],
- generateScopedName: "TDS_[name]__[local]___[hash:base64:5]",
+ generateScopedName: 'TDS_[name]__[local]___[hash:base64:5]',
getJSON (id, exportTokens) {
cssExportMap[id] = exportTokens;
}
diff --git a/config/styleguide.config.js b/config/styleguide.config.js
index <HASH>..<HASH> 100644
--- a/config/styleguide.config.js
+++ b/config/styleguide.config.js
@@ -187,6 +187,7 @@ module.exports = {
loader: 'css-loader',
options: {
modules: true,
+ localIdentName: 'TDS_[name]__[local]___[hash:base64:5]',
importLoaders: 1, // Number of loaders applied before CSS loader
}
}, | docs(css-modules): Add local identifier name for CSS modules in docs site.
So that it matches the published package names. | telus_tds-core | train |
7b431a2fb37bd6a15e341f4384c64795e577ee1e | diff --git a/abydos/distance.py b/abydos/distance.py
index <HASH>..<HASH> 100644
--- a/abydos/distance.py
+++ b/abydos/distance.py
@@ -1301,7 +1301,7 @@ def sim_strcmp95(src, tar, long_strings=False):
k = n_trans = 0
for i in range(len(ying)):
if ying_flag[i] != 0:
- for j in range(k, len(yang)):
+ for j in range(k, len(yang)): # pragma: no branch
if yang_flag[j] != 0:
k = j + 1
break
@@ -1487,7 +1487,7 @@ def sim_jaro_winkler(src, tar, qval=1, mode='winkler', long_strings=False,
k = n_trans = 0
for i in range(lens):
if src_flag[i] != 0:
- for j in range(k, lent):
+ for j in range(k, lent): # pragma: no branch
if tar_flag[j] != 0:
k = j + 1
break
@@ -1508,8 +1508,7 @@ def sim_jaro_winkler(src, tar, qval=1, mode='winkler', long_strings=False,
i = 0
while (i < j) and (src[i] == tar[i]):
i += 1
- if i:
- weight += i * scaling_factor * (1.0 - weight)
+ weight += i * scaling_factor * (1.0 - weight)
# Optionally adjust for long strings.
diff --git a/tests/test_distance.py b/tests/test_distance.py
index <HASH>..<HASH> 100644
--- a/tests/test_distance.py
+++ b/tests/test_distance.py
@@ -1177,6 +1177,17 @@ class JaroWinklerTestCases(unittest.TestCase):
self.assertAlmostEqual(sim_strcmp95('MARTHA', 'MARHTA', True),
0.97083333)
+ # cover case where we don't boost, etc.
+ self.assertAlmostEqual(sim_strcmp95('A', 'ABCDEFGHIJK'), 69/99)
+ self.assertAlmostEqual(sim_strcmp95('A', 'ABCDEFGHIJK', True), 69 / 99)
+ self.assertAlmostEqual(sim_strcmp95('d', 'abcdefgh'), 0.708333333)
+ self.assertAlmostEqual(sim_strcmp95('d', 'abcdefgh', True),
+ 0.708333333)
+ self.assertAlmostEqual(sim_strcmp95('1', 'abc1efgh', True),
+ 0.708333333)
+ self.assertAlmostEqual(sim_strcmp95('12hundredths', '12hundred', True),
+ 0.916666667)
+
def test_dist_strcmp95(self):
"""Test abydos.distance.dist_strcmp95."""
self.assertEqual(dist_strcmp95('', ''), 0) | completed branch coverage for strcmp<I> & jaro_winkler | chrislit_abydos | train |
90d19b4131559955ce0604affd49fc0d6ba9e400 | diff --git a/lib/Doctrine/ORM/Version.php b/lib/Doctrine/ORM/Version.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/ORM/Version.php
+++ b/lib/Doctrine/ORM/Version.php
@@ -35,7 +35,7 @@ class Version
/**
* Current Doctrine Version
*/
- const VERSION = '2.6.0';
+ const VERSION = '2.6.1-DEV';
/**
* Compares a Doctrine version with the current one. | Bumping development version to <I>-DEV | doctrine_orm | train |
f4b24934e1813213485efd1b2953eeeae89c741d | diff --git a/certsuite/cert.py b/certsuite/cert.py
index <HASH>..<HASH> 100755
--- a/certsuite/cert.py
+++ b/certsuite/cert.py
@@ -381,9 +381,8 @@ def _run(args, logger):
# wait here to make sure marionette is running
logger.debug('Attempting to set up port forwarding for marionette')
- if dm.forward("tcp:2828", "tcp:2828") != 0:
- raise Exception("Can't use localhost:2828 for port forwarding." \
- "Is something else using port 2828?")
+ dm.forward("tcp:2828", "tcp:2828")
+
retries = 0
while retries < 5:
try: | Don't check success of dm.forward | mozilla-b2g_fxos-certsuite | train |
4098abc7ecde9db2bfd44eb333d6067c0989b561 | diff --git a/.travis.yml b/.travis.yml
index <HASH>..<HASH> 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,14 +7,13 @@
language: python
env:
- - CONDA="python=2.6 numpy=1.6 scipy=0.11"
- - CONDA="python=2.7 numpy scipy"
+ - CONDA="python=2.7 numpy=1.7.1 scipy=0.12"
- CONDA="python=3.3 numpy scipy"
- CONDA="python=3.4 numpy scipy"
before_install:
- sudo apt-get update -qq;
- - if [[ $CONDA == python=2.6* ]]; then
+ - if [[ $CONDA == python=3.3* ]]; then
sudo apt-get install -qq octave;
else
sudo apt-add-repository -y ppa:picaso/octave;
diff --git a/oct2py/tests/test_misc.py b/oct2py/tests/test_misc.py
index <HASH>..<HASH> 100644
--- a/oct2py/tests/test_misc.py
+++ b/oct2py/tests/test_misc.py
@@ -100,11 +100,17 @@ class MiscTests(test.TestCase):
def test_threads(self):
from oct2py import thread_check
- thread_check()
+ try:
+ thread_check()
+ except TypeError:
+ thread_check.thread_check()
def test_speed_check(self):
from oct2py import speed_check
- speed_check()
+ try:
+ speed_check()
+ except TypeError:
+ speed_check.speed_check()
def test_plot(self):
n = self.oc.figure()
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,8 +7,8 @@ MAINTAINER = 'Steven Silvester'
MAINTAINER_EMAIL = '[email protected]'
URL = 'http://github.com/blink1073/oct2py'
LICENSE = 'MIT'
-REQUIRES = ["numpy (>= 1.6.2)", "scipy (>= 0.11.0)"]
-PACKAGES = [DISTNAME, '%s.tests' % DISTNAME, '%s/ipython' % DISTNAME,
+REQUIRES = ["numpy >= 1.7.1", "scipy >= 0.12"]
+PACKAGES = [DISTNAME, '%s.tests' % DISTNAME, '%s/ipython' % DISTNAME,
'%s/ipython/tests' % DISTNAME]
PACKAGE_DATA = {DISTNAME: ['tests/*.m']}
CLASSIFIERS = """\
@@ -18,7 +18,6 @@ Intended Audience :: Science/Research
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
-Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
@@ -53,5 +52,5 @@ setup(
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
classifiers=filter(None, CLASSIFIERS.split('\n')),
- requires=REQUIRES
+ install_requires=REQUIRES
Update dependencies and make thread and speed checks more robust. | blink1073_oct2py | train |
1706b89edaf5cfa101bc2997e244829c1a0571f0 | diff --git a/NavigationReactNative/sample/web/Motion.js b/NavigationReactNative/sample/web/Motion.js
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/sample/web/Motion.js
+++ b/NavigationReactNative/sample/web/Motion.js
@@ -29,7 +29,7 @@ class Motion extends React.Component {
var end = !dataByKey[item.key] ? leave(item.data) : update(dataByKey[item.key]);
var equal = this.areEqual(item.end, end);
var rest = equal ? item.progress === 1 : false;
- var progress = equal ? Math.max(Math.min(item.progress + ((tick - item.tick) / 500), 1), 0) : 0;
+ var progress = equal ? Math.min(item.progress + ((tick - item.tick) / 500), 1) : 0;
var interpolators = equal ? item.interpolators : this.getInterpolators(item.style, end);
var style = this.interpolateStyle(interpolators, end, progress);
if (rest && !item.rest) { | Removed max because progress can't be < 0 | grahammendick_navigation | train |
3168da0af75bd05082be4909deaa1e3eb1e78e27 | diff --git a/activesupport/lib/active_support/number_helper/rounding_helper.rb b/activesupport/lib/active_support/number_helper/rounding_helper.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/number_helper/rounding_helper.rb
+++ b/activesupport/lib/active_support/number_helper/rounding_helper.rb
@@ -40,11 +40,11 @@ module ActiveSupport
def convert_to_decimal(number)
case number
when Float, String
- number = BigDecimal(number.to_s)
+ BigDecimal(number.to_s)
when Rational
- number = BigDecimal(number, digit_count(number.to_i) + precision)
+ BigDecimal(number, digit_count(number.to_i) + precision)
else
- number = number.to_d
+ number.to_d
end
end | Don't create extra assignment, just return | rails_rails | train |
a3a22cb8b30b4bd5ecd11d325a62fa13b62e38dd | diff --git a/lib/cloudapp/drop.rb b/lib/cloudapp/drop.rb
index <HASH>..<HASH> 100644
--- a/lib/cloudapp/drop.rb
+++ b/lib/cloudapp/drop.rb
@@ -177,6 +177,13 @@ module CloudApp
self.class.recover self.href
end
+ # Is the drop an image?
+ #
+ # @return [Boolean]
+ def image?
+ item_type == 'image'
+ end
+
# Is the drop a bookmark?
#
# @return [Boolean]
@@ -184,13 +191,54 @@ module CloudApp
item_type == 'bookmark'
end
- # Is the drop an image?
+ # Is the drop a text file?
#
# @return [Boolean]
- def image?
- item_type == 'image'
+ def text?
+ item_type == 'text'
+ end
+
+ # Is the drop an archive?
+ #
+ # @return [Boolean]
+ def archive?
+ item_type == 'archive'
end
-
+
+ # Is the drop an audio file?
+ #
+ # @return [Boolean]
+ def audio?
+ item_type == 'audio'
+ end
+
+ # Is the drop a video file?
+ #
+ # @return [Boolean]
+ def video?
+ item_type == 'video'
+ end
+
+ # Is the drop an unknown file type?
+ #
+ # @return [Boolean]
+ def unknown?
+ item_type == 'unknown'
+ end
+
+ # Return the drops raw data as a string, useful for returning the text of documents
+ #
+ # @return [String]
+ def raw
+ @raw ||= HTTParty.get(content_url).to_s
+ end
+
+ private
+
+ def extension
+ File.extname(content_url)[1..-1].to_s.downcase if content_url
+ end
+
end
# The Item class is now deprecated in favour of the Drop class. | Added drop item type boolean matchers. | aaronrussell_cloudapp_api | train |
f9b0a35932ed80bd98f4121fd47b1573271f6646 | diff --git a/lenstronomy/LensModel/Solver/solver4point.py b/lenstronomy/LensModel/Solver/solver4point.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/LensModel/Solver/solver4point.py
+++ b/lenstronomy/LensModel/Solver/solver4point.py
@@ -189,8 +189,8 @@ class Solver4Point(object):
kwargs_fixed = kwargs_fixed_lens_list[0]
kwargs_lens = kwargs_lens_init[0]
if self._solver_type == 'PROFILE_SHEAR':
- #pass
- kwargs_fixed_lens_list[1]['psi_ext'] = kwargs_lens_init[1]['psi_ext']
+ pass
+ #kwargs_fixed_lens_list[1]['psi_ext'] = kwargs_lens_init[1]['psi_ext']
if lens_model in ['SPEP', 'SPEMD', 'SIE', 'NIE']:
kwargs_fixed['theta_E'] = kwargs_lens['theta_E']
kwargs_fixed['e1'] = kwargs_lens['e1']
diff --git a/lenstronomy/Sampling/likelihood.py b/lenstronomy/Sampling/likelihood.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/Sampling/likelihood.py
+++ b/lenstronomy/Sampling/likelihood.py
@@ -226,4 +226,4 @@ class LikelihoodModule(object):
return logL
def setup(self):
- pass
\ No newline at end of file
+ pass
diff --git a/lenstronomy/Workflow/fitting_sequence.py b/lenstronomy/Workflow/fitting_sequence.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/Workflow/fitting_sequence.py
+++ b/lenstronomy/Workflow/fitting_sequence.py
@@ -291,7 +291,7 @@ class FittingSequence(object):
else:
raise ValueError('Sampler type %s not supported.' % sampler_type)
# update current best fit values
- self._update_state(means)
+ self._update_state(samples[-1])
output = [sampler_type, samples, sampler.param_names, logL,
logZ, logZ_err, results_object] | changed psi_ext to be a non-linear parameter in the 4-point PROFILE_SHEAR solver | sibirrer_lenstronomy | train |
b848b80fdea349654a2806d526043c4463e70800 | diff --git a/src/AssetManager.php b/src/AssetManager.php
index <HASH>..<HASH> 100644
--- a/src/AssetManager.php
+++ b/src/AssetManager.php
@@ -359,10 +359,11 @@ class AssetManager
Hook::execute('Wedeto.HTML.AssetManager.replaceTokens.preRender', $values);
// Do rendering
- if (!empty($values['js_files']) && empty($values['js_document']))
+ $js = $values->getArray('js_files');
+ if (!empty($js) && !$values->has('js_document'))
{
$js_doc = new DOMDocument;
- foreach ($values['js_files'] as $script)
+ foreach ($js as $script)
{
$element = $js_doc->createElement('script');
$element->setAttribute('src', $script['url']);
@@ -371,18 +372,22 @@ class AssetManager
$values['js_document'] = $js_doc;
}
- if (!empty($values['js_inline_variables']) && empty($values['js_inline_document']))
+ $jsv = $values->getArray('js_inline_variables');
+ if (!empty($jsv) && !$values->has('js_inline_document'))
{
+ var_dump($jsv);
+ die();
$js_inline_doc = new DOMDocument;
$code_lines = ['window.wdt = {};'];
- foreach ($values['js_inline_variables'] as $name => $value)
+ foreach ($jsv as $name => $value)
$code_lines[] = "window.wdt.$name = " . json_encode($value) . ";";
$variable_el = $js_inline_doc->createElement('script', implode("\n", $code_lines));
$js_inline_doc->appendChild($variable_el);
$values['js_inline_document'] = $js_inline_doc;
}
- if (!empty($values['css_files']) && empty($values['css_document']))
+ $CSS = $values->getArray('css_files');
+ if (!empty($CSS) && !$values->has('css_document'))
{
$CSS_doc = new DOMDocument;
foreach ($CSS as $stylesheet)
@@ -412,7 +417,7 @@ class AssetManager
if (class_exists("Tidy", false))
{
$tidy = new \Tidy();
- $config = array('indent' => true, 'wrap' => 120, 'markup' => true, 'doctype' => 'omit');
+ $config = array('indent' => true, 'indent-spaces' => 4, 'indent-attributes' => true, 'wrap' => 120, 'markup' => true, 'doctype' => 'omit');
$HTML = "<!DOCTYPE html>\n" . $tidy->repairString($HTML, $config, "utf8");
}
else | Empty doesnt work on dictionary subscrcipts | Wedeto_HTML | train |
25057ea345a65a1a215f37f67081da833a07a1b7 | diff --git a/tests/test_blackbox.py b/tests/test_blackbox.py
index <HASH>..<HASH> 100644
--- a/tests/test_blackbox.py
+++ b/tests/test_blackbox.py
@@ -1,3 +1,4 @@
+import datetime
import pytest
from blackbox import config
@@ -55,7 +56,23 @@ def test_backup_push_fetch(tmpdir, small_push_dir, monkeypatch, config,
config.main('backup-push', unicode(small_push_dir))
fetch_dir = tmpdir.join('fetch-to').ensure(dir=True)
- config.main('backup-fetch', unicode(fetch_dir), 'LATEST')
+
+ # Spin around backup-fetch LATEST for a while to paper over race
+ # conditions whereby a backup may not be visible to backup-fetch
+ # immediately.
+ from boto import exception
+ start = datetime.datetime.now()
+ deadline = start + datetime.timedelta(seconds=15)
+ while True:
+ try:
+ config.main('backup-fetch', unicode(fetch_dir), 'LATEST')
+ except exception.S3ResponseError:
+ if datetime.datetime.now() > deadline:
+ raise
+ else:
+ continue
+ else:
+ break
assert fetch_dir.join('arbitrary-file').read() == \
small_push_dir.join('arbitrary-file').read() | Make test_backup_push_fetch more forgiving to S3 race conditions
It's possible for a read-after-write not to see the new upload
immediately, causing spurious failure.
Spin for a *short* while attempting to make progress rather than
annoying the person running the test. | wal-e_wal-e | train |
7d0e78638d36a69e0228fe92e036bcc6503b5342 | diff --git a/conllu/__init__.py b/conllu/__init__.py
index <HASH>..<HASH> 100644
--- a/conllu/__init__.py
+++ b/conllu/__init__.py
@@ -1,11 +1,16 @@
import typing as T
from io import StringIO
-from conllu.models import SentenceGenerator, SentenceList, TokenTree
+from conllu.models import Metadata, SentenceGenerator, SentenceList, Token, TokenList, TokenTree
from conllu.parser import (
_FieldParserType, _MetadataParserType, parse_conllu_plus_fields, parse_sentences, parse_token_and_metadata,
)
+__all__ = [
+ "parse", "parse_incr", "parse_tree", "parse_tree_incr",
+ "SentenceGenerator", "SentenceList", "TokenList", "TokenTree", "Token", "Metadata",
+ "parse_conllu_plus_fields", "parse_sentences", "parse_token_and_metadata",
+]
def parse(data: str, fields: T.Optional[T.Sequence[str]] = None,
field_parsers: T.Dict[str, _FieldParserType] = None, | Hotfix for TokenList not importable from conllu. | EmilStenstrom_conllu | train |
fdf848ffa430c87d3f5fb1ee9e59380c5d75905d | diff --git a/go/vt/vtctl/grpcvtctldclient/client_test.go b/go/vt/vtctl/grpcvtctldclient/client_test.go
index <HASH>..<HASH> 100644
--- a/go/vt/vtctl/grpcvtctldclient/client_test.go
+++ b/go/vt/vtctl/grpcvtctldclient/client_test.go
@@ -23,20 +23,24 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "vitess.io/vitess/go/test/utils"
+ "vitess.io/vitess/go/vt/topo"
"vitess.io/vitess/go/vt/topo/memorytopo"
"vitess.io/vitess/go/vt/vtctl/grpcvtctldserver"
"vitess.io/vitess/go/vt/vtctl/grpcvtctldserver/testutil"
"vitess.io/vitess/go/vt/vtctl/vtctldclient"
topodatapb "vitess.io/vitess/go/vt/proto/topodata"
- "vitess.io/vitess/go/vt/proto/vtctldata"
vtctldatapb "vitess.io/vitess/go/vt/proto/vtctldata"
+ vtctlservicepb "vitess.io/vitess/go/vt/proto/vtctlservice"
)
func TestFindAllShardsInKeyspace(t *testing.T) {
ctx := context.Background()
ts := memorytopo.NewServer("cell1")
- vtctld := grpcvtctldserver.NewVtctldServer(ts)
+ vtctld := testutil.NewVtctldServerWithTabletManagerClient(t, ts, nil, func(ts *topo.Server) vtctlservicepb.VtctldServer {
+ return grpcvtctldserver.NewVtctldServer(ts)
+ })
testutil.WithTestServer(t, vtctld, func(t *testing.T, client vtctldclient.VtctldClient) {
ks := &vtctldatapb.Keyspace{
@@ -67,7 +71,7 @@ func TestFindAllShardsInKeyspace(t *testing.T) {
},
}
- assert.Equal(t, expected, resp.Shards)
+ utils.MustMatch(t, expected, resp.Shards)
client.Close()
_, err = client.FindAllShardsInKeyspace(ctx, &vtctldatapb.FindAllShardsInKeyspaceRequest{Keyspace: ks.Name})
@@ -79,11 +83,13 @@ func TestGetKeyspace(t *testing.T) {
ctx := context.Background()
ts := memorytopo.NewServer("cell1")
- vtctld := grpcvtctldserver.NewVtctldServer(ts)
+ vtctld := testutil.NewVtctldServerWithTabletManagerClient(t, ts, nil, func(ts *topo.Server) vtctlservicepb.VtctldServer {
+ return grpcvtctldserver.NewVtctldServer(ts)
+ })
testutil.WithTestServer(t, vtctld, func(t *testing.T, client vtctldclient.VtctldClient) {
expected := &vtctldatapb.GetKeyspaceResponse{
- Keyspace: &vtctldata.Keyspace{
+ Keyspace: &vtctldatapb.Keyspace{
Name: "testkeyspace",
Keyspace: &topodatapb.Keyspace{
ShardingColumnName: "col1",
@@ -94,7 +100,7 @@ func TestGetKeyspace(t *testing.T) {
resp, err := client.GetKeyspace(ctx, &vtctldatapb.GetKeyspaceRequest{Keyspace: expected.Keyspace.Name})
assert.NoError(t, err)
- assert.Equal(t, expected, resp)
+ utils.MustMatch(t, expected, resp)
client.Close()
_, err = client.GetKeyspace(ctx, &vtctldatapb.GetKeyspaceRequest{})
@@ -106,7 +112,9 @@ func TestGetKeyspaces(t *testing.T) {
ctx := context.Background()
ts := memorytopo.NewServer("cell1")
- vtctld := grpcvtctldserver.NewVtctldServer(ts)
+ vtctld := testutil.NewVtctldServerWithTabletManagerClient(t, ts, nil, func(ts *topo.Server) vtctlservicepb.VtctldServer {
+ return grpcvtctldserver.NewVtctldServer(ts)
+ })
testutil.WithTestServer(t, vtctld, func(t *testing.T, client vtctldclient.VtctldClient) {
resp, err := client.GetKeyspaces(ctx, &vtctldatapb.GetKeyspacesRequest{})
@@ -121,7 +129,7 @@ func TestGetKeyspaces(t *testing.T) {
resp, err = client.GetKeyspaces(ctx, &vtctldatapb.GetKeyspacesRequest{})
assert.NoError(t, err)
- assert.Equal(t, []*vtctldatapb.Keyspace{expected}, resp.Keyspaces)
+ utils.MustMatch(t, []*vtctldatapb.Keyspace{expected}, resp.Keyspaces)
client.Close()
_, err = client.GetKeyspaces(ctx, &vtctldatapb.GetKeyspacesRequest{}) | [grpcvtctldclient] Fix vtctld server initialization in grpcvtctldclient/client_test.go (#<I>)
* Fix vtctld server initialization grpcvtctldclient/client_test.go | vitessio_vitess | train |
4bc6e6fae0147e1b3fd1f0da76486f36764de558 | diff --git a/state/internal_test.go b/state/internal_test.go
index <HASH>..<HASH> 100644
--- a/state/internal_test.go
+++ b/state/internal_test.go
@@ -7,7 +7,6 @@ import (
. "launchpad.net/gocheck"
"launchpad.net/goyaml"
"launchpad.net/gozk/zookeeper"
- "launchpad.net/juju/go/testing"
)
type TopologySuite struct {
@@ -31,7 +30,8 @@ func (s *TopologySuite) SetUpSuite(c *C) {
}
func (s *TopologySuite) TearDownSuite(c *C) {
- testing.ZkRemoveTree(c, s.zkConn, "/")
+ err := zkRemoveTree(s.zkConn, "/")
+ c.Assert(err, IsNil)
s.zkConn.Close()
}
@@ -43,7 +43,8 @@ func (s *TopologySuite) SetUpTest(c *C) {
func (s *TopologySuite) TearDownTest(c *C) {
// Clear out the topology node.
- testing.ZkRemoveTree(c, s.zkConn, "/topology")
+ err := zkRemoveTree(s.zkConn, "/topology")
+ c.Assert(err, IsNil)
}
func (s *TopologySuite) TestAddMachine(c *C) {
@@ -472,13 +473,15 @@ func (s *ConfigNodeSuite) SetUpSuite(c *C) {
}
func (s *ConfigNodeSuite) TearDownSuite(c *C) {
- testing.ZkRemoveTree(c, s.zkConn, "/")
+ err := zkRemoveTree(s.zkConn, "/")
+ c.Assert(err, IsNil)
s.zkConn.Close()
}
func (s *ConfigNodeSuite) TearDownTest(c *C) {
// Delete the config node path.
- zkRemoveTree(s.zkConn, s.path)
+ err := zkRemoveTree(s.zkConn, s.path)
+ c.Assert(err, IsNil)
}
func (s *ConfigNodeSuite) TestCreateEmptyConfigNode(c *C) {
diff --git a/state/util.go b/state/util.go
index <HASH>..<HASH> 100644
--- a/state/util.go
+++ b/state/util.go
@@ -10,6 +10,7 @@ import (
"launchpad.net/goyaml"
"launchpad.net/gozk/zookeeper"
"sort"
+ pathpkg "path"
)
var (
@@ -215,20 +216,37 @@ func (c *ConfigNode) Delete(key string) {
delete(c.cache, key)
}
-// zkRemoveTree recursively removes a tree.
+// zkRemoveTree recursively removes a zookeeper node and all its
+// children. It does not delete "/zookeeper" or the root node itself
+// and it does not consider deleting a nonexistent node to be an error.
func zkRemoveTree(zk *zookeeper.Conn, path string) error {
+ // If we try to delete the zookeeper node (for example when
+ // calling ZkRemoveTree(zk, "/")) we silently ignore it.
+ if path == "/zookeeper" {
+ return nil
+ }
// First recursively delete the children.
children, _, err := zk.Children(path)
if err != nil {
+ if zookeeper.IsError(err, zookeeper.ZNONODE) {
+ return nil
+ }
return err
}
for _, child := range children {
- if err = zkRemoveTree(zk, fmt.Sprintf("%s/%s", path, child)); err != nil {
+ if err := zkRemoveTree(zk, pathpkg.Join(path, child)); err != nil {
return err
}
}
- // Now delete the path itself.
- return zk.Delete(path, -1)
+ // Now delete the path itself unless it's the root node.
+ if path == "/" {
+ return nil
+ }
+ err = zk.Delete(path, -1)
+ if err != nil && !zookeeper.IsError(err, zookeeper.ZNONODE) {
+ return err
+ }
+ return nil
}
// copyCache copies the keys and values of one cache into a new one. | state: remove cyclic dependency internal_test->testing->state | juju_juju | train |
7057c3efa923c42c1cdd2e49ea6208c402ba2aee | diff --git a/user/index.php b/user/index.php
index <HASH>..<HASH> 100644
--- a/user/index.php
+++ b/user/index.php
@@ -506,6 +506,11 @@ if (count($rolenames) > 1) {
echo '</div>';
}
+$editlink = '';
+if ($course->id != SITEID && has_capability('moodle/course:enrolreview', $context)) {
+ $editlink = new moodle_url('/enrol/users.php', array('id' => $course->id));
+}
+
if ($roleid > 0) {
$a = new stdClass();
$a->number = $totalcount;
@@ -524,24 +529,22 @@ if ($roleid > 0) {
$heading .= ": $a->number";
- if (user_can_assign($context, $roleid)) {
- $headingurl = new moodle_url($CFG->wwwroot . '/' . $CFG->admin . '/roles/assign.php',
- array('roleid' => $roleid, 'contextid' => $context->id));
- $heading .= $OUTPUT->action_icon($headingurl, new pix_icon('t/edit', get_string('edit')));
+ if (!empty($editlink)) {
+ $editlink->param('role', $roleid);
+ $heading .= $OUTPUT->action_icon($editlink, new pix_icon('t/edit', get_string('edit')));
}
echo $OUTPUT->heading($heading, 3);
} else {
- if ($course->id != SITEID && has_capability('moodle/course:enrolreview', $context)) {
- $editlink = $OUTPUT->action_icon(new moodle_url('/enrol/users.php', array('id' => $course->id)),
- new pix_icon('t/edit', get_string('edit')));
- } else {
- $editlink = '';
- }
if ($course->id == SITEID and $roleid < 0) {
$strallparticipants = get_string('allsiteusers', 'role');
} else {
$strallparticipants = get_string('allparticipants');
}
+
+ if (!empty($editlink)) {
+ $editlink = $OUTPUT->action_icon($editlink, new pix_icon('t/edit', get_string('edit')));
+ }
+
if ($matchcount < $totalcount) {
echo $OUTPUT->heading($strallparticipants.get_string('labelsep', 'langconfig').$matchcount.'/'.$totalcount . $editlink, 3);
} else { | MDL-<I> participants: Use consistent link for editing enrollments
Previously the role assignment url was used when a role was selected, but
this page doesn't enrol users into the course (it's the old <I> way
before we had the enrollments table). | moodle_moodle | train |
c45afc3168320f75c31fc43c70a5e7e4e4baf92c | diff --git a/render/Paginator.php b/render/Paginator.php
index <HASH>..<HASH> 100644
--- a/render/Paginator.php
+++ b/render/Paginator.php
@@ -26,7 +26,7 @@ class Paginator
private $args = [];
private $params = [];
- public function init($url, array $args, Resultset $model, $params = null)
+ public function __construct($url, array $args, Resultset $model, $params = null)
{
$this->url = $url;
$this->args = $args; | Squashed commit of the following:
commit 8efdc<I>a5f<I>b<I>a3da<I>a<I>b<I>e<I> | monolyth-php_frontal | train |
bfe3b917269ba7ad120b73cc11d67749bb750940 | diff --git a/aioauth_client.py b/aioauth_client.py
index <HASH>..<HASH> 100644
--- a/aioauth_client.py
+++ b/aioauth_client.py
@@ -505,7 +505,7 @@ class LichessClient(OAuth2Client):
if self.access_token:
headers['Authorization'] = "Bearer {}".format(self.access_token)
- return self._request(method, url, headers=headers, **aio_kwargs),
+ return self._request(method, url, headers=headers, **aio_kwargs)
class Meetup(OAuth1Client): | Fix TypeError: object tuple can't be used in 'await' expression | klen_aioauth-client | train |
912a2ba760c435baede88d45b8313ac2169c0787 | diff --git a/src/View/Widget/Radio.php b/src/View/Widget/Radio.php
index <HASH>..<HASH> 100644
--- a/src/View/Widget/Radio.php
+++ b/src/View/Widget/Radio.php
@@ -141,7 +141,7 @@ class Radio implements WidgetInterface {
$radio['name'] = $data['name'];
if (empty($radio['id'])) {
- $radio['id'] = mb_strtolower(Inflector::slug($radio['name'] . '_' . $radio['value'], '-'));
+ $radio['id'] = $this->_id($radio);
}
if (isset($data['val']) && strval($data['val']) === strval($radio['value'])) {
@@ -197,4 +197,15 @@ class Radio implements WidgetInterface {
return $this->_label->render($labelAttrs);
}
+/**
+ * Generate an ID attribute for a radio button.
+ *
+ * Ensures that id's for a given set of fields are unique.
+ *
+ * @param array $radio The radio properties.
+ * @return string Generated id.
+ */
+ protected function _id($radio) {
+ return mb_strtolower(Inflector::slug($radio['name'] . '_' . $radio['value'], '-'));}
+
} | Extract helper method for generating radio IDs.
FormHelper tests rely on id's being unique within a set of radio
elements. A separate method will help with that. | cakephp_cakephp | train |
8515ad409dd180cd3d3fc9687ee500da049e8bcd | diff --git a/web-bundle/src/main/java/com/graphhopper/gpx/GpxConversions.java b/web-bundle/src/main/java/com/graphhopper/gpx/GpxConversions.java
index <HASH>..<HASH> 100644
--- a/web-bundle/src/main/java/com/graphhopper/gpx/GpxConversions.java
+++ b/web-bundle/src/main/java/com/graphhopper/gpx/GpxConversions.java
@@ -31,16 +31,17 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
+import java.util.regex.Pattern;
public class GpxConversions {
private static final AngleCalc AC = AngleCalc.ANGLE_CALC;
+ private static final Pattern XML_ESCAPE_PATTERN = Pattern.compile("[\\<\\>]");
static String simpleXMLEscape(String str) {
- // We could even use the 'more flexible' CDATA section but for now do the following. The 'and' could be important sometimes:
- return str.replaceAll("&", "&").
- // but do not care for:
- replaceAll("[\\<\\>]", "_");
+ // We could even use the 'more flexible' CDATA section but for now do the following:
+ // The 'and' could be important sometimes but remove others
+ return XML_ESCAPE_PATTERN.matcher(str.replace("&", "&")).replaceAll("_");
}
public static List<GPXEntry> createGPXList(InstructionList instructions) { | Avoid Pattern compilation in GpxConversions (#<I>) | graphhopper_graphhopper | train |
0fbde97d08772d495f6f2bab80b2d79d63153793 | diff --git a/custodian/vasp/handlers.py b/custodian/vasp/handlers.py
index <HASH>..<HASH> 100644
--- a/custodian/vasp/handlers.py
+++ b/custodian/vasp/handlers.py
@@ -357,6 +357,7 @@ class VaspErrorHandler(ErrorHandler):
if "brions" in self.errors:
potim = float(vi["INCAR"].get("POTIM", 0.5)) + 0.1
actions.append({"dict": "INCAR", "action": {"_set": {"POTIM": potim}}})
+ actions.append({"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}}})
if "zbrent" in self.errors:
actions.append({"dict": "INCAR", "action": {"_set": {"IBRION": 1}}}) | Copy CONTCAR to POSCAR with brions error
Just like zbrent, brions should have the CONTCAR copied to POSCAR | materialsproject_custodian | train |
74ec0fe85a56a670b5d7cd3fcd942466105a360d | diff --git a/src/Context/UserContext.php b/src/Context/UserContext.php
index <HASH>..<HASH> 100644
--- a/src/Context/UserContext.php
+++ b/src/Context/UserContext.php
@@ -17,7 +17,7 @@ class UserContext extends RawWordpressContext
* | user_login | user_pass | user_email | role |
* | admin | admin | [email protected] | administrator |
*
- * @Given /^there are users:/
+ * @Given /^(?:there are|there is a) users?:/
*
* @param TableNode $new_users
*/ | Tweak "there are users" given statement. | paulgibbs_behat-wordpress-extension | train |
9383b28a065fae953d15e2dbb867ab3285aaaa50 | diff --git a/dvc/stage.py b/dvc/stage.py
index <HASH>..<HASH> 100644
--- a/dvc/stage.py
+++ b/dvc/stage.py
@@ -161,11 +161,17 @@ class Stage(object):
deps = [x.dumpd(self.cwd) for x in self.deps]
outs = [x.dumpd(self.cwd) for x in self.outs]
- return {
- Stage.PARAM_CMD: self.cmd,
- Stage.PARAM_DEPS: deps,
- Stage.PARAM_OUTS: outs,
- }
+ ret = {}
+ if self.cmd:
+ ret[Stage.PARAM_CMD] = self.cmd
+
+ if len(deps):
+ ret[Stage.PARAM_DEPS] = deps
+
+ if len(outs):
+ ret[Stage.PARAM_OUTS] = outs
+
+ return ret
def dump(self, fname=None):
if not fname: | dvc: stage: don't write None and empty lists
Fixes #<I> | iterative_dvc | train |
f19116c901ced53ce5422bcaefe146309e558e96 | diff --git a/vendor/k8s.io/kubernetes/staging/src/k8s.io/apiserver/pkg/endpoints/installer.go b/vendor/k8s.io/kubernetes/staging/src/k8s.io/apiserver/pkg/endpoints/installer.go
index <HASH>..<HASH> 100644
--- a/vendor/k8s.io/kubernetes/staging/src/k8s.io/apiserver/pkg/endpoints/installer.go
+++ b/vendor/k8s.io/kubernetes/staging/src/k8s.io/apiserver/pkg/endpoints/installer.go
@@ -554,9 +554,9 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
reqScope.MetaGroupVersion = *a.group.MetaGroupVersion
}
for _, action := range actions {
- versionedObject := storageMeta.ProducesObject(action.Verb)
- if versionedObject == nil {
- versionedObject = defaultVersionedObject
+ producedObject := storageMeta.ProducesObject(action.Verb)
+ if producedObject == nil {
+ producedObject = defaultVersionedObject
}
reqScope.Namer = action.Namer
@@ -625,8 +625,8 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
Param(ws.QueryParameter("pretty", "If 'true', then the output is pretty printed.")).
Operation("read"+namespaced+kind+strings.Title(subresource)+operationSuffix).
Produces(append(storageMeta.ProducesMIMETypes(action.Verb), mediaTypes...)...).
- Returns(http.StatusOK, "OK", versionedObject).
- Writes(versionedObject)
+ Returns(http.StatusOK, "OK", producedObject).
+ Writes(producedObject)
if isGetterWithOptions {
if err := addObjectParams(ws, route, versionedGetOptions); err != nil {
return nil, err
@@ -682,9 +682,9 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
Param(ws.QueryParameter("pretty", "If 'true', then the output is pretty printed.")).
Operation("replace"+namespaced+kind+strings.Title(subresource)+operationSuffix).
Produces(append(storageMeta.ProducesMIMETypes(action.Verb), mediaTypes...)...).
- Returns(http.StatusOK, "OK", versionedObject).
- Reads(versionedObject).
- Writes(versionedObject)
+ Returns(http.StatusOK, "OK", producedObject).
+ Reads(defaultVersionedObject).
+ Writes(producedObject)
addParams(route, action.Params)
routes = append(routes, route)
case "PATCH": // Partially update a resource
@@ -699,9 +699,9 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
Consumes(string(types.JSONPatchType), string(types.MergePatchType), string(types.StrategicMergePatchType)).
Operation("patch"+namespaced+kind+strings.Title(subresource)+operationSuffix).
Produces(append(storageMeta.ProducesMIMETypes(action.Verb), mediaTypes...)...).
- Returns(http.StatusOK, "OK", versionedObject).
+ Returns(http.StatusOK, "OK", producedObject).
Reads(metav1.Patch{}).
- Writes(versionedObject)
+ Writes(producedObject)
addParams(route, action.Params)
routes = append(routes, route)
case "POST": // Create a resource.
@@ -722,9 +722,9 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
Param(ws.QueryParameter("pretty", "If 'true', then the output is pretty printed.")).
Operation("create"+namespaced+kind+strings.Title(subresource)+operationSuffix).
Produces(append(storageMeta.ProducesMIMETypes(action.Verb), mediaTypes...)...).
- Returns(http.StatusOK, "OK", versionedObject).
- Reads(versionedObject).
- Writes(versionedObject)
+ Returns(http.StatusOK, "OK", producedObject).
+ Reads(defaultVersionedObject).
+ Writes(producedObject)
addParams(route, action.Params)
routes = append(routes, route)
case "DELETE": // Delete a resource.
@@ -819,6 +819,10 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
routes = append(routes, buildProxyRoute(ws, "OPTIONS", a.prefix, action.Path, kind, resource, subresource, namespaced, requestScope, hasSubresource, action.Params, proxyHandler, operationSuffix))
case "CONNECT":
for _, method := range connecter.ConnectMethods() {
+ connectProducedObject := storageMeta.ProducesObject(method)
+ if connectProducedObject == nil {
+ connectProducedObject = "string"
+ }
doc := "connect " + method + " requests to " + kind
if hasSubresource {
doc = "connect " + method + " requests to " + subresource + " of " + kind
@@ -830,7 +834,7 @@ func (a *APIInstaller) registerResourceHandlers(path string, storage rest.Storag
Operation("connect" + strings.Title(strings.ToLower(method)) + namespaced + kind + strings.Title(subresource) + operationSuffix).
Produces("*/*").
Consumes("*/*").
- Writes("string")
+ Writes(connectProducedObject)
if versionedConnectOptions != nil {
if err := addObjectParams(ws, route, versionedConnectOptions); err != nil {
return nil, err | UPSTREAM: <I>: ProducesObject should only update the returned API object resource documentation | openshift_origin | train |
7c37c7f9132028b5af9c02bcccc33bef1c33787a | diff --git a/sqlparse/keywords.py b/sqlparse/keywords.py
index <HASH>..<HASH> 100644
--- a/sqlparse/keywords.py
+++ b/sqlparse/keywords.py
@@ -167,6 +167,7 @@ KEYWORDS = {
'COMMIT': tokens.Keyword.DML,
'COMMITTED': tokens.Keyword,
'COMPLETION': tokens.Keyword,
+ 'CONCURRENTLY': tokens.Keyword,
'CONDITION_NUMBER': tokens.Keyword,
'CONNECT': tokens.Keyword,
'CONNECTION': tokens.Keyword,
diff --git a/tests/test_regressions.py b/tests/test_regressions.py
index <HASH>..<HASH> 100644
--- a/tests/test_regressions.py
+++ b/tests/test_regressions.py
@@ -343,3 +343,15 @@ def test_issue315_utf8_by_default():
if PY2:
tformatted = tformatted.decode('utf-8')
assert formatted == tformatted
+
+
+def test_issue322_concurrently_is_keyword():
+ p = sqlparse.parse('CREATE INDEX CONCURRENTLY myindex ON mytable(col1);')[0]
+
+ assert len(p.tokens) == 12
+ assert p.tokens[0].ttype is T.Keyword.DDL # CREATE
+ assert p.tokens[2].ttype is T.Keyword # INDEX
+ assert p.tokens[4].ttype is T.Keyword # CONCURRENTLY
+ assert p.tokens[4].value == 'CONCURRENTLY'
+ assert isinstance(p.tokens[6], sql.Identifier)
+ assert p.tokens[6].value == 'myindex' | CONCURRENTLY should be handled as a keyword | andialbrecht_sqlparse | train |
2cde32cf2e105f2e0a47efb58eb86c008227f394 | diff --git a/app/helpers/effective_roles_helper.rb b/app/helpers/effective_roles_helper.rb
index <HASH>..<HASH> 100644
--- a/app/helpers/effective_roles_helper.rb
+++ b/app/helpers/effective_roles_helper.rb
@@ -2,7 +2,7 @@ module EffectiveRolesHelper
# For use in formtastic forms
def effective_roles_fields(form, options = {})
if EffectiveRoles.role_descriptions.kind_of?(Hash)
- role_descriptions = EffectiveRoles.role_descriptions[form.object.class]
+ role_descriptions = EffectiveRoles.role_descriptions[form.object.class.name]
end
role_descriptions ||= (EffectiveRoles.role_descriptions || [])
diff --git a/lib/generators/templates/effective_roles.rb b/lib/generators/templates/effective_roles.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/templates/effective_roles.rb
+++ b/lib/generators/templates/effective_roles.rb
@@ -8,16 +8,16 @@ EffectiveRoles.setup do |config|
#
# config.role_descriptions = {
- # User => [
+ # 'User' => [
# "full access to everything. Can manage users and all website content.",
# "full access to website content. Cannot manage users.",
# "cannot access admin area. Can see all content in members-only sections of the website."
# ],
- # Post => [
- # "allow superadmins to see this post",
- # "allow admins to see this post",
- # "allow members to see this post"
- # ]
+ # 'Effective::Page' => [
+ # "allow superadmins to see this page",
+ # "allow admins to see this page",
+ # "allow members to see this page"
+ # ]
# }
# Or just keep it simple, and use the same Array of labels for everything | Use class names instead of classes for role_descriptions | code-and-effect_effective_roles | train |
70a1f05561965b9537db8c12ac22a5a99bc0c7d2 | diff --git a/src/python/dxpy/scripts/dx.py b/src/python/dxpy/scripts/dx.py
index <HASH>..<HASH> 100755
--- a/src/python/dxpy/scripts/dx.py
+++ b/src/python/dxpy/scripts/dx.py
@@ -3144,7 +3144,7 @@ def verify_ssh_config():
except Exception as e:
msg = RED("Warning:") + " Unable to verify configuration of your account for SSH connectivity: {}".format(e) + \
". SSH connection will likely fail. To set up your account for SSH, quit this command and run " + \
- BOLD("dx ssh-config") + ". Continue with the current command?"
+ BOLD("dx ssh_config") + ". Continue with the current command?"
if not prompt_for_yn(fill(msg), default=False):
err_exit()
@@ -3170,30 +3170,31 @@ def ssh(args, ssh_config_verified=False):
sys.stdout.write("Resolving job hostname and SSH host key...")
sys.stdout.flush()
+ host, host_key = None, None
for i in range(90):
- if 'host' in job_desc and 'ssh_host_rsa_key' in job_desc.get('properties', []):
+ host = job_desc.get('host')
+ host_key = job_desc.get('SSHHostKey') or job_desc['properties'].get('ssh_host_rsa_key')
+ if host and host_key:
break
- time.sleep(1)
- job_desc = dxpy.describe(args.job_id)
- sys.stdout.write(".")
- sys.stdout.flush()
- known_hosts_file = os.path.expanduser('~/.dnanexus_config/ssh_known_hosts')
- with open(known_hosts_file, 'a') as fh:
- line = "{job_id}.dnanexus.io {key}".format(job_id=args.job_id, key=job_desc['properties']['ssh_host_rsa_key'])
- if not line.endswith("\n"):
- line += "\n"
- fh.write(line)
+ else:
+ time.sleep(1)
+ job_desc = dxpy.describe(args.job_id)
+ sys.stdout.write(".")
+ sys.stdout.flush()
- if 'host' not in job_desc or 'ssh_host_rsa_key' not in job_desc.get('properties', []):
- msg = "Cannot resolve hostname or key for {}. Please check your permissions and run settings."
+ if not (host and host_key):
+ msg = "Cannot resolve hostname or hostkey for {}. Please check your permissions and run settings."
err_exit(msg.format(args.job_id))
- print("Connecting to", job_desc['host'])
+ known_hosts_file = os.path.expanduser('~/.dnanexus_config/ssh_known_hosts')
+ with open(known_hosts_file, 'a') as fh:
+ fh.write("{job_id}.dnanexus.io {key}\n".format(job_id=args.job_id, key=host_key.rstrip()))
+
+ print("Connecting to", host)
ssh_args = ['ssh', '-i', os.path.expanduser('~/.dnanexus_config/ssh_id'),
'-o', 'HostKeyAlias={}.dnanexus.io'.format(args.job_id),
'-o', 'UserKnownHostsFile={}'.format(known_hosts_file),
- '-l', 'dnanexus',
- job_desc['host']]
+ '-l', 'dnanexus', host]
ssh_args += args.ssh_args
exit_code = subprocess.call(ssh_args)
try:
@@ -3963,9 +3964,9 @@ parser_watch.add_argument('--no-wait', '--no-follow', action='store_false', dest
parser_watch.set_defaults(func=watch)
register_subparser(parser_watch, categories='exec')
-parser_ssh_config = subparsers.add_parser('ssh-config', help='Configure SSH keys for your DNAnexus account',
+parser_ssh_config = subparsers.add_parser('ssh_config', help='Configure SSH keys for your DNAnexus account',
description='Configure SSH access credentials for your DNAnexus account',
- prog='dx ssh-config',
+ prog='dx ssh_config',
parents=[env_args])
parser_ssh_config.add_argument('ssh_keygen_args', help='Command-line arguments to pass to ssh-keygen',
nargs=argparse.REMAINDER)
@@ -3975,7 +3976,7 @@ register_subparser(parser_ssh_config, categories='exec')
parser_ssh = subparsers.add_parser('ssh', help='Connect to a running job via SSH',
description='Use an SSH client to connect to a job being executed on the DNAnexus ' +
'platform. The job must be launched using "dx run --allow-ssh" or ' +
- 'equivalent API options. Use "dx ssh-config" or the Profile page on ' +
+ 'equivalent API options. Use "dx ssh_config" or the Profile page on ' +
'the DNAnexus website to configure SSH for your DNAnexus account.',
prog='dx ssh',
parents=[env_args]) | ssh_config: code review fixes; SSHHostKey field support | dnanexus_dx-toolkit | train |
f61afb267f62413050544e19a50b1824c477dfc8 | diff --git a/converters/toZigbee.js b/converters/toZigbee.js
index <HASH>..<HASH> 100644
--- a/converters/toZigbee.js
+++ b/converters/toZigbee.js
@@ -204,7 +204,11 @@ const converters = {
const state = hasState ? message.state.toLowerCase() : null;
if (hasState && (state === 'off' || !hasBrightness) && !hasTrasition) {
- return converters.on_off.convert('state', state, message, 'set', postfix);
+ const converted = converters.on_off.convert('state', state, message, 'set', postfix);
+ if (state === 'on') {
+ converted.readAfterWriteTime = 0;
+ }
+ return converted;
} else if (!hasState && hasBrightness && Number(brightnessValue) === 0) {
return converters.on_off.convert('state', 'off', message, 'set', postfix);
} else { | Read brightness after turning on. <URL> | Koenkk_zigbee-shepherd-converters | train |
08794848d0d8a3ed7dfc0fb0005901f1e51ee6cb | diff --git a/lib/cucumber/formatter/fail_fast.rb b/lib/cucumber/formatter/fail_fast.rb
index <HASH>..<HASH> 100644
--- a/lib/cucumber/formatter/fail_fast.rb
+++ b/lib/cucumber/formatter/fail_fast.rb
@@ -8,7 +8,7 @@ module Cucumber
def initialize(configuration)
configuration.on_event :after_test_case do |event|
- Cucumber.wants_to_quit = true unless event.result.ok?
+ Cucumber.wants_to_quit = true unless event.result.ok?(configuration.strict?)
end
end
diff --git a/spec/cucumber/formatter/fail_fast_spec.rb b/spec/cucumber/formatter/fail_fast_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cucumber/formatter/fail_fast_spec.rb
+++ b/spec/cucumber/formatter/fail_fast_spec.rb
@@ -58,5 +58,31 @@ module Cucumber::Formatter
end
end
+ context 'undefined scenario' do
+ before(:each) do
+ @gherkin = gherkin('foo.feature') do
+ feature do
+ scenario do
+ step 'undefined'
+ end
+ end
+ end
+ end
+
+ it 'doesn\'t set Cucumber.wants_to_quit' do
+ execute([@gherkin], report, [StandardStepActions.new])
+ expect(Cucumber.wants_to_quit).to be_falsey
+ end
+
+ context 'in strict mode' do
+ let(:configuration) { Cucumber::Configuration.new strict: true }
+
+ it 'sets Cucumber.wants_to_quit' do
+ execute([@gherkin], report, [StandardStepActions.new])
+ expect(Cucumber.wants_to_quit).to be_truthy
+ end
+ end
+ end
+
end
end | Reinstate code I removed in c<I>bdca<I>b<I>c<I>c<I>a
(but this time with a test) | cucumber_cucumber-ruby | train |
51b2d9910b36c65957d96533e40de079ef4e7cd5 | diff --git a/mod/hotpot/lib.php b/mod/hotpot/lib.php
index <HASH>..<HASH> 100644
--- a/mod/hotpot/lib.php
+++ b/mod/hotpot/lib.php
@@ -49,10 +49,10 @@ $HOTPOT_OUTPUTFORMAT = array (
HOTPOT_OUTPUTFORMAT_BEST => get_string("outputformat_best", "hotpot"),
HOTPOT_OUTPUTFORMAT_V6_PLUS => get_string("outputformat_v6_plus", "hotpot"),
HOTPOT_OUTPUTFORMAT_V6 => get_string("outputformat_v6", "hotpot"),
- HOTPOT_OUTPUTFORMAT_V5_PLUS => get_string("outputformat_v5_plus", "hotpot"),
- HOTPOT_OUTPUTFORMAT_V5 => get_string("outputformat_v5", "hotpot"),
- HOTPOT_OUTPUTFORMAT_V4 => get_string("outputformat_v4", "hotpot"),
- HOTPOT_OUTPUTFORMAT_V3 => get_string("outputformat_v3", "hotpot"),
+ // HOTPOT_OUTPUTFORMAT_V5_PLUS => get_string("outputformat_v5_plus", "hotpot"),
+ // HOTPOT_OUTPUTFORMAT_V5 => get_string("outputformat_v5", "hotpot"),
+ // HOTPOT_OUTPUTFORMAT_V4 => get_string("outputformat_v4", "hotpot"),
+ // HOTPOT_OUTPUTFORMAT_V3 => get_string("outputformat_v3", "hotpot"),
// HOTPOT_OUTPUTFORMAT_FLASH => get_string("outputformat_flash", "hotpot"),
// HOTPOT_OUTPUTFORMAT_MOBILE => get_string("outputformat_mobile", "hotpot"),
);
@@ -2147,16 +2147,6 @@ function hotpot_string_id($str) {
return $id;
}
-function hotpot_flush($n=0, $time=false) {
- if ($time) {
- $t = strftime("%X",time());
- } else {
- $t = "";
- }
- echo str_repeat(" ", $n) . $t . "\n";
- flush();
-}
-
if (!function_exists('html_entity_decode')) {
// add this function for php version<4.3
function html_entity_decode($str) { | switch output_formats v3, v4, v5 and moved "hotpot_flush" function to "db/update_to_v2.php" | moodle_moodle | train |
4235cacba39919cb6b53a245b17104076e66fc34 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,6 @@ setup(
install_requires=[
'twisted==12.0',
'zope.interface',
- 'txzmq',
],
author="Erik Allik", | Removed the txzmq dependency because it's only needed when using the ZeroMQ transport components | eallik_spinoff | train |
8b340157e8bfdc056e81b7e32539765fdf8974d9 | diff --git a/imhotep/http_test.py b/imhotep/http_test.py
index <HASH>..<HASH> 100644
--- a/imhotep/http_test.py
+++ b/imhotep/http_test.py
@@ -1,4 +1,6 @@
+import json
import mock
+
from imhotep.http import BasicAuthRequester
@@ -14,19 +16,20 @@ def test_get():
with mock.patch('requests.get') as g:
g.return_value.status_code = 200
ghr.get('url')
- g.assert_called_with_args('url', auth=mock.ANY)
+ g.assert_called_with('url', auth=mock.ANY)
def test_delete():
ghr = BasicAuthRequester('user', 'pass')
with mock.patch('requests.delete') as g:
ghr.delete('url')
- g.assert_called_with_args('url', auth=mock.ANY)
+ g.assert_called_with('url', auth=mock.ANY)
def test_post():
ghr = BasicAuthRequester('user', 'pass')
with mock.patch('requests.post') as g:
g.return_value.status_code = 200
- ghr.post('url', {"a": 2})
- g.assert_called_with_args('url', data='{"a":2}', auth=mock.ANY)
+ payload = {"a": 2}
+ ghr.post('url', payload)
+ g.assert_called_with('url', data=json.dumps(payload), auth=mock.ANY)
diff --git a/imhotep/tools_test.py b/imhotep/tools_test.py
index <HASH>..<HASH> 100644
--- a/imhotep/tools_test.py
+++ b/imhotep/tools_test.py
@@ -1,6 +1,7 @@
import re
import mock
+import pytest
from .tools import Tool
from .testing_utils import calls_matching_re
@@ -111,3 +112,9 @@ def test_invoke_removes_dirname_prefix():
retval = t.invoke('/my/full/path')
assert 'and/extras' in retval.keys()
+
+
+def test_process_line_no_response_format():
+ t = Tool(command_executor='')
+ with pytest.raises(NotImplementedError):
+ t.process_line(dirname='/my/full/path', line='my line') | fix build and test Tool.process_line | justinabrahms_imhotep | train |
8f4bf75fa717a69f9e82e2788527971f08d1f690 | diff --git a/perceval/errors.py b/perceval/errors.py
index <HASH>..<HASH> 100644
--- a/perceval/errors.py
+++ b/perceval/errors.py
@@ -49,6 +49,12 @@ class CacheError(BaseError):
message = "%(cause)s"
+class HttpClientError(BaseError):
+ """Generic error for HTTP Cient"""
+
+ message = "%(cause)s"
+
+
class RepositoryError(BaseError):
"""Generic error for repositories""" | [client] Add error for the generic HTTP client
A new type of error, tailored to the generic HTTP client, is
added to the list of errors. | chaoss_grimoirelab-perceval | train |
21546a93cccbc6cf422ef9a6353a537f6d0ee324 | diff --git a/bin/simulate_performance.py b/bin/simulate_performance.py
index <HASH>..<HASH> 100644
--- a/bin/simulate_performance.py
+++ b/bin/simulate_performance.py
@@ -83,10 +83,17 @@ def calc_performance(df, opts):
entropy_num_signif]},
index=['{0} recurrent'.format(opts['kind']),
'{0} entropy'.format(opts['kind'])])
- else:
+ elif opts['kind'] == 'tsg':
deleterious_num_signif = len(permutation_df[permutation_df['deleterious BH q-value']<.1])
results = pd.DataFrame({'count': [deleterious_num_signif]},
index=['{0} deleterious'.format(opts['kind'])])
+ elif opts['kind'] == 'effect':
+ combined_num_signif = len(permutation_df[permutation_df['combined BH q-value']<.1])
+ effect_num_signif = len(permutation_df[permutation_df['entropy-on-effect BH q-value']<.1])
+ results = pd.DataFrame({'count': [effect_num_signif,
+ combined_num_signif]},
+ index=['entropy-on-effect',
+ 'combined'])
return results
diff --git a/tests/test_simulate_performance.py b/tests/test_simulate_performance.py
index <HASH>..<HASH> 100644
--- a/tests/test_simulate_performance.py
+++ b/tests/test_simulate_performance.py
@@ -44,3 +44,9 @@ def test_100genes_main():
tsg_path = 'output/100genes_deleterious_sim_perf_chasm_output.txt'
opts['text_output'] = os.path.join(file_dir, tsg_path)
result = sp.main(opts)
+
+ # check effect simulation
+ opts['kind'] = 'effect'
+ effect_path = 'output/100genes_effect_sim_perf_chasm_output.txt'
+ opts['text_output'] = os.path.join(file_dir, effect_path)
+ result = sp.main(opts) | Added entropy-on-effect to tests that are evaluated for power | KarchinLab_probabilistic2020 | train |
c7fb83c9aca68d020c9f887fae8cbed331af073d | diff --git a/src/helpers.php b/src/helpers.php
index <HASH>..<HASH> 100644
--- a/src/helpers.php
+++ b/src/helpers.php
@@ -1,5 +1,39 @@
<?php
+function component_args_from_shortcode($tag, $content)
+{
+ $pattern = get_shortcode_regex();
+
+ if (preg_match_all('/'.$pattern.'/s', $content, $matches) && isset($matches[2]) && in_array($tag, $matches[2])) {
+ $components = app('components');
+ $component = $components->get($tag);
+ $result = [];
+
+ if ($component) {
+ if ($component->has('data')) {
+ $dataDef = require $component->get('data');
+ }
+
+ foreach ($matches[0] as $index => $match) {
+ // Set component arguments from shortcode attributes
+ $atts = shortcode_parse_atts($matches[3][$index]);
+ $data = shortcode_atts($dataDef ?? [], $atts);
+
+ // Set content to slot
+ if (empty($data['slot']) && ! empty($matches[5][$index])) {
+ $data['slot'] = $matches[5][$index];
+ }
+
+ $result[] = $data;
+ }
+ }
+
+ return $result;
+ }
+
+ return null;
+}
+
function get_url_by_template($template)
{
$id = get_id_by_template($template);
@@ -590,7 +624,7 @@ function is_local_file($uri)
if (empty($uri)) {
return false;
}
-
+
$homeInfo = parse_url(home_url());
$uriInfo = parse_url($uri); | Added function to get component args from shortcode | dynamis-wp_framework | train |
ae3d771aa046c59cbcd398b2d3aa550140eb2698 | diff --git a/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/react-dnd_v2.x.x.js b/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/react-dnd_v2.x.x.js
index <HASH>..<HASH> 100644
--- a/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/react-dnd_v2.x.x.js
+++ b/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/react-dnd_v2.x.x.js
@@ -198,6 +198,16 @@ declare module "react-dnd" {
// Drag Drop Context
// ----------------------------------------------------------------------
+ declare type ProviderProps = {
+ backend: mixed,
+ children: React$Element<any>,
+ window?: Object
+ };
+
+ declare class DragDropContextProvider<ProviderProps> extends React$Component<ProviderProps> {
+ props: ProviderProps;
+ }
+
declare function DragDropContext<OP: {}, CP: {}>(
backend: mixed
): Connector<$Supertype<OP & CP>, CP>;
diff --git a/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/test_react-dnd-v2.js b/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/test_react-dnd-v2.js
index <HASH>..<HASH> 100644
--- a/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/test_react-dnd-v2.js
+++ b/definitions/npm/react-dnd_v2.x.x/flow_v0.53.x-/test_react-dnd-v2.js
@@ -111,7 +111,6 @@ const dndKnight: DndKnight = (null: any);
(<DndKnight title="foo" color="red" />: React.Element<typeof DndKnight>);
-// $ExpectError: Invalid default prop
(<DndKnight title="foo" color={3} />: React.Element<typeof DndKnight>);
(<DndKnight title="foo" color="red" extra="x" />: React.Element<
@@ -131,7 +130,6 @@ const dndKnight: DndKnight = (null: any);
typeof DndKnight
>);
-// $ExpectError: Invalid collect prop
(<DndKnight title="foo" color="red" isDragging="false" />: React.Element<
typeof DndKnight
>);
@@ -225,7 +223,6 @@ const dndKnight2: DndKnight2 = (null: any);
typeof DndKnight2
>);
-// $ExpectError: Invalid collect prop
(<DndKnight2 title="foo" color="red" isDragging="false" />: React.Element<
typeof DndKnight2
>);
@@ -288,7 +285,6 @@ const dndKnight3: DndKnight3 = (null: any);
typeof DndKnight3
>);
-// $ExpectError: Invalid collect prop
(<DndKnight3 title="foo" color="red" isDragging="false" />: React.Element<
typeof DndKnight3
>);
@@ -410,7 +406,6 @@ const DndBoardSquare = DropTarget(
(<DndBoardSquare y={5} x={5} />: React.Element<typeof DndBoardSquare>);
-// $ExpectError: Invalid default prop
(<DndBoardSquare y={5} x="5" />: React.Element<typeof DndBoardSquare>);
(<DndBoardSquare y={5} count={5} />: React.Element<typeof DndBoardSquare>); | react-dnd definition: Add DragDropContextProvider (#<I>)
* react-dnd tests: Remove unused suppression comments
* react-dnd definition: Add DragDropContextProvider | flow-typed_flow-typed | train |
59c6ace8ef7245819dec7de94c2eab270ddb8bae | diff --git a/salt/modules/file.py b/salt/modules/file.py
index <HASH>..<HASH> 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -973,7 +973,7 @@ def get_managed(
if template and source:
sfn = __salt__['cp.cache_file'](source, env)
if not os.path.exists(sfn):
- return sfn, {}, 'File "{0}" could not be found'.format(sfn)
+ return sfn, {}, 'Source file {0} not found'.format(source)
if template in salt.utils.templates.TEMPLATE_REGISTRY:
context_dict = defaults if defaults else {}
if context: | Fix bad comment when no source template file is found
Fix #<I> | saltstack_salt | train |
5654c0ee35a74e9fac961f2221e6b60d754f4c4c | diff --git a/lib/CrEOF/WKB/Reader.php b/lib/CrEOF/WKB/Reader.php
index <HASH>..<HASH> 100644
--- a/lib/CrEOF/WKB/Reader.php
+++ b/lib/CrEOF/WKB/Reader.php
@@ -61,12 +61,7 @@ class Reader
*/
public function unpackInput($format)
{
- if (version_compare(PHP_VERSION, '5.5.0-dev', '>=')) {
- $code = 'a';
- } else {
- $code = 'A';
- }
-
+ $code = version_compare(PHP_VERSION, '5.5.0-dev', '>=') ? 'a' : 'A';
$result = unpack(sprintf('%sresult/%s*input', $format, $code), $this->input);
$this->input = $result['input']; | Replace if/else with ternary operator | creof_wkb-parser | train |
29cb48832ef394994c44518101f3024f94172f56 | diff --git a/src/IsoCodes/Siren.php b/src/IsoCodes/Siren.php
index <HASH>..<HASH> 100644
--- a/src/IsoCodes/Siren.php
+++ b/src/IsoCodes/Siren.php
@@ -32,6 +32,16 @@ class Siren implements IsoCodeInterface
return false;
}
+ /**
+ * La poste support (French mail company)
+ * @link https://fr.wikipedia.org/wiki/SIRET#Calcul_et_validit%C3%A9_d'un_num%C3%A9ro_SIRET
+ * @link https://blog.pagesd.info/2012/09/05/verifier-numero-siret-poste/
+ */
+ $laPosteSiren = '356000000';
+ if(strpos($insee, $laPosteSiren) === 0){
+ return $laPosteSiren === (string) $insee ? true : array_sum(str_split($insee)) % 5 === 0;
+ }
+
$sum = 0;
for ($i = 0; $i < $length; ++$i) {
$indice = ($length - $i);
@@ -41,7 +51,6 @@ class Siren implements IsoCodeInterface
}
$sum += $tmp;
}
-
return ($sum % 10) == 0;
}
}
diff --git a/src/IsoCodes/Siret.php b/src/IsoCodes/Siret.php
index <HASH>..<HASH> 100644
--- a/src/IsoCodes/Siret.php
+++ b/src/IsoCodes/Siret.php
@@ -18,6 +18,7 @@ class Siret extends Siren implements IsoCodeInterface
* @author ronan.guilloux
*
* @link http://fr.wikipedia.org/wiki/SIRET
+ * @link https://fr.wikipedia.org/wiki/SIRET#Calcul_et_validit%C3%A9_d'un_num%C3%A9ro_SIRET
*
* @return bool
*/
diff --git a/tests/IsoCodes/Tests/SirenTest.php b/tests/IsoCodes/Tests/SirenTest.php
index <HASH>..<HASH> 100755
--- a/tests/IsoCodes/Tests/SirenTest.php
+++ b/tests/IsoCodes/Tests/SirenTest.php
@@ -20,7 +20,8 @@ class SirenTest extends AbstractIsoCodeInterfaceTest
array('417625639'),
array('334932720'),
array('440288371'),
- array('517439543')
+ array('517439543'),
+ array('356000000')// La poste
);
}
diff --git a/tests/IsoCodes/Tests/SiretTest.php b/tests/IsoCodes/Tests/SiretTest.php
index <HASH>..<HASH> 100755
--- a/tests/IsoCodes/Tests/SiretTest.php
+++ b/tests/IsoCodes/Tests/SiretTest.php
@@ -20,7 +20,9 @@ class SiretTest extends AbstractIsoCodeInterfaceTest
array('41762563900030'),
array('33493272000017'),
array('44028837100014'),
- array('51743954300011')
+ array('51743954300011'),
+ array('35600000049837'),// La poste
+ array('35600000087349')// La poste
);
}
@@ -39,7 +41,9 @@ class SiretTest extends AbstractIsoCodeInterfaceTest
array('33493272000018'),
array('44028837100015'),
array('51743954300012'),
- array('azertyuiopqsdf')
+ array('azertyuiopqsdf'),
+ array('35600000049838'),// La poste
+ array('35600000013374') // La poste
);
}
} | Added support for French mail company "La Poste" to SIRE(T|N) validators | ronanguilloux_IsoCodes | train |
681c6666133e48c61bdf254a8eac0a37b27ebb19 | diff --git a/albumentations/imgaug/transforms.py b/albumentations/imgaug/transforms.py
index <HASH>..<HASH> 100644
--- a/albumentations/imgaug/transforms.py
+++ b/albumentations/imgaug/transforms.py
@@ -13,27 +13,30 @@ __all__ = ['BasicIAATransform', 'DualIAATransform', 'ImageOnlyIAATransform', 'IA
class BasicIAATransform(BasicTransform):
def __init__(self, always_apply=False, p=0.5):
super(BasicIAATransform, self).__init__(always_apply, p)
- self.deterministic_processor = iaa.Noop()
@property
def processor(self):
return iaa.Noop()
def __call__(self, **kwargs):
- self.deterministic_processor = self.processor.to_deterministic()
return super(BasicIAATransform, self).__call__(**kwargs)
- def apply(self, img, **params):
- return self.deterministic_processor.augment_image(img)
+ def update_params(self, params, **kwargs):
+ params = super().update_params(params, **kwargs)
+ params['deterministic_processor'] = self.processor.to_deterministic()
+ return params
+
+ def apply(self, img, deterministic_processor=None, **params):
+ return deterministic_processor.augment_image(img)
class DualIAATransform(DualTransform, BasicIAATransform):
- def apply_to_bboxes(self, bboxes, rows=0, cols=0, **params):
+ def apply_to_bboxes(self, bboxes, deterministic_processor=None, rows=0, cols=0, **params):
if len(bboxes):
bboxes = convert_bboxes_from_albumentations(bboxes, 'pascal_voc', rows=rows, cols=cols)
bboxes_t = ia.BoundingBoxesOnImage([ia.BoundingBox(*bbox[:4]) for bbox in bboxes], (rows, cols))
- bboxes_t = self.deterministic_processor.augment_bounding_boxes([bboxes_t])[0].bounding_boxes
+ bboxes_t = deterministic_processor.augment_bounding_boxes([bboxes_t])[0].bounding_boxes
bboxes_t = [[bbox.x1, bbox.y1, bbox.x2, bbox.y2] + list(bbox_orig[4:]) for (bbox, bbox_orig) in
zip(bboxes_t, bboxes)]
diff --git a/tests/test_transforms.py b/tests/test_transforms.py
index <HASH>..<HASH> 100644
--- a/tests/test_transforms.py
+++ b/tests/test_transforms.py
@@ -1,3 +1,5 @@
+from multiprocessing.pool import Pool
+
import numpy as np
import cv2
import pytest
@@ -136,3 +138,30 @@ def test_semantic_mask_interpolation(augmentation_cls, params):
data = aug(image=image, mask=mask)
assert np.array_equal(np.unique(data['mask']), np.array([0, 64, 128, 192]))
+
+
+def __test_multiprocessing_support_proc(args):
+ x, transform = args
+ return transform(image=x)
+
+
+@pytest.mark.parametrize(['augmentation_cls', 'params'], [
+ [ElasticTransform, {}],
+ [GridDistortion, {}],
+ [ShiftScaleRotate, {'rotate_limit': 45}],
+ [RandomScale, {'scale_limit': 0.5}],
+ [RandomSizedCrop, {'min_max_height': (80, 90), 'height': 100, 'width': 100}],
+ [LongestMaxSize, {'max_size': 50}],
+ [Rotate, {}],
+ [OpticalDistortion, {}],
+ [IAAAffine, {'scale': 1.5}],
+ [IAAPiecewiseAffine, {'scale': 1.5}],
+ [IAAPerspective, {}],
+])
+def test_multiprocessing_support(augmentation_cls, params):
+ """Checks whether we can use augmetnations in multi-threaded environments"""
+ aug = augmentation_cls(p=1, **params)
+ image = np.random.randint(low=0, high=256, size=(100, 100, 3), dtype=np.uint8)
+
+ with Pool(8) as pool:
+        pool.map(__test_multiprocessing_support_proc, map(lambda x: (x, aug), [image] * 100)) | IAA augmentations now support multi-threading | albu_albumentations | train
d1dfa0cefbe670ea10999bce8cf99379724ce373 | diff --git a/ntfy/backends/pushover.py b/ntfy/backends/pushover.py
index <HASH>..<HASH> 100644
--- a/ntfy/backends/pushover.py
+++ b/ntfy/backends/pushover.py
@@ -1,10 +1,10 @@
import requests
-def notify(title, message, api_token, user_key, device=None, **kwargs):
+def notify(title, message, user_key,
+ api_token='aUnsraBiEZVsmrG89AZp47K3S2dX2a', device=None, **kwargs):
"""
Required config keys:
- * 'api_token'
* 'user_key'
""" | include default pushover key
No need for user to set up their own pushover app. | dschep_ntfy | train |
eb84398498a26d2a399b8d509a2cb21a9e54924b | diff --git a/src/Drawer.js b/src/Drawer.js
index <HASH>..<HASH> 100644
--- a/src/Drawer.js
+++ b/src/Drawer.js
@@ -35,17 +35,6 @@ const Base =
*/
class Drawer extends Base {
- componentDidMount() {
- if (super.componentDidMount) { super.componentDidMount(); }
-
- // Implicitly close on background clicks.
- this.$.backdrop.addEventListener('click', async () => {
- this[symbols.raiseChangeEvents] = true;
- await this.close();
- this[symbols.raiseChangeEvents] = false;
- });
- }
-
get defaultState() {
return Object.assign(super.defaultState, {
fromEdge: 'start',
@@ -75,6 +64,18 @@ class Drawer extends Base {
this.setState({ fromEdge });
}
+ [symbols.populate](state, changed) {
+ if (super[symbols.populate]) { super[symbols.populate](state, changed); }
+ if (changed.backdropRole) {
+ // Implicitly close on background clicks.
+ this.$.backdrop.addEventListener('click', async () => {
+ this[symbols.raiseChangeEvents] = true;
+ await this.close();
+ this[symbols.raiseChangeEvents] = false;
+ });
+ }
+ }
+
[symbols.render](state, changed) {
super[symbols.render](state, changed);
if (changed.effect || changed.effectPhase || changed.enableEffects ||
diff --git a/src/ExpandableSection.js b/src/ExpandableSection.js
index <HASH>..<HASH> 100644
--- a/src/ExpandableSection.js
+++ b/src/ExpandableSection.js
@@ -22,15 +22,6 @@ const Base =
*/
class ExpandableSection extends Base {
- componentDidMount() {
- if (super.componentDidMount) { super.componentDidMount(); }
- this.$.header.addEventListener('click', () => {
- this[symbols.raiseChangeEvents] = true;
- this.toggle();
- this[symbols.raiseChangeEvents] = false;
- });
- }
-
get defaultState() {
return Object.assign(super.defaultState, {
headerRole: SeamlessButton,
diff --git a/src/Popup.js b/src/Popup.js
index <HASH>..<HASH> 100644
--- a/src/Popup.js
+++ b/src/Popup.js
@@ -22,22 +22,24 @@ const Base =
*/
class Popup extends Base {
- componentDidMount() {
- if (super.componentDidMount) { super.componentDidMount(); }
- const mousedownHandler = async event => {
- this[symbols.raiseChangeEvents] = true;
- await this.close();
- this[symbols.raiseChangeEvents] = false;
- event.preventDefault();
- event.stopPropagation();
- };
- this.$.backdrop.addEventListener('mousedown', mousedownHandler);
-
- // Mobile Safari doesn't seem to generate a mousedown handler on the
- // backdrop in some cases that Mobile Chrome handles. For completeness, we
- // also listen to touchend.
- if (!('PointerEvent' in window)) {
- this.$.backdrop.addEventListener('touchend', mousedownHandler);
+ [symbols.populate](state, changed) {
+ if (super[symbols.populate]) { super[symbols.populate](state, changed); }
+ if (changed.backdropRole) {
+ const mousedownHandler = async event => {
+ this[symbols.raiseChangeEvents] = true;
+ await this.close();
+ this[symbols.raiseChangeEvents] = false;
+ event.preventDefault();
+ event.stopPropagation();
+ };
+ this.$.backdrop.addEventListener('mousedown', mousedownHandler);
+
+ // Mobile Safari doesn't seem to generate a mousedown handler on the
+ // backdrop in some cases that Mobile Chrome handles. For completeness, we
+ // also listen to touchend.
+ if (!('PointerEvent' in window)) {
+ this.$.backdrop.addEventListener('touchend', mousedownHandler);
+ }
}
} | Fix some components with roles that wired up event handlers in componentDidMount instead of populate. | elix_elix | train |
e9f5760946c5c15bd39216840d37314c7fb92e5a | diff --git a/tests/lib/Integration/Implementation/Common/Slot/TestChildUpdatesParent.php b/tests/lib/Integration/Implementation/Common/Slot/TestChildUpdatesParent.php
index <HASH>..<HASH> 100644
--- a/tests/lib/Integration/Implementation/Common/Slot/TestChildUpdatesParent.php
+++ b/tests/lib/Integration/Implementation/Common/Slot/TestChildUpdatesParent.php
@@ -41,7 +41,8 @@ class TestChildUpdatesParent extends Slot
return;
}
- $parentLocation = $this->persistenceHandler->locationHandler()->load($contentInfo->mainLocationId);
+ $location = $this->persistenceHandler->locationHandler()->load($contentInfo->mainLocationId);
+ $parentLocation = $this->persistenceHandler->locationHandler()->load($location->parentId);
$parentContentInfo = $contentHandler->loadContentInfo($parentLocation->contentId);
$parentContentType = $this->persistenceHandler->contentTypeHandler()->load($parentContentInfo->contentTypeId); | NGSTACK-<I> Fix signal slot | netgen_ezplatform-search-extra | train |
62c4d15c71bf92063d3bed9fc8684914d08ee1aa | diff --git a/test/session.js b/test/session.js
index <HASH>..<HASH> 100644
--- a/test/session.js
+++ b/test/session.js
@@ -740,6 +740,26 @@ describe('session()', function(){
.expect(shouldSetCookie('connect.sid'))
.expect(200, done)
})
+
+ describe('when mounted at "/foo"', function () {
+ before(function () {
+ this.server = createServer(mountAt('/foo'), { cookie: { path: '/foo/bar' } })
+ })
+
+ it('should set cookie for "/foo/bar" request', function (done) {
+ request(this.server)
+ .get('/foo/bar')
+ .expect(shouldSetCookie('connect.sid'))
+ .expect(200, done)
+ })
+
+ it('should not set cookie for "/foo/foo/bar" request', function (done) {
+ request(this.server)
+ .get('/foo/foo/bar')
+ .expect(shouldNotHaveHeader('Set-Cookie'))
+ .expect(200, done)
+ })
+ })
})
describe('when "secure" set to "auto"', function () {
@@ -2136,6 +2156,15 @@ function expires (res) {
return header && parseSetCookie(header).expires
}
+function mountAt (path) {
+ return function (req, res) {
+ if (req.url.indexOf(path) === 0) {
+ req.originalUrl = req.url
+ req.url = req.url.slice(path.length)
+ }
+ }
+}
+
function parseSetCookie (header) {
var match
var pairs = [] | tests: add test for mounted middleware | expressjs_session | train |
d876cae40cae45ca42f3beb6f04ee248aed062c2 | diff --git a/openhtf/util/threads.py b/openhtf/util/threads.py
index <HASH>..<HASH> 100644
--- a/openhtf/util/threads.py
+++ b/openhtf/util/threads.py
@@ -60,7 +60,7 @@ class ExceptionSafeThread(threading.Thread):
self._thread_proc()
except Exception: # pylint: disable=broad-except
if not self._thread_exception(*sys.exc_info()):
- logging.exception('Thread raised an exception: %s', self.name)
+ _LOG.exception('Thread raised an exception: %s', self.name)
raise
finally:
self._thread_finished()
@@ -83,6 +83,14 @@ class KillableThread(ExceptionSafeThread):
"""Killable thread raises an internal exception when killed.
Based on recipe available at http://tomerfiliba.com/recipes/Thread2/
+
+ Note: To fully address race conditions involved with the use of
+ PyThreadState_SetAsyncExc, the GIL must be held from when the thread is
+ checked to when it's async-raised. In this case, we're not doing that, and
+ there remains the remote possibility that a thread identifier is reused and we
+ accidentally kill the wrong thread.
+
+
"""
def kill(self):
@@ -92,15 +100,25 @@ class KillableThread(ExceptionSafeThread):
def async_raise(self, exc_type):
"""Raise the exception."""
- assert self.is_alive(), 'Only running threads have a thread identity'
+ # Should only be called on a started thread so raise otherwise
+ assert self.ident is not None, 'Only started threads have thread identifier'
+
+ # If the thread has died we don't want to raise an exception so log.
+ if not self.is_alive():
+ _LOG.debug('Not raising %s because thread %s (%s) is not alive',
+ exc_type, self.name, self.ident)
+ return
+
result = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(self.ident), ctypes.py_object(exc_type))
- if result == 0:
+ if result == 0 and self.is_alive():
+ # Don't raise an exception an error unnecessarily if the thread is dead.
raise ValueError('Thread ID was invalid.', self.ident)
- elif result != 1:
+ elif result > 1:
# Something bad happened, call with a NULL exception to undo.
ctypes.pythonapi.PyThreadState_SetAsyncExc(self.ident, None)
- raise SystemError('PyThreadState_SetAsyncExc failed.', self.ident)
+ raise RuntimeError('Error: PyThreadState_SetAsyncExc %s %s (%s) %s',
+ exc_type, self.name, self.ident, result)
def _thread_exception(self, exc_type, exc_val, exc_tb):
"""Suppress the exception when we're kill()'d.""" | Make killable threads more robust to race condition (#<I>)
* Make killable threads more robust to race condition
There is a very small chance that a killable thread will end on its own before it is killed. Make us robust to this race condition by not raising an error if the thread is no longer alive.
* cleaning up based on code review comments
* Adding extra protection around async_raise on dead thread
* adding comment about possible race conditions
* better documenting changes | google_openhtf | train |
f359e3c292bbccd8cfe421c948c5045387662c0a | diff --git a/encoder.go b/encoder.go
index <HASH>..<HASH> 100644
--- a/encoder.go
+++ b/encoder.go
@@ -21,7 +21,7 @@ const (
func NewComponent() *Component {
return &Component{
Elements: make([]Componenter, 0),
- Properties: make(map[string]string),
+ Properties: make(map[string][]string),
}
}
@@ -30,22 +30,24 @@ func NewComponent() *Component {
type Component struct {
Tipo string
Elements []Componenter
- Properties map[string]string
+ Properties map[string][]string
}
// Writes the component to the Writer
func (c *Component) Write(w *ICalEncode) {
w.WriteLine("BEGIN:" + c.Tipo + CRLF)
- // Iterate over component properites
+ // Iterate over component properties
var keys []string
for k := range c.Properties {
keys = append(keys, k)
}
sort.Strings(keys)
for _, key := range keys {
- val := c.Properties[key]
- w.WriteLine(WriteStringField(key, val))
+ vals := c.Properties[key]
+ for _, val := range vals {
+ w.WriteLine(WriteStringField(key, val))
+ }
}
for _, xc := range c.Elements {
@@ -67,9 +69,9 @@ func (c *Component) AddComponent(cc Componenter) {
c.Elements = append(c.Elements, cc)
}
-// AddProperty ads a property to the component
+// AddProperty adds a property to the component.
func (c *Component) AddProperty(key string, val string) {
- c.Properties[key] = val
+ c.Properties[key] = append(c.Properties[key], val)
}
// ICalEncode is the real writer, that wraps every line,
diff --git a/encoder_test.go b/encoder_test.go
index <HASH>..<HASH> 100644
--- a/encoder_test.go
+++ b/encoder_test.go
@@ -16,13 +16,14 @@ func TestComponentCreation(t *testing.T) {
c.AddProperty("CALSCAL", "GREGORIAN")
c.AddProperty("PRODID", "-//tmpo.io/src/goics")
- if c.Properties["CALSCAL"] != "GREGORIAN" {
- t.Error("Error setting property")
+ if c.Properties["CALSCAL"][0] != "GREGORIAN" {
+ t.Error("Error adding property")
}
m := goics.NewComponent()
m.SetType("VEVENT")
- m.AddProperty("UID", "testing")
+ m.AddProperty("UID", "testing1")
+ m.AddProperty("UID", "testing2") // Not that you'd ever _want_ to have multiple UIDs but for testing this is fine.
c.AddComponent(m)
@@ -30,6 +31,20 @@ func TestComponentCreation(t *testing.T) {
t.Error("Error adding a component")
}
+ ins := &EventTest{
+ component: c,
+ }
+
+ w := &bytes.Buffer{}
+ enc := goics.NewICalEncode(w)
+ enc.Encode(ins)
+
+ want := "BEGIN:VCALENDAR\r\nCALSCAL:GREGORIAN\r\nPRODID:-//tmpo.io/src/goics\r\nBEGIN:VEVENT\r\nUID:testing1\r\nUID:testing2\r\nEND:VEVENT\r\nEND:VCALENDAR\r\n"
+ got := w.String()
+
+ if got != want {
+ t.Errorf("encoded value mismatch:\ngot:\n%s\n\nwant:\n%s", got, want)
+ }
}
type EventTest struct { | Add support for encoding multiple properties with the same key | jordic_goics | train |
771ad46643c6f50d1bc5099e2370fb705f6f0987 | diff --git a/lib/file-stream.js b/lib/file-stream.js
index <HASH>..<HASH> 100644
--- a/lib/file-stream.js
+++ b/lib/file-stream.js
@@ -21,7 +21,10 @@ function FileStream (file, opts) {
this._torrent = file._torrent
var start = (opts && opts.start) || 0
- var end = (opts && opts.end) || (file.length - 1)
+ var end = (opts && opts.end && opts.end < file.length)
+ ? opts.end
+ : file.length - 1
+
var pieceLength = file._torrent.pieceLength
this._startPiece = (start + file.offset) / pieceLength | 0 | Clamp file.createReadStream({ end: num }) value
`render-media` and `videostream` may get passed an `end` value that’s
too large. Clamp it. | webtorrent_webtorrent | train |
d02158c0effa009fa4bb61421a868882aad0e470 | diff --git a/protocol.go b/protocol.go
index <HASH>..<HASH> 100644
--- a/protocol.go
+++ b/protocol.go
@@ -490,8 +490,9 @@ func (c *rawConnection) handleIndexUpdate(im IndexMessage) {
func filterIndexMessageFiles(fs []FileInfo) []FileInfo {
var out []FileInfo
for i, f := range fs {
- if f.Name == "" {
- l.Infoln("Dropping nil filename from incoming index")
+ switch f.Name {
+ case "", ".", "..", "/": // A few obviously invalid filenames
+ l.Infof("Dropping invalid filename %q from incoming index", f.Name)
if out == nil {
// Most incoming updates won't contain anything invalid, so we
// delay the allocation and copy to output slice until we
@@ -500,8 +501,10 @@ func filterIndexMessageFiles(fs []FileInfo) []FileInfo {
out = make([]FileInfo, i, len(fs)-1)
copy(out, fs)
}
- } else if out != nil {
- out = append(out, f)
+ default:
+ if out != nil {
+ out = append(out, f)
+ }
}
}
if out != nil { | Also filter out some other obviously invalid filenames (ref #<I>) | syncthing_syncthing | train |
3f2a56e062f69d0428e7f46c8f3c752bd85a5289 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -162,7 +162,7 @@ function Texture(opts) {
'vec4 fourTapSample(vec2 tileOffset, //Tile offset in the atlas ',
' vec2 tileUV, //Tile coordinate (as above)',
-' sampler2D atlas) {',
+' sampler2D atlas) {', // }
' //Initialize accumulators',
' vec4 color = vec4(0.0, 0.0, 0.0, 0.0);',
' float totalWeight = 0.0;', | vim hack to fix bouncing on percent key to jump between matching brace pairs; gets confused by embedded glsl | deathcap_voxel-texture-shader | train |
92f267a17a536764f7cefba76d0cee8639814c1f | diff --git a/src/RelyingParty.js b/src/RelyingParty.js
index <HASH>..<HASH> 100644
--- a/src/RelyingParty.js
+++ b/src/RelyingParty.js
@@ -284,9 +284,38 @@ class RelyingParty extends JSONDocument {
}
/**
- * UserInfo
+ * userinfo
+ *
+ * @description Promises the authenticated user's claims.
+ * @returns {Promise}
*/
- userinfo () {}
+ userinfo () {
+ try {
+ let configuration = this.provider.configuration
+
+ assert(configuration, 'OpenID Configuration is not initialized.')
+ assert(configuration.registration_endpoint, 'OpenID Configuration is missing registration_endpoint.')
+
+ let uri = configuration.userinfo_endpoint
+ let access_token = this.session.access_token
+
+ assert(access_token, 'Missing access token.')
+
+ let headers = new Headers({
+ 'Content-Type': 'application/json',
+ 'Authorization': `Bearer ${access_token}`
+ })
+
+ return fetch(uri, {headers})
+ .then(status(200))
+ .then(response => {
+ return response.json()
+ })
+
+ } catch (error) {
+ return Promise.reject(error)
+ }
+ }
/**
* Is Authenticated | wip(RelyingParty): draft userinfo method | anvilresearch_oidc-rp | train |
13a84381d5c5220488b85be83f9b8ab0ab058e25 | diff --git a/fixtures/test_configs.go b/fixtures/test_configs.go
index <HASH>..<HASH> 100644
--- a/fixtures/test_configs.go
+++ b/fixtures/test_configs.go
@@ -33,7 +33,7 @@ func SetupTestConfigs() *TestConfigs {
content := `---
github.com:
- user: jingweno
+- user: jingweno
oauth_token: 123
protocol: http`
ioutil.WriteFile(file.Name(), []byte(content), os.ModePerm)
diff --git a/github/config.go b/github/config.go
index <HASH>..<HASH> 100644
--- a/github/config.go
+++ b/github/config.go
@@ -22,7 +22,7 @@ type yamlHost struct {
Protocol string `yaml:"protocol"`
}
-type yamlConfig map[string]yamlHost
+type yamlConfig map[string][]yamlHost
type Host struct {
Host string `toml:"host"`
@@ -165,11 +165,7 @@ func configsFile() string {
func CurrentConfig() *Config {
c := &Config{}
-
- err := newConfigService().Load(configsFile(), c)
- if err != nil {
- panic(err)
- }
+ newConfigService().Load(configsFile(), c)
return c
}
diff --git a/github/config_decoder.go b/github/config_decoder.go
index <HASH>..<HASH> 100644
--- a/github/config_decoder.go
+++ b/github/config_decoder.go
@@ -37,11 +37,12 @@ func (y *yamlConfigDecoder) Decode(r io.Reader, c *Config) error {
}
for h, v := range yc {
+ vv := v[0]
host := Host{
Host: h,
- User: v.User,
- AccessToken: v.OAuthToken,
- Protocol: v.Protocol,
+ User: vv.User,
+ AccessToken: vv.OAuthToken,
+ Protocol: vv.Protocol,
}
c.Hosts = append(c.Hosts, host)
}
diff --git a/github/config_encoder.go b/github/config_encoder.go
index <HASH>..<HASH> 100644
--- a/github/config_encoder.go
+++ b/github/config_encoder.go
@@ -25,10 +25,12 @@ type yamlConfigEncoder struct {
func (y *yamlConfigEncoder) Encode(w io.Writer, c *Config) error {
yc := make(yamlConfig)
for _, h := range c.Hosts {
- yc[h.Host] = yamlHost{
- User: h.User,
- OAuthToken: h.AccessToken,
- Protocol: h.Protocol,
+ yc[h.Host] = []yamlHost{
+ {
+ User: h.User,
+ OAuthToken: h.AccessToken,
+ Protocol: h.Protocol,
+ },
}
}
diff --git a/github/config_service_test.go b/github/config_service_test.go
index <HASH>..<HASH> 100644
--- a/github/config_service_test.go
+++ b/github/config_service_test.go
@@ -99,7 +99,7 @@ func TestConfigService_YamlSave(t *testing.T) {
b, _ := ioutil.ReadFile(file.Name())
content := `github.com:
- user: jingweno
+- user: jingweno
oauth_token: "123"
protocol: https`
assert.Equal(t, content, strings.TrimSpace(string(b))) | Turns out Host is an array for the YAML format | github_hub | train |
a212f4b113b0be34bdf4a1b30af99f8dedcb7cd5 | diff --git a/src/main/java/com/belladati/sdk/report/ReportInfo.java b/src/main/java/com/belladati/sdk/report/ReportInfo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/belladati/sdk/report/ReportInfo.java
+++ b/src/main/java/com/belladati/sdk/report/ReportInfo.java
@@ -66,4 +66,12 @@ public interface ReportInfo extends ResourceInfo<Report>, Localizable {
* @param text text of the comment to post
*/
void postComment(String text);
+
+ /**
+ * Deletes a comment with the specified ID.
+ *
+ * @param id ID of the comment to delete
+ */
+ void deleteComment(String id);
+
} | [BDSDK-<I>] Resolved & Tests | BellaDati_belladati-sdk-api | train |
57eabadb531ed6ac1b90e08d0f828da0c5f7c406 | diff --git a/flask_mongoalchemy/__init__.py b/flask_mongoalchemy/__init__.py
index <HASH>..<HASH> 100644
--- a/flask_mongoalchemy/__init__.py
+++ b/flask_mongoalchemy/__init__.py
@@ -30,6 +30,10 @@ def _get_mongo_uri(app, key=lambda x:'MONGOALCHEMY_%s' % x):
auth = ''
database = ''
+ uri = app.config.get(key('CONNECTION_STRING'))
+ if uri:
+ return uri
+
if app.config.get(key('USER')) is not None:
auth = app.config.get(key('USER'))
if app.config.get(key('PASSWORD')) is not None:
diff --git a/tests/test_mongodb_uri.py b/tests/test_mongodb_uri.py
index <HASH>..<HASH> 100644
--- a/tests/test_mongodb_uri.py
+++ b/tests/test_mongodb_uri.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2014 flask-mongoalchemy authors. All rights reserved.
+# Copyright 2015 flask-mongoalchemy authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
@@ -43,3 +43,8 @@ class MongoDBURITestCase(BaseTestCase):
from flask.ext.mongoalchemy import _get_mongo_uri
self.assertEqual(_get_mongo_uri(self.app),
'mongodb://database.lukehome.com:27017/?safe=true')
+
+ def test_mongodb_uri_connection_string(self):
+ self.app.config['MONGOALCHEMY_CONNECTION_STRING'] = uri = 'mongodb://luke@rhost:27018/test'
+ from flask.ext.mongoalchemy import _get_mongo_uri
+ self.assertEqual(_get_mongo_uri(self.app), uri) | all: support the MONGOALCHEMY_CONNECTION_STRING setting
Related to #<I>. | cobrateam_flask-mongoalchemy | train |
5527e595a7dab058e49884ac8a201376834cf873 | diff --git a/idx/bigtable/bigtable.go b/idx/bigtable/bigtable.go
index <HASH>..<HASH> 100644
--- a/idx/bigtable/bigtable.go
+++ b/idx/bigtable/bigtable.go
@@ -466,6 +466,9 @@ LOOP:
}
case req, ok := <-b.writeQueue:
if !ok {
+ // writeQueue was closed. Flush and exit.
+ timer.Stop()
+ flush()
break LOOP
}
buffer = append(buffer, req) | flush buffer when writeQueue is closed
The writeQueue will be closed when the plugin is shut down.
Before exiting we should flush any pending writes that are in the
buffer. | grafana_metrictank | train |
169ba7988750cf48ce6f07c20344d39c81db7b92 | diff --git a/tinkergraph-gremlin/src/test/java/org/apache/tinkerpop/gremlin/tinkergraph/structure/TinkerGraphTest.java b/tinkergraph-gremlin/src/test/java/org/apache/tinkerpop/gremlin/tinkergraph/structure/TinkerGraphTest.java
index <HASH>..<HASH> 100644
--- a/tinkergraph-gremlin/src/test/java/org/apache/tinkerpop/gremlin/tinkergraph/structure/TinkerGraphTest.java
+++ b/tinkergraph-gremlin/src/test/java/org/apache/tinkerpop/gremlin/tinkergraph/structure/TinkerGraphTest.java
@@ -177,54 +177,52 @@ public class TinkerGraphTest {
}
@Test
- @Ignore
- public void testPlay5() throws Exception {
- Graph graph = TinkerGraph.open();
+ public void shouldHaveSizeOfStarGraphLessThanDetached() throws Exception {
+ final Graph graph = TinkerGraph.open();
final Random random = new Random();
final Vertex vertex = graph.addVertex("person");
- System.out.println(randomString(4));
for(int i=0;i<100000;i++) {
vertex.addEdge("knows",graph.addVertex("person"),"since",random.nextLong(),"created","blah");
}
+
StarGraph starGraph = StarGraph.of(vertex);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
GryoWriter.build().create().writeObject(outputStream, starGraph);
outputStream.flush();
- System.out.println("Size of star graph (1): " + outputStream.size());
+ final int starGraph1 = outputStream.size();
+ System.out.println("Size of star graph (1): " + starGraph1);
///
starGraph = GryoReader.build().create().readObject(new ByteArrayInputStream(outputStream.toByteArray()));
outputStream = new ByteArrayOutputStream();
GryoWriter.build().create().writeObject(outputStream, starGraph);
outputStream.flush();
- System.out.println("Size of star graph (2): " + outputStream.size());
+ final int starGraph2 = outputStream.size();
+ System.out.println("Size of star graph (2): " + starGraph2);
+
+ assertEquals(starGraph1, starGraph2);
+
//
starGraph = GryoReader.build().create().readObject(new ByteArrayInputStream(outputStream.toByteArray()));
outputStream = new ByteArrayOutputStream();
GryoWriter.build().create().writeObject(outputStream, starGraph);
outputStream.flush();
- System.out.println("Size of star graph (3): " + outputStream.size());
+ final int starGraph3 = outputStream.size();
+ System.out.println("Size of star graph (3): " + starGraph3);
+
+ assertEquals(starGraph1, starGraph3);
+ assertEquals(starGraph2, starGraph3);
- //starGraph.getStarVertex().edges(Direction.OUT).forEachRemaining(System.out::println);
+ //starGraph.getStarVertex().edges(Direction.OUT).forEachRemaining(System.out::println);
/////////
outputStream = new ByteArrayOutputStream();
GryoWriter.build().create().writeVertex(outputStream, vertex, Direction.BOTH);
outputStream.flush();
- System.out.println("Size of detached vertex (1): " + outputStream.size());
+ final int detached = outputStream.size();
+ System.out.println("Size of detached vertex (1): " + detached);
-
-
- }
-
- private String randomString(final int length) {
- Random random = new Random();
- String randomString = new String();
- for(int i =0;i<length;i++) {
- final String temp = String.valueOf((char) random.nextInt());
- randomString = randomString + (temp.equals("~") ? "a" : temp);
- }
- return randomString;
+ assertTrue(starGraph1 < detached);
}
@Test | Converted stargraph serialization sizer into a test. | apache_tinkerpop | train |
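
The converted test pins two properties: a serialization round trip must not change the encoded size, and the compact representation must stay smaller than the verbose one. A hedged Go sketch of the same regression-test shape using encoding/gob; the struct shapes and sizes are illustrative assumptions, not TinkerPop's formats.

package sizes

import (
	"bytes"
	"encoding/gob"
	"testing"
)

type compact struct{ Edges []int32 }

type verbose struct {
	Edges  []int64
	Labels []string
}

func encodedSize(t *testing.T, v interface{}) int {
	t.Helper()
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(v); err != nil {
		t.Fatal(err)
	}
	return buf.Len()
}

func TestCompactStaysSmaller(t *testing.T) {
	c := compact{Edges: make([]int32, 1000)}
	v := verbose{Edges: make([]int64, 1000), Labels: make([]string, 1000)}
	for i := range c.Edges {
		c.Edges[i] = int32(i)
		v.Edges[i] = int64(i)
		v.Labels[i] = "since"
	}

	first := encodedSize(t, c)

	// round trip: decode and re-encode; the size must be stable
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(c); err != nil {
		t.Fatal(err)
	}
	var c2 compact
	if err := gob.NewDecoder(&buf).Decode(&c2); err != nil {
		t.Fatal(err)
	}
	second := encodedSize(t, c2)

	if first != second {
		t.Fatalf("round trip changed size: %d vs %d", first, second)
	}
	if detached := encodedSize(t, v); first >= detached {
		t.Fatalf("compact form (%d) should be smaller than verbose form (%d)", first, detached)
	}
}
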
3175f336c1c03ed0dd2cbd9698143cbf4e38ec4a | diff --git a/app/controllers/crowdblog/admin/posts_controller.rb b/app/controllers/crowdblog/admin/posts_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/crowdblog/admin/posts_controller.rb
+++ b/app/controllers/crowdblog/admin/posts_controller.rb
@@ -7,6 +7,7 @@ module Crowdblog
def new
@post = Post.new
+ @post.state = :drafted
@post.author = current_user
@post.save!
redirect_to edit_admin_post_path(@post)
@@ -44,7 +45,7 @@ module Crowdblog
end
def update
- @post.update_attributes(post_params, updated_by: current_user)
+ @post.update_attributes(post_params)
if @post.allowed_to_update_permalink?
@post.regenerate_permalink
@post.save!
diff --git a/lib/crowdblog/rspec/crowdblog_shared_examples.rb b/lib/crowdblog/rspec/crowdblog_shared_examples.rb
index <HASH>..<HASH> 100644
--- a/lib/crowdblog/rspec/crowdblog_shared_examples.rb
+++ b/lib/crowdblog/rspec/crowdblog_shared_examples.rb
@@ -1,7 +1,7 @@
shared_examples_for "a crowdblog", :type => :feature do
let(:post) do
- Crowdblog::Post.create :title => 'A post title', :body => 'A post body'
+ Crowdblog::Post.create :title => 'A post title', :body => 'A post body', :state => :drafted
end
describe "Home" do | Fix an issue where state_machine is not setting the initial state | crowdint_crowdblog | train |
6034edb7503a71a270276ea9e9fb42594d8a3035 | diff --git a/tasks/scp.js b/tasks/scp.js
index <HASH>..<HASH> 100644
--- a/tasks/scp.js
+++ b/tasks/scp.js
@@ -15,8 +15,7 @@ module.exports = function(grunt) {
grunt.registerMultiTask('scp', 'copy files to remote server.', function() {
var options = this.options({
host: 'localhost',
- username: 'lepture',
- password: '123'
+ username: 'admin'
});
var done = this.async(); | delete password option be default, change username to admin. fix #4 | spmjs_grunt-scp | train |
14ec0e6c9f93ece0ff7ee80a736596ddbe601cd3 | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -59,7 +59,7 @@ gulp.task('doc', ['jsdoc']);
gulp.task('jsdoc', ['clean'], function(cb) {
gulp.src('')
.pipe(shell([
- './node_modules/.bin/jsdoc --verbose --template ./node_modules/jsdoc-baseline --readme ./README.md --destination ./doc/ ./lib/*.js'
+ './node_modules/.bin/jsdoc --verbose --readme ./README.md --destination ./doc/ ./lib/*.js'
]));
cb(null);
});
@@ -89,7 +89,7 @@ gulp.task('publish-npm', function() {
var npm = require("npm");
npm.load(function (er, npm) {
// NPM
- npm.commands.publish();
+ npm.commands.publish();
});
});
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -25,8 +25,8 @@
"url": "git+https://github.com/berkeleybop/bbop-graph-noctua.git"
},
"engines": {
- "node": ">= 0.12.2",
- "npm": ">= 2.7.4"
+ "node": ">= 8.11.1",
+ "npm": ">= 5.6.0"
},
"dependencies": {
"bbop-core": "0.0.5",
@@ -35,19 +35,19 @@
"underscore": "1.8.3"
},
"devDependencies": {
- "chai": "^2.3.0",
- "del": "^1.1.1",
+ "chai": "^4.1.2",
+ "del": "^3.0.0",
"gulp": "^3.8.11",
- "gulp-bump": "^0.3.0",
- "gulp-git": "^1.2.3",
+ "gulp-bump": "^3.1.0",
+ "gulp-git": "^2.5.2",
"gulp-jsdoc": "^0.1.4",
- "gulp-mocha": "^2.0.1",
+ "gulp-mocha": "^5.0.0",
"gulp-pandoc": "^0.2.1",
"gulp-rename": "^1.2.2",
- "gulp-shell": "^0.4.2",
- "gulp-uglify": "^1.2.0",
+ "gulp-shell": "^0.6.5",
+ "gulp-uglify": "^3.0.0",
"jsdoc": "^3.3.0",
- "jsdoc-baseline": "git://github.com/hegemonic/jsdoc-baseline.git#74d1dc8075"
+ "npm-check-updates": "^2.14.1"
},
"bundleDependencies": [],
"private": false,
diff --git a/tests/trivial.test.js b/tests/trivial.test.js
index <HASH>..<HASH> 100644
--- a/tests/trivial.test.js
+++ b/tests/trivial.test.js
@@ -7,18 +7,18 @@
var assert = require('chai').assert;
describe('our testing environment is sane', function(){
-
+
// State.
var thingy = null;
- // Pre-run.
+ // Pre-run.
before(function() {
thingy = 1;
});
-
+
// Trivially works two ways.
it('works at all (thingy)', function(){
- thingy.should.equal(1);
+ //thingy.should.equal(1);
assert.equal(thingy, 1);
});
@@ -27,7 +27,7 @@ describe('our testing environment is sane', function(){
var model = require('..');
assert.typeOf(model, 'object');
});
-
+
// Post-run.
after(function(){
thingy = null; | update packages to work with node 8 | berkeleybop_bbop-graph-noctua | train |
ffc9ed925661ff2906b9e0f0f2784dbd631fce09 | diff --git a/chef/spec/unit/application_spec.rb b/chef/spec/unit/application_spec.rb
index <HASH>..<HASH> 100644
--- a/chef/spec/unit/application_spec.rb
+++ b/chef/spec/unit/application_spec.rb
@@ -136,6 +136,7 @@ end
describe Chef::Application, "class method: fatal!" do
before do
+ Chef::Log.stub!(:fatal).with("blah").and_return(true)
Process.stub!(:exit).and_return(true)
end
diff --git a/chef/spec/unit/provider/package/apt_spec.rb b/chef/spec/unit/provider/package/apt_spec.rb
index <HASH>..<HASH> 100644
--- a/chef/spec/unit/provider/package/apt_spec.rb
+++ b/chef/spec/unit/provider/package/apt_spec.rb
@@ -138,8 +138,7 @@ describe Chef::Provider::Package::Apt, "install_package" do
@provider.should_receive(:run_command).with({
:command => "apt-get -q -y install emacs=1.0",
:environment => {
- "DEBIAN_FRONTEND" => "noninteractive",
- "LANG" => "en_US"
+ "DEBIAN_FRONTEND" => "noninteractive"
}
})
@provider.install_package("emacs", "1.0")
@@ -183,8 +182,7 @@ describe Chef::Provider::Package::Apt, "remove_package" do
@provider.should_receive(:run_command).with({
:command => "apt-get -q -y remove emacs",
:environment => {
- "DEBIAN_FRONTEND" => "noninteractive",
- "LANG" => "en_US"
+ "DEBIAN_FRONTEND" => "noninteractive"
}
})
@provider.remove_package("emacs", "1.0")
@@ -208,8 +206,7 @@ describe Chef::Provider::Package::Apt, "purge_package" do
@provider.should_receive(:run_command).with({
:command => "apt-get -q -y purge emacs",
:environment => {
- "DEBIAN_FRONTEND" => "noninteractive",
- "LANG" => "en_US"
+ "DEBIAN_FRONTEND" => "noninteractive"
}
})
@provider.purge_package("emacs", "1.0")
@@ -241,8 +238,7 @@ describe Chef::Provider::Package::Apt, "preseed_package" do
@provider.should_receive(:run_command).with({
:command => "debconf-set-selections /tmp/emacs-10.seed",
:environment => {
- "DEBIAN_FRONTEND" => "noninteractive",
- "LANG" => "en_US"
+ "DEBIAN_FRONTEND" => "noninteractive"
}
}).and_return(true)
@provider.preseed_package("emacs", "10")
diff --git a/chef/spec/unit/rest_spec.rb b/chef/spec/unit/rest_spec.rb
index <HASH>..<HASH> 100644
--- a/chef/spec/unit/rest_spec.rb
+++ b/chef/spec/unit/rest_spec.rb
@@ -92,6 +92,7 @@ end
describe Chef::REST, "run_request method" do
before(:each) do
+ Chef::REST::CookieJar.stub!(:instance).and_return({})
@r = Chef::REST.new("url")
@url_mock = mock("URI", :null_object => true)
@url_mock.stub!(:host).and_return("one") | Fixing Chef::REST tests that broke from my CookieJar addition | chef_chef | train |
3b8623b2d443f0c3cde15bea5c6ed43b96a9ae11 | diff --git a/assets/app/scripts/services/login.js b/assets/app/scripts/services/login.js
index <HASH>..<HASH> 100644
--- a/assets/app/scripts/services/login.js
+++ b/assets/app/scripts/services/login.js
@@ -42,10 +42,12 @@ angular.module('openshiftConsole')
var deferred = $q.defer();
var uri = new URI(_oauth_authorize_uri);
+ // Never send a local fragment to remote servers
+ var returnUri = new URI($location.url()).fragment("");
uri.query({
client_id: _oauth_client_id,
response_type: 'token',
- state: $location.url(),
+ state: returnUri.toString(),
redirect_uri: _oauth_redirect_uri,
});
authLogger.log("RedirectLoginService.login(), redirecting", uri.toString());
diff --git a/pkg/assets/bindata.go b/pkg/assets/bindata.go
index <HASH>..<HASH> 100644
--- a/pkg/assets/bindata.go
+++ b/pkg/assets/bindata.go
@@ -21089,11 +21089,11 @@ if ("" == c) return e.reject({
error:"invalid_request",
error_description:"RedirectLoginServiceProvider.OAuthRedirectURI not set"
});
-var f = e.defer(), h = new URI(b);
+var f = e.defer(), h = new URI(b), i = new URI(d.url()).fragment("");
return h.query({
client_id:a,
response_type:"token",
-state:d.url(),
+state:i.toString(),
redirect_uri:c
}), g.log("RedirectLoginService.login(), redirecting", h.toString()), window.location.href = h.toString(), f.promise;
}, | Prevent local fragment from being sent to a remote server | openshift_origin | train |
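
The fix blanks the local URL fragment before the app's current location is packed into the OAuth `state` parameter, since a fragment (for example, a previously issued token) must never travel to a remote server. A Go sketch of the same construction using net/url; the endpoint, client id, and URLs in main are placeholder assumptions.

package main

import (
	"fmt"
	"net/url"
)

// buildAuthorizeURL embeds the current app location in `state`,
// dropping any local fragment so it never leaves the browser context.
func buildAuthorizeURL(authorize, clientID, redirectURI, current string) (string, error) {
	ret, err := url.Parse(current)
	if err != nil {
		return "", err
	}
	ret.Fragment = "" // never send a local fragment to a remote server

	u, err := url.Parse(authorize)
	if err != nil {
		return "", err
	}
	q := u.Query()
	q.Set("client_id", clientID)
	q.Set("response_type", "token")
	q.Set("state", ret.String())
	q.Set("redirect_uri", redirectURI)
	u.RawQuery = q.Encode()
	return u.String(), nil
}

func main() {
	s, _ := buildAuthorizeURL("https://auth.example/oauth/authorize",
		"console", "https://app.example/cb", "/project/pods#access_token=abc")
	fmt.Println(s) // state carries /project/pods without the fragment
}
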
99d4d545da9cb980d066d144047f205b64c404a7 | diff --git a/acf-exports/acf-user-author-image.json b/acf-exports/acf-user-author-image.json
index <HASH>..<HASH> 100644
--- a/acf-exports/acf-user-author-image.json
+++ b/acf-exports/acf-user-author-image.json
@@ -5,7 +5,7 @@
{
"key": "field_56c714f8d0a42",
"label": "Profile picture",
- "name": "user_profile_picture",
+ "name": "user_profile_picture_id",
"type": "image",
"instructions": "",
"required": 0,
diff --git a/library/Admin/General.php b/library/Admin/General.php
index <HASH>..<HASH> 100644
--- a/library/Admin/General.php
+++ b/library/Admin/General.php
@@ -10,6 +10,14 @@ class General
// Post status private rename to "Only for logged in users"
add_action('current_screen', array($this, 'renamePrivate'));
+
+ add_action('profile_update', function ($userId) {
+ $imageId = get_user_meta($userId, 'user_profile_picture_id', true);
+ $imageUrl = wp_get_attachment_image_src($imageId, array(250, 250));
+ $imageUrl = isset($imageUrl[0]) ? $imageUrl[0] : null;
+
+ update_user_meta($userId, 'user_profile_picture', $imageUrl);
+ });
}
/**
diff --git a/views/partials/timestamps.blade.php b/views/partials/timestamps.blade.php
index <HASH>..<HASH> 100644
--- a/views/partials/timestamps.blade.php
+++ b/views/partials/timestamps.blade.php
@@ -3,8 +3,8 @@
<li>
<strong><?php echo apply_filters('Municipio/author_display/title', __('Published by', 'municipio')); ?>:</strong>
<span class="post-author post-author-margin-left">
- @if (get_field('page_show_author_image', 'option') !== false && get_field('post_show_author_image', get_the_id()) !== false && !empty(get_field('user_profile_picture', 'user_' . get_the_author_meta('ID'))))
- <span class="post-author-image" style="background-image:url('{{ get_field('user_profile_picture', 'user_' . get_the_author_meta('ID')) }}');"><img src="{{ get_field('user_profile_picture', 'user_' . get_the_author_meta('ID')) }}" alt="{{ (!empty(get_the_author_meta('first_name')) && !empty(get_the_author_meta('last_name'))) ? get_the_author_meta('first_name') . ' ' . get_the_author_meta('last_name') : get_the_author() }}"></span>
+ @if (get_field('page_show_author_image', 'option') !== false && get_field('post_show_author_image', get_the_id()) !== false && !empty(get_the_author_meta('user_profile_picture')))
+ <span class="post-author-image" style="background-image:url('{{ get_the_author_meta('user_profile_picture') }}');"><img src="{{ get_the_author_meta('user_profile_picture') }}" alt="{{ (!empty(get_the_author_meta('first_name')) && !empty(get_the_author_meta('last_name'))) ? get_the_author_meta('first_name') . ' ' . get_the_author_meta('last_name') : get_the_author() }}"></span>
@endif
@if (!empty(get_the_author_meta('first_name')) && !empty(get_the_author_meta('last_name'))) | Author image as url instead of id | helsingborg-stad_Municipio | train |
7bd0e6017bbab820ad2f6c67863bde2cb22a13c3 | diff --git a/test/extended/prometheus/prometheus.go b/test/extended/prometheus/prometheus.go
index <HASH>..<HASH> 100644
--- a/test/extended/prometheus/prometheus.go
+++ b/test/extended/prometheus/prometheus.go
@@ -213,7 +213,7 @@ var _ = g.Describe("[Feature:Prometheus][Conformance] Prometheus", func() {
tests := map[string][]metricTest{
// should be checking there is no more than 1 alerts firing.
// Checking for specific alert is done in "should have a Watchdog alert in firing state".
- `sum(ALERTS{alertstate="firing"})`: {metricTest{greaterThanEqual: false, value: 2}},
+ `ALERTS{alertstate="firing"}`: {metricTest{greaterThanEqual: false, value: 2}},
}
runQueries(tests, oc, ns, execPod.Name, url, bearerToken)
Make the metric more readable when it fails | openshift_origin | train
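
Querying the raw instant vector `ALERTS{alertstate="firing"}` instead of `sum(...)` keeps each alert's labels in the result, so a failure can name the offending alerts. A hedged Go sketch that reads the same series through Prometheus' standard `/api/v1/query` HTTP endpoint; the base URL in main and the exact fields decoded are assumptions about a stock Prometheus.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

type promResp struct {
	Data struct {
		Result []struct {
			Metric map[string]string `json:"metric"`
			Value  []interface{}     `json:"value"`
		} `json:"result"`
	} `json:"data"`
}

// firingAlerts returns the label sets of all firing alerts, one entry
// per series, which is what makes a failing check self-explanatory.
func firingAlerts(base string) ([]map[string]string, error) {
	u := base + "/api/v1/query?query=" + url.QueryEscape(`ALERTS{alertstate="firing"}`)
	resp, err := http.Get(u)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	var pr promResp
	if err := json.NewDecoder(resp.Body).Decode(&pr); err != nil {
		return nil, err
	}
	var out []map[string]string
	for _, r := range pr.Data.Result {
		out = append(out, r.Metric)
	}
	return out, nil
}

func main() {
	alerts, err := firingAlerts("http://localhost:9090")
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, a := range alerts {
		fmt.Println(a["alertname"], a)
	}
}
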
63fcb1a3275bdf18de0314ed16947709f8013406 | diff --git a/ca/testutils/cautils.go b/ca/testutils/cautils.go
index <HASH>..<HASH> 100644
--- a/ca/testutils/cautils.go
+++ b/ca/testutils/cautils.go
@@ -144,12 +144,28 @@ func NewTestCAFromAPIRootCA(t *testing.T, tempBaseDir string, apiRootCA api.Root
if External {
// Start the CA API server - ensure that the external server doesn't have any intermediates
- externalSigningServer, err = NewExternalSigningServer(rootCA, tempBaseDir)
- assert.NoError(t, err)
+ var extRootCA ca.RootCA
+ if apiRootCA.RootRotation != nil {
+ extRootCA, err = ca.NewRootCA(
+ apiRootCA.RootRotation.CACert, apiRootCA.RootRotation.CACert, apiRootCA.RootRotation.CAKey, ca.DefaultNodeCertExpiration, nil)
+ // remove the key from the API root CA so that once the CA server starts up, it won't have a local signer
+ apiRootCA.RootRotation.CAKey = nil
+ } else {
+ extRootCA, err = ca.NewRootCA(
+ apiRootCA.CACert, apiRootCA.CACert, apiRootCA.CAKey, ca.DefaultNodeCertExpiration, nil)
+ // remove the key from the API root CA so that once the CA server starts up, it won't have a local signer
+ apiRootCA.CAKey = nil
+ }
+ require.NoError(t, err)
+
+ externalSigningServer, err = NewExternalSigningServer(extRootCA, tempBaseDir)
+ require.NoError(t, err)
+
externalCAs = []*api.ExternalCA{
{
Protocol: api.ExternalCA_CAProtocolCFSSL,
URL: externalSigningServer.URL,
+ CACert: extRootCA.Certs,
},
}
} | Ensure that the test CA, if an external CA is desired, does not have a local signer. | docker_swarmkit | train |
a002143d86454a15d64ae1428e3d6d3ebd323959 | diff --git a/bin/release.py b/bin/release.py
index <HASH>..<HASH> 100755
--- a/bin/release.py
+++ b/bin/release.py
@@ -216,7 +216,7 @@ def upload_javadocs(base_dir, version):
os.rename("apidocs", "%s/apidocs" % version_short)
## rsync this stuff to filemgmt.jboss.org
- uploader.upload_rsync(version_short, "[email protected]:/docs_htdocs/infinispan", flags = ['-rv', '--protocol=28'])
+ uploader.upload_rsync(version_short, "[email protected]:/docs_htdocs/infinispan")
os.chdir(base_dir)
def upload_schema(base_dir, version):
@@ -224,7 +224,7 @@ def upload_schema(base_dir, version):
os.chdir("%s/target/distribution/infinispan-%s/etc/schema" % (base_dir, version))
## rsync this stuff to filemgmt.jboss.org
- uploader.upload_rsync('.', "[email protected]:/docs_htdocs/infinispan/schemas", flags = ['-rv', '--protocol=28'])
+ uploader.upload_rsync('.', "[email protected]:/docs_htdocs/infinispan/schemas")
os.chdir(base_dir)
def do_task(target, args, async_processes):
diff --git a/bin/utils.py b/bin/utils.py
index <HASH>..<HASH> 100755
--- a/bin/utils.py
+++ b/bin/utils.py
@@ -322,11 +322,11 @@ class Git(object):
class DryRun(object):
location_root = "%s/%s" % (os.getenv("HOME"), "infinispan_release_dry_run")
- flags = "-r"
+ flags = "-r --protocol=28"
def __init__(self):
if settings['verbose']:
- self.flags = "-rv"
+ self.flags = "-rv --protocol=28"
def find_version(self, url):
return os.path.split(url)[1]
@@ -343,16 +343,16 @@ class Uploader(object):
def __init__(self):
if settings['verbose']:
self.scp_cmd = ['scp', '-rv']
- self.rsync_cmd = ['rsync', '-rv']
+ self.rsync_cmd = ['rsync', '-rv', '--protocol=28']
else:
self.scp_cmd = ['scp', '-r']
- self.rsync_cmd = ['rsync', '-r']
+ self.rsync_cmd = ['rsync', '-r', '--protocol=28']
def upload_scp(self, fr, to, flags = []):
- self.upload(fr, to, flags, self.scp_cmd)
+ self.upload(fr, to, flags, list(self.scp_cmd))
def upload_rsync(self, fr, to, flags = []):
- self.upload(fr, to, flags, self.rsync_cmd)
+ self.upload(fr, to, flags, list(self.rsync_cmd))
def upload(self, fr, to, flags, cmd):
for e in flags: | Fix release scripts, defensive copy of rsync parameters | infinispan_infinispan | train |
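
The defensive `list(...)` copy above fixes a classic aliasing bug: appending per-call flags to the shared base command mutates it for every later call. The same hazard exists with Go slices; a minimal sketch (the command tokens are just the ones from the script above):

package main

import "fmt"

var baseCmd = []string{"rsync", "-r", "--protocol=28"}

// buildCmd copies the shared base before appending per-call flags,
// so one call's flags can never leak into the next; this is the same
// bug the Python fix avoids with list(self.rsync_cmd).
func buildCmd(extra ...string) []string {
	cmd := make([]string, len(baseCmd), len(baseCmd)+len(extra))
	copy(cmd, baseCmd)
	return append(cmd, extra...)
}

func main() {
	fmt.Println(buildCmd("--delete"))
	fmt.Println(buildCmd()) // unaffected by the previous call
}
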
4b80e2b10a600fcbdb8923ea4fb82c77d6c91bb2 | diff --git a/blueflood-core/src/main/java/com/rackspacecloud/blueflood/tools/ops/Migration.java b/blueflood-core/src/main/java/com/rackspacecloud/blueflood/tools/ops/Migration.java
index <HASH>..<HASH> 100644
--- a/blueflood-core/src/main/java/com/rackspacecloud/blueflood/tools/ops/Migration.java
+++ b/blueflood-core/src/main/java/com/rackspacecloud/blueflood/tools/ops/Migration.java
@@ -64,6 +64,7 @@ public class Migration {
private static final String READ_THREADS = "readthreads";
private static final String BATCH_SIZE = "batchsize";
private static final String VERIFY = "verify";
+ private static final String DISCOVER = "discover";
private static final PrintStream out = System.out;
@@ -80,6 +81,7 @@ public class Migration {
cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] number of write threads to use. default=1").create(WRITE_THREADS));
cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] number of rows to read per query. default=100").create(BATCH_SIZE));
cliOptions.addOption(OptionBuilder.withDescription("[optional] verify a sampling 0.5% of data copied").create(VERIFY));
+ cliOptions.addOption(OptionBuilder.withDescription("[optional] discover and query other cassandra nodes").create(DISCOVER));
}
public static void main(String args[]) {
@@ -93,15 +95,16 @@ public class Migration {
final int batchSize = (Integer)options.get(BATCH_SIZE);
final int skip = (Integer)options.get(SKIP);
final int ttl = (Integer)options.get(TTL);
+ NodeDiscoveryType discovery = (NodeDiscoveryType)options.get(DISCOVER);
// connect to src cluster.
String[] srcParts = options.get(SRC).toString().split(":", -1);
- final AstyanaxContext<Keyspace> srcContext = connect(srcParts[0], Integer.parseInt(srcParts[1]), srcParts[2], readThreads);
+ final AstyanaxContext<Keyspace> srcContext = connect(srcParts[0], Integer.parseInt(srcParts[1]), srcParts[2], readThreads, discovery);
final Keyspace srcKeyspace = srcContext.getEntity();
// connect to dst cluster.
String[] dstParts = options.get(DST).toString().split(":", -1);
- final AstyanaxContext<Keyspace> dstContext = connect(dstParts[0], Integer.parseInt(dstParts[1]), dstParts[2], writeThreads);
+ final AstyanaxContext<Keyspace> dstContext = connect(dstParts[0], Integer.parseInt(dstParts[1]), dstParts[2], writeThreads, discovery);
final Keyspace dstKeyspace = dstContext.getEntity();
// establish column range.
@@ -337,12 +340,11 @@ public class Migration {
}
}
- private static AstyanaxContext<Keyspace> connect(String host, int port, String keyspace, int threads) {
+ private static AstyanaxContext<Keyspace> connect(String host, int port, String keyspace, int threads, NodeDiscoveryType discovery) {
AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
.forKeyspace(keyspace)
.withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
- .setDiscoveryType(NodeDiscoveryType.NONE))
- //.setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE))
+ .setDiscoveryType(discovery))
.withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl(host + ":" + keyspace)
.setMaxConns(threads * 2)
@@ -393,6 +395,8 @@ public class Migration {
options.put(VERIFY, line.hasOption(VERIFY));
+ options.put(DISCOVER, line.hasOption(DISCOVER) ? NodeDiscoveryType.RING_DESCRIBE : NodeDiscoveryType.NONE);
+
} catch (ParseException ex) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp("bf-migrate", cliOptions); | A way to discover (and use) other cassandra nodes. | rackerlabs_blueflood | train |
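
The new `-discover` switch is a boolean CLI flag mapped onto an enum-like option (`RING_DESCRIBE` vs `NONE`) before the connection is built. A minimal Go sketch of that flag-to-mode mapping; the flag name mirrors the patch, while the mode values are stand-ins.

package main

import (
	"flag"
	"fmt"
)

type discovery int

const (
	discoveryNone discovery = iota
	discoveryRingDescribe
)

func main() {
	discover := flag.Bool("discover", false, "discover and query other cluster nodes")
	flag.Parse()

	// map the boolean switch onto the enum the connection code consumes
	mode := discoveryNone
	if *discover {
		mode = discoveryRingDescribe
	}
	fmt.Printf("discovery mode: %d\n", mode)
}
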
d7fb1f7ed2ae904e0435dcf23a82e026ae3f12e7 | diff --git a/core/src/main/java/hudson/model/ComputerSet.java b/core/src/main/java/hudson/model/ComputerSet.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/ComputerSet.java
+++ b/core/src/main/java/hudson/model/ComputerSet.java
@@ -47,7 +47,6 @@ import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.interceptor.RequirePOST;
import javax.servlet.ServletException;
-import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import java.io.File;
import java.io.IOException;
import java.util.AbstractList;
@@ -236,12 +235,11 @@ public final class ComputerSet extends AbstractModelObject implements Describabl
Node src = app.getNode(from);
if(src==null) {
- rsp.setStatus(SC_BAD_REQUEST);
- if(Util.fixEmpty(from)==null)
- sendError(Messages.ComputerSet_SpecifySlaveToCopy(),req,rsp);
- else
- sendError(Messages.ComputerSet_NoSuchSlave(from),req,rsp);
- return;
+ if (Util.fixEmpty(from) == null) {
+ throw new Failure(Messages.ComputerSet_SpecifySlaveToCopy());
+ } else {
+ throw new Failure(Messages.ComputerSet_NoSuchSlave(from));
+ }
}
// copy through XStream
@@ -260,9 +258,8 @@ public final class ComputerSet extends AbstractModelObject implements Describabl
rsp.sendRedirect2(result.getNodeName()+"/configure");
} else {
// proceed to step 2
- if(mode==null) {
- rsp.sendError(SC_BAD_REQUEST);
- return;
+ if (mode == null) {
+ throw new Failure("No mode given");
}
NodeDescriptor d = NodeDescriptor.all().findByName(mode); | Simpler and better to throw Failure than to hand-craft an error result. | jenkinsci_jenkins | train |
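
The Jenkins change replaces hand-rolled `setStatus` plus `sendError` pairs with a thrown `Failure` that the framework renders uniformly. A Go sketch of the equivalent shape: a typed error carrying the HTTP status, mapped to a response in one place. The handler path, port, and the "known-node" check are illustrative assumptions.

package main

import (
	"errors"
	"fmt"
	"log"
	"net/http"
)

// Failure is the analogue of Jenkins' Failure: business code just
// returns it, and one central handler turns it into an HTTP response.
type Failure struct {
	Status  int
	Message string
}

func (f *Failure) Error() string { return f.Message }

func copyNode(from string) error {
	if from == "" {
		return &Failure{http.StatusBadRequest, "specify a node to copy"}
	}
	if from != "known-node" {
		return &Failure{http.StatusBadRequest, fmt.Sprintf("no such node %q", from)}
	}
	return nil
}

func handler(w http.ResponseWriter, r *http.Request) {
	if err := copyNode(r.FormValue("from")); err != nil {
		var f *Failure
		if errors.As(err, &f) {
			http.Error(w, f.Message, f.Status)
			return
		}
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	fmt.Fprintln(w, "copied")
}

func main() {
	http.HandleFunc("/copy", handler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
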
2f618d9fd506fcf94991ec38f98530d192c26626 | diff --git a/scripts/client/utils.js b/scripts/client/utils.js
index <HASH>..<HASH> 100644
--- a/scripts/client/utils.js
+++ b/scripts/client/utils.js
@@ -15,6 +15,7 @@ Utils.prototype.hash = function (password, salt) {
var self = this;
return new Promise(function (resolve, reject) {
self._bcrypt.hash(password, salt, function (err, hash) {
+ /* istanbul ignore next */
if (err) {
reject(err);
}
@@ -27,6 +28,7 @@ Utils.prototype.genSalt = function () {
var self = this;
return new Promise(function (resolve, reject) {
self._bcrypt.genSalt(10, function (err, salt) {
+ /* istanbul ignore next */
if (err) {
reject(err);
}
@@ -98,12 +100,12 @@ Utils.prototype.timeout = function (ms) {
};
// Executes promise and then resolves after event emitted once
-Utils.prototype.doAndOnce = function (promise, emitter, evnt) {
- var once = this.once(emitter, evnt);
- return promise().then(function () {
- return once;
- });
-};
+// Utils.prototype.doAndOnce = function (promise, emitter, evnt) {
+// var once = this.once(emitter, evnt);
+// return promise().then(function () {
+// return once;
+// });
+// };
Utils.prototype.once = function (emitter, evnt) {
return new Promise(function (resolve) { | refactor(all): common utils | delta-db_deltadb-server | train |
5e7f3fd2316c09aa505a7cddef1e5d5f108fc7eb | diff --git a/src/Models/User.php b/src/Models/User.php
index <HASH>..<HASH> 100644
--- a/src/Models/User.php
+++ b/src/Models/User.php
@@ -114,6 +114,17 @@ class User extends Model implements AuthenticatableContract, CanResetPasswordCon
}
/**
+ * An alias attribute for the character_id.
+ *
+ * @return mixed
+ */
+ public function getCharacterIdAttribute()
+ {
+
+ return $this->id;
+ }
+
+ /**
* Get the group the current user belongs to.
*
* @return \Illuminate\Database\Eloquent\Relations\BelongsToMany | Add a property alias for character_id on a User. | eveseat_web | train |
138c241337a8baa4b5ce0ce18b2f01f74227b132 | diff --git a/src/Models/User.php b/src/Models/User.php
index <HASH>..<HASH> 100644
--- a/src/Models/User.php
+++ b/src/Models/User.php
@@ -559,6 +559,30 @@ class User extends Entry
}
/**
+ * Returns the password last set unix timestamp.
+ *
+ * @return float|null
+ */
+ public function getPasswordLastSetTimestamp()
+ {
+ if ($time = $this->getPasswordLastSet()) {
+ return Utilities::convertWindowsTimeToUnixTime($time);
+ }
+ }
+
+ /**
+ * Returns the formatted timestamp of the password last set date.
+ *
+ * @return string|null
+ */
+ public function getPasswordLastSetDate()
+ {
+ if ($timestamp = $this->getPasswordLastSetTimestamp()) {
+ return (new DateTime())->setTimestamp($timestamp)->format($this->dateFormat);
+ }
+ }
+
+ /**
* Returns the users lockout time.
*
* @return string
@@ -907,7 +931,7 @@ class User extends Entry
$unixTime = Utilities::convertWindowsTimeToUnixTime($accountExpiry);
- return new \DateTime(date($this->dateFormat, $unixTime));
+ return new DateTime(date($this->dateFormat, $unixTime));
}
/**
@@ -919,7 +943,7 @@ class User extends Entry
*/
public function isExpired(DateTime $date = null)
{
- $date = ($date ?: new DateTime());
+ $date = $date ?: new DateTime();
$expirationDate = $this->expirationDate(); | Added ability to get the last password set date and timestamp | Adldap2_Adldap2 | train |
98143351ae02bd2fa220e568ce831b1c7a33fe53 | diff --git a/lib/expression-api/Post.js b/lib/expression-api/Post.js
index <HASH>..<HASH> 100644
--- a/lib/expression-api/Post.js
+++ b/lib/expression-api/Post.js
@@ -631,7 +631,7 @@
callback(new UT.ResizeEvent(currentSize.width, currentSize.height));
};
}
- UT.Expression._callAPI('container.resizeHeight', [height, true], fn);
+ UT.Expression._callAPI('container.resizeHeight', [height], fn);
return this;
} else {
var event = new UT.ResizeEvent(currentSize.width, currentSize.height); | Fix a bug in resize height property - callback never called and error in sandbox. | urturn_urturn-expression-api | train |
6204ef5e30e766319f3eb9b689f6606d3c530fad | diff --git a/core/src/main/java/net/time4j/PrettyTime.java b/core/src/main/java/net/time4j/PrettyTime.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/net/time4j/PrettyTime.java
+++ b/core/src/main/java/net/time4j/PrettyTime.java
@@ -32,6 +32,7 @@ import net.time4j.format.TextWidth;
import net.time4j.format.UnitPatterns;
import net.time4j.tz.TZID;
import net.time4j.tz.Timezone;
+import net.time4j.tz.ZonalOffset;
import java.text.DecimalFormatSymbols;
import java.text.MessageFormat;
@@ -676,7 +677,7 @@ public final class PrettyTime {
// fill values-array from duration
boolean negative = duration.isNegative();
long[] values = new long[8];
- pushDuration(values, duration, this.weekToDays);
+ pushDuration(values, duration, this.refClock, this.weekToDays);
// format duration items
List<Object> parts = new ArrayList<Object>();
@@ -911,6 +912,7 @@ public final class PrettyTime {
private static void pushDuration(
long[] values,
Duration<?> duration,
+ TimeSource<?> refClock,
boolean weekToDays
) {
@@ -935,11 +937,12 @@ public final class PrettyTime {
} else if (unit.equals(CalendarUnit.weekBasedYears())) {
values[0] = MathUtils.safeAdd(amount, values[0]); // YEARS
} else { // approximated duration by normalization without nanos
- PlainTimestamp start = SystemClock.inLocalView().now();
+ Moment unix = Moment.from(refClock.currentTime());
+ PlainTimestamp start = unix.toZonalTimestamp(ZonalOffset.UTC);
PlainTimestamp end = start.plus(amount, unit);
IsoUnit[] units = (weekToDays ? TSP_UNITS : STD_UNITS);
Duration<?> part = Duration.in(units).between(start, end);
- pushDuration(values, part, weekToDays);
+ pushDuration(values, part, refClock, weekToDays);
}
}
diff --git a/i18n/src/test/java/net/time4j/i18n/PrettyTimeTest.java b/i18n/src/test/java/net/time4j/i18n/PrettyTimeTest.java
index <HASH>..<HASH> 100644
--- a/i18n/src/test/java/net/time4j/i18n/PrettyTimeTest.java
+++ b/i18n/src/test/java/net/time4j/i18n/PrettyTimeTest.java
@@ -3,10 +3,14 @@ package net.time4j.i18n;
import net.time4j.CalendarUnit;
import net.time4j.ClockUnit;
import net.time4j.Duration;
+import net.time4j.IsoUnit;
import net.time4j.Moment;
import net.time4j.PlainTimestamp;
import net.time4j.PrettyTime;
import net.time4j.base.TimeSource;
+import net.time4j.engine.ChronoEntity;
+import net.time4j.engine.Chronology;
+import net.time4j.engine.UnitRule;
import net.time4j.format.TextWidth;
import net.time4j.tz.Timezone;
import net.time4j.tz.ZonalOffset;
@@ -16,8 +20,10 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
+import static net.time4j.CalendarUnit.CENTURIES;
import static net.time4j.CalendarUnit.DAYS;
import static net.time4j.CalendarUnit.MONTHS;
+import static net.time4j.CalendarUnit.QUARTERS;
import static net.time4j.CalendarUnit.WEEKS;
import static net.time4j.CalendarUnit.YEARS;
import static net.time4j.ClockUnit.HOURS;
@@ -559,4 +565,95 @@ public class PrettyTimeTest {
is("in 23 Tagen"));
}
+ @Test
+ public void printCenturiesAndWeekBasedYearsEnglish() {
+ Duration<?> dur =
+ Duration.ofZero()
+ .plus(1, CENTURIES)
+ .plus(2, CalendarUnit.weekBasedYears());
+ assertThat(
+ PrettyTime.of(Locale.US).print(dur, TextWidth.WIDE),
+ is("102 years"));
+ }
+
+ @Test
+ public void printOverflowUnitsEnglish() {
+ Duration<?> dur =
+ Duration.ofZero()
+ .plus(1, QUARTERS)
+ .plus(2, MONTHS.withCarryOver());
+ assertThat(
+ PrettyTime.of(Locale.US).print(dur, TextWidth.WIDE),
+ is("5 months"));
+ }
+
+ @Test
+ public void printSpecialUnitsEnglish() {
+ TimeSource<?> clock = new TimeSource<Moment>() {
+ @Override
+ public Moment currentTime() {
+ return PlainTimestamp.of(2014, 10, 1, 14, 30).atUTC();
+ }
+ };
+ Duration<?> dur =
+ Duration.ofZero()
+ .plus(8, DAYS)
+ .plus(2, new FortnightPlusOneDay());
+ assertThat(
+ PrettyTime.of(Locale.US)
+ .withReferenceClock(clock)
+ .print(dur, TextWidth.WIDE),
+ is("4 weeks and 10 days"));
+ }
+
+ private static class FortnightPlusOneDay
+ implements IsoUnit, UnitRule.Source {
+
+ @Override
+ public char getSymbol() {
+ return 'F';
+ }
+
+ @Override
+ public double getLength() {
+ return 86400.0 * 15;
+ }
+
+ @Override
+ public boolean isCalendrical() {
+ return true;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T extends ChronoEntity<T>> UnitRule<T> derive(Chronology<T> c) {
+
+ if (c == PlainTimestamp.axis()) {
+ UnitRule<PlainTimestamp> rule =
+ new UnitRule<PlainTimestamp>() {
+ @Override
+ public PlainTimestamp addTo(
+ PlainTimestamp timepoint,
+ long amount
+ ) {
+ return timepoint.plus(amount * 15, DAYS);
+ }
+ @Override
+ public long between(
+ PlainTimestamp start,
+ PlainTimestamp end
+ ) {
+ long days = DAYS.between(start, end);
+ return days / 15;
+ }
+ };
+ return (UnitRule<T>) rule;
+ }
+
+ throw new UnsupportedOperationException(
+ c.getChronoType().getName());
+ }
+
+ }
+
}
\ No newline at end of file | enhanced PrettyTime test
see issue #<I> | MenoData_Time4J | train |
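
The fix matters because normalizing calendar units is anchor-dependent: the same day count splits into a different months-plus-days total depending on the reference instant, so PrettyTime must use the injected reference clock rather than the system clock to stay deterministic and testable. A small Go illustration of the anchor dependence; the dates are arbitrary assumptions.

package main

import (
	"fmt"
	"time"
)

// monthsAndDays normalizes a day count into months+days relative to a
// caller-supplied reference time. Taking the clock as a parameter is
// the same move the patch makes by threading refClock through.
func monthsAndDays(ref time.Time, days int) (months, rest int) {
	end := ref.AddDate(0, 0, days)
	for !ref.AddDate(0, months+1, 0).After(end) {
		months++
	}
	rest = int(end.Sub(ref.AddDate(0, months, 0)).Hours() / 24)
	return
}

func main() {
	feb := time.Date(2014, 2, 1, 0, 0, 0, 0, time.UTC)
	oct := time.Date(2014, 10, 1, 0, 0, 0, 0, time.UTC)
	fmt.Println(monthsAndDays(feb, 60)) // 2 1:  Feb 1 + 60d = Apr 2
	fmt.Println(monthsAndDays(oct, 60)) // 1 29: Oct 1 + 60d = Nov 30
}
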
d64da2c8a09523cd5e258d47bccd75dc4ba0770b | diff --git a/neptulon.go b/neptulon.go
index <HASH>..<HASH> 100644
--- a/neptulon.go
+++ b/neptulon.go
@@ -66,10 +66,7 @@ func (a *App) Stop() error {
// this is not a problem as we always require an ACK but it will also mean that message deliveries will be at-least-once; to-and-from the server
a.connMutex.Lock()
for _, conn := range a.conns {
- err := conn.Close()
- if err != nil {
- return err
- }
+ conn.Close()
}
a.connMutex.Unlock() | don't handle conn close errors | neptulon_neptulon | train |
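
Dropping the per-connection error return fits the shutdown path: closing the remaining connections matters more than reporting one failed Close. A Go sketch of the same best-effort pattern, logging (or discarding) the error and moving on; the `noisy` closer is a stand-in.

package main

import (
	"errors"
	"io"
	"log"
)

// closeAll is best-effort: a failed Close on one connection must not
// stop the rest from being closed during shutdown.
func closeAll(conns []io.Closer) {
	for _, c := range conns {
		if err := c.Close(); err != nil {
			log.Printf("close: %v (ignored during shutdown)", err)
		}
	}
}

type noisy struct{}

func (noisy) Close() error { return errors.New("already closed") }

func main() {
	closeAll([]io.Closer{noisy{}, noisy{}})
}
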
2690f847d052e843226ad040038a25683ced4048 | diff --git a/src/executors/IifExecutor.js b/src/executors/IifExecutor.js
index <HASH>..<HASH> 100644
--- a/src/executors/IifExecutor.js
+++ b/src/executors/IifExecutor.js
@@ -4,6 +4,17 @@ import {getArgNames} from '../util/function';
import {buildArgumentsFromRingaEvent} from '../util/executors';
class IifExecutor extends ExecutorAbstract {
+ //-----------------------------------
+ // Constructor
+ //-----------------------------------
+ /**
+ * Constructor for a new IifExecutor.
+ *
+ * @param thread The parent thread that owns this command.
+ * @param condition The result of this function determines which executor to run
+ * @param trueExecutor The executor run if condition returns a truthy value
+ * @param falseExecutor The executor run if condition returns a falsy value
+ */
constructor(thread, { condition, trueExecutor, falseExecutor }) {
super(thread);
@@ -12,6 +23,17 @@ class IifExecutor extends ExecutorAbstract {
this.falseExecutor = falseExecutor;
}
+ //-----------------------------------
+ // Methods
+ //-----------------------------------
+
+ /**
+ * Internal execution method called by CommandThread only.
+ *
+ * @param doneHandler The handler to call when done() is called.
+ * @param failHandler The handler to call when fail() is called;
+ * @private
+ */
_execute(doneHandler, failHandler) {
super._execute(doneHandler, failHandler); | Add comments on IifExecutor | ringa-js_ringa | train |
8383856b744565f3520adbc4eb1c84967040d7fb | diff --git a/src/Operation/CreateCollection.php b/src/Operation/CreateCollection.php
index <HASH>..<HASH> 100644
--- a/src/Operation/CreateCollection.php
+++ b/src/Operation/CreateCollection.php
@@ -126,11 +126,11 @@ class CreateCollection implements Executable
}
}
- if ( ! empty($this->options['indexOptionDefaults'])) {
+ if (isset($this->options['indexOptionDefaults'])) {
$cmd['indexOptionDefaults'] = (object) $this->options['indexOptionDefaults'];
}
- if ( ! empty($this->options['storageEngine'])) {
+ if (isset($this->options['storageEngine'])) {
$cmd['storageEngine'] = (object) $this->options['storageEngine'];
} | Don't use empty() to check for empty documents
empty() is inconsistent for checking if the document is empty (i.e. empty([]) != empty(new stdClass)). Simply check that the option has been set in order to include it in the command. | mongodb_mongo-php-library | train |
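
`empty()` cannot tell "option not given" from "option given as an empty document", which is why the patch switches to `isset()`. Go's nil-versus-empty map draws the same line; a hedged sketch (the command shape is a simplification of the real createCollection document):

package main

import "fmt"

// buildCommand includes storageEngine whenever the caller set it,
// even to an empty document, by testing for nil rather than len()==0;
// this is the distinction the PHP fix draws between isset() and empty().
func buildCommand(name string, storageEngine map[string]interface{}) map[string]interface{} {
	cmd := map[string]interface{}{"create": name}
	if storageEngine != nil { // set, possibly empty
		cmd["storageEngine"] = storageEngine
	}
	return cmd
}

func main() {
	fmt.Println(buildCommand("users", nil))                          // option omitted
	fmt.Println(buildCommand("users", map[string]interface{}{}))     // empty doc still sent
}
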
b473c03e63fec813aacf543262885876e3a28593 | diff --git a/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java b/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java
+++ b/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java
@@ -22,6 +22,9 @@ public class ApnsClientTest {
private static final String CLIENT_KEYSTORE_FILENAME = "/pushy-test-client.jks";
private static final String CLIENT_KEYSTORE_PASSWORD = "pushy-test";
+ private MockApnsServer server;
+ private ApnsClient<SimpleApnsPushNotification> client;
+
@BeforeClass
public static void setUpBeforeClass() throws Exception {
ApnsClientTest.EVENT_LOOP_GROUP = new NioEventLoopGroup();
@@ -38,10 +41,17 @@ public class ApnsClientTest {
@Before
public void setUp() throws Exception {
+ this.server = new MockApnsServer(8443, EVENT_LOOP_GROUP);
+ this.server.start().await();
+
+ this.client = new ApnsClient<>("localhost", 8443, CLIENT_KEY_STORE, CLIENT_KEYSTORE_PASSWORD, EVENT_LOOP_GROUP);
+ this.client.connect().get();
}
@After
public void tearDown() throws Exception {
+ this.client.close().await();
+ this.server.shutdown().await();
}
@AfterClass
@@ -53,21 +63,12 @@ public class ApnsClientTest {
public void testSendNotification() throws Exception {
final String testToken = TokenTestUtil.generateRandomToken();
- final MockApnsServer server = new MockApnsServer(8443, EVENT_LOOP_GROUP);
- server.registerToken(testToken);
- server.start().await();
-
- final ApnsClient<SimpleApnsPushNotification> client =
- new ApnsClient<>("localhost", 8443, CLIENT_KEY_STORE, CLIENT_KEYSTORE_PASSWORD, EVENT_LOOP_GROUP);
-
- client.connect().get();
+ this.server.registerToken(testToken);
final SimpleApnsPushNotification pushNotification = new SimpleApnsPushNotification(testToken, "test-payload");
- final PushNotificationResponse<SimpleApnsPushNotification> response = client.sendNotification(pushNotification).get();
+ final PushNotificationResponse<SimpleApnsPushNotification> response =
+ this.client.sendNotification(pushNotification).get();
assertTrue(response.isSuccess());
-
- client.close().await();
- server.shutdown().await();
}
} | Moved some test setup things into `@Before` and `@After` methods. | relayrides_pushy | train |
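
Hoisting the server/client lifecycle into @Before/@After removes per-test duplication and guarantees teardown even when an assertion fails. The Go testing package reaches the same shape with a setup helper plus t.Cleanup; a sketch in which the HTTP test server is a stand-in for the mock APNs server.

package pushy_test

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

// newServerAndClient centralizes per-test setup; t.Cleanup plays the
// role of JUnit's @After, tearing down even when the test fails.
func newServerAndClient(t *testing.T) (*httptest.Server, *http.Client) {
	t.Helper()
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	t.Cleanup(srv.Close)
	return srv, srv.Client()
}

func TestSendNotification(t *testing.T) {
	srv, client := newServerAndClient(t)
	resp, err := client.Get(srv.URL)
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		t.Fatalf("got %d", resp.StatusCode)
	}
}
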
15a8b3fe875abba56ccdd5fea094181fb3a0a206 | diff --git a/Model/ContactClientModel.php b/Model/ContactClientModel.php
index <HASH>..<HASH> 100644
--- a/Model/ContactClientModel.php
+++ b/Model/ContactClientModel.php
@@ -714,6 +714,8 @@ class ContactClientModel extends FormModel
'expr' => 'like',
'value' => "%'contactclient': $contactclientId,%",
],
+ 'orderBy' => 'c.name',
+ 'orderByDir' => 'ASC',
]
);
pass orderBy for campaigns drop-down | TheDMSGroup_mautic-contact-client | train
167e8e33259edd11eff6e6447071d31033460290 | diff --git a/Lib/fontbakery/specifications/googlefonts.py b/Lib/fontbakery/specifications/googlefonts.py
index <HASH>..<HASH> 100644
--- a/Lib/fontbakery/specifications/googlefonts.py
+++ b/Lib/fontbakery/specifications/googlefonts.py
@@ -2314,10 +2314,12 @@ def com_google_fonts_test_064(ttFont, ligatures):
issues with caret rendering.
"""
if ligatures == -1:
- yield FAIL, ("Failed to lookup ligatures."
- " This font file seems to be malformed."
- " For more info, read:"
- " https://github.com/googlefonts/fontbakery/issues/1596")
+ yield FAIL, Message("malformed",
+ "Failed to lookup ligatures."
+ " This font file seems to be malformed."
+ " For more info, read:"
+ " https://github.com"
+ "/googlefonts/fontbakery/issues/1596")
elif "GDEF" not in ttFont:
yield WARN, Message("GDEF-missing",
("GDEF table is missing, but it is mandatory"
@@ -2377,10 +2379,12 @@ def com_google_fonts_test_065(ttFont, ligatures, has_kerning_info):
return result
if ligatures == -1:
- yield FAIL, ("Failed to lookup ligatures."
- " This font file seems to be malformed."
- " For more info, read:"
- " https://github.com/googlefonts/fontbakery/issues/1596")
+ yield FAIL, Message("malformed",
+ "Failed to lookup ligatures."
+ " This font file seems to be malformed."
+ " For more info, read:"
+ " https://github.com"
+ "/googlefonts/fontbakery/issues/1596")
else:
for lookup in ttFont["GPOS"].table.LookupList.Lookup:
if lookup.LookupType == 2: # type 2 = Pair Adjustment
@@ -2390,9 +2394,10 @@ def com_google_fonts_test_065(ttFont, ligatures, has_kerning_info):
# look_for_nonligated_kern_info(lookup.SubTable[0])
if remaining != {}:
- yield FAIL, ("GPOS table lacks kerning info for the following"
- " non-ligated sequences: "
- "{}").format(ligatures_str(remaining))
+ yield FAIL, Message("lacks-kern-info",
+ ("GPOS table lacks kerning info for the following"
+ " non-ligated sequences: "
+ "{}").format(ligatures_str(remaining)))
else:
yield PASS, ("GPOS table provides kerning info for "
"all non-ligated sequences.")
diff --git a/Lib/fontbakery/specifications/googlefonts_test.py b/Lib/fontbakery/specifications/googlefonts_test.py
index <HASH>..<HASH> 100644
--- a/Lib/fontbakery/specifications/googlefonts_test.py
+++ b/Lib/fontbakery/specifications/googlefonts_test.py
@@ -1280,7 +1280,7 @@ def test_id_065():
# So it must FAIL the test:
print ("Test FAIL with a bad font...")
status, message = list(test(ttFont, lig, has_kinfo))[-1]
- assert status == FAIL
+ assert status == FAIL and message.code == "lacks-kern-info"
def test_id_066(): | update tests for <I> & <I>
(issue #<I>) | googlefonts_fontbakery | train |
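
Attaching a stable `Message` code ("malformed", "lacks-kern-info") lets the test suite assert on machine-readable identifiers while the human wording stays free to change. A minimal Go sketch of the same check-result shape; the names are illustrative, not fontbakery's API.

package checks

import "testing"

type status int

const (
	pass status = iota
	fail
)

// result carries a machine-readable code next to the human message,
// so tests assert on the code and wording can change freely.
type result struct {
	Status  status
	Code    string
	Message string
}

func checkKerning(hasKernInfo bool) result {
	if !hasKernInfo {
		return result{fail, "lacks-kern-info",
			"GPOS table lacks kerning info for non-ligated sequences"}
	}
	return result{Status: pass, Message: "kerning info present"}
}

func TestCheckKerning(t *testing.T) {
	r := checkKerning(false)
	if r.Status != fail || r.Code != "lacks-kern-info" {
		t.Fatalf("got %+v", r)
	}
}
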
559864507ab8e81e55b8fb2a1514185c068a1eac | diff --git a/keanu-project/src/main/java/io/improbable/keanu/plating/loop/Loop.java b/keanu-project/src/main/java/io/improbable/keanu/plating/loop/Loop.java
index <HASH>..<HASH> 100644
--- a/keanu-project/src/main/java/io/improbable/keanu/plating/loop/Loop.java
+++ b/keanu-project/src/main/java/io/improbable/keanu/plating/loop/Loop.java
@@ -63,7 +63,7 @@ public class Loop {
* package-private because it is intended to be created by the LoopBuilder
*
* @param plates the set of plates, one for each iteration in the loop
- * @param throwWhenMaxCountIsReached - optionally disable throwing and log a warning instead
+ * @param throwWhenMaxCountIsReached optionally disable throwing and log a warning instead
*/
Loop(Plates plates, boolean throwWhenMaxCountIsReached) {
this.plates = plates;
@@ -78,7 +78,7 @@ public class Loop {
* A factory method for creating a loop
* It automatically labels the initial state correctly for you.
*
- * @param initialState - a single Vertex that defines the loop's base case.
+ * @param initialState a single Vertex that defines the loop's base case.
* @param <V> the input type
* @return a builder object
*/
@@ -92,8 +92,8 @@ public class Loop {
/**
* A factory method for creating a loop
*
- * @param first - the first Vertex (mandatory)
- * @param others - other Vertices (optional)
+ * @param first the first Vertex (mandatory)
+ * @param others other Vertices (optional)
* @param <V> the input type
* @return a builder object
*/
@@ -104,7 +104,7 @@ public class Loop {
/**
* A factory method for creating a loop
*
- * @param initialState - the collection of vertices that define the loop's base case
+ * @param initialState the collection of vertices that define the loop's base case
* @param <V> the input type
* @return a builder object
*/
diff --git a/keanu-project/src/main/java/io/improbable/keanu/plating/loop/LoopBuilder.java b/keanu-project/src/main/java/io/improbable/keanu/plating/loop/LoopBuilder.java
index <HASH>..<HASH> 100644
--- a/keanu-project/src/main/java/io/improbable/keanu/plating/loop/LoopBuilder.java
+++ b/keanu-project/src/main/java/io/improbable/keanu/plating/loop/LoopBuilder.java
@@ -39,7 +39,7 @@ public class LoopBuilder {
/**
* An optional method to override the default value
*
- * @param maxCount - the max number of times the loop can run
+ * @param maxCount the max number of times the loop can run
* @return self
*/
public LoopBuilder atMost(int maxCount) {
@@ -61,8 +61,8 @@ public class LoopBuilder {
/**
* An optional method to add custom mappings
*
- * @param inputLabel - the label assigned to the ProxyVertex in frame t
- * @param outputLabel - the label assigned to a Vertex in frame t-1 which will become the ProxyVertex's parent
+ * @param inputLabel the label assigned to the ProxyVertex in frame t
+ * @param outputLabel the label assigned to a Vertex in frame t-1 which will become the ProxyVertex's parent
* @return self
*/
public LoopBuilder mapping(VertexLabel inputLabel, VertexLabel outputLabel) {
@@ -73,7 +73,7 @@ public class LoopBuilder {
/**
* A mandatory method to specify the condition
*
- * @param conditionSupplier - a lambda that creates and returns a new BoolVertex
+ * @param conditionSupplier a lambda that creates and returns a new BoolVertex
* @return the next stage builder
*/
public LoopBuilder2 whilst(Supplier<BoolVertex> conditionSupplier) {
@@ -83,7 +83,7 @@ public class LoopBuilder {
/**
* A mandatory method to specify the condition
*
- * @param conditionFunction - a lambda that takes the current Plate and creates and returns a new BoolVertex
+ * @param conditionFunction a lambda that takes the current Plate and creates and returns a new BoolVertex
* @return the next stage builder
*/
public LoopBuilder2 whilst(Function<Plate, BoolVertex> conditionFunction) {
@@ -133,7 +133,7 @@ public class LoopBuilder {
/**
* A mandatory method to specify the iteration step
*
- * @param iterationFunction - a lambda that takes the Proxy input vertex
+ * @param iterationFunction a lambda that takes the Proxy input vertex
* and creates and returns a new output Vertex
* @return the fully constructed Loop object
*/
@@ -146,7 +146,7 @@ public class LoopBuilder {
/**
* A mandatory method to specify the iteration step
*
- * @param iterationFunction - a lambda that takes the current Plate and the Proxy input vertex
+ * @param iterationFunction a lambda that takes the current Plate and the Proxy input vertex
* and creates and returns a new output vertex
* @return the fully constructed Loop object
*/ | removed hyphens from javadoc | improbable-research_keanu | train |
68bb096b1d8048aa2570a2714060f1a61e501c23 | diff --git a/tests/src/XeroProviderTest.php b/tests/src/XeroProviderTest.php
index <HASH>..<HASH> 100644
--- a/tests/src/XeroProviderTest.php
+++ b/tests/src/XeroProviderTest.php
@@ -2,6 +2,7 @@
namespace Radcliffe\Tests\Xero;
+use Prophecy\Argument;
use Radcliffe\Xero\XeroProvider;
use PHPUnit\Framework\TestCase;
@@ -86,4 +87,53 @@ class XeroProviderTest extends TestCase
[['accounting.transactions.read', 'accounting.reports.read'], 'custom'],
];
}
+
+ /**
+ * Asserts that response errors are mapped correctly.
+ *
+ * @param array $data
+ * The method parameter.
+ * @param string $expected
+ * The expect error string.
+ *
+ * @dataProvider provideResponseData
+ *
+ * @throws \League\OAuth2\Client\Provider\Exception\IdentityProviderException
+ */
+ public function testGetResponseMessage(array $data, string $expected)
+ {
+ $json = json_encode($data);
+ $this->expectExceptionMessage($expected);
+
+ $requestProphet = $this->prophesize('\Psr\Http\Message\RequestInterface');
+ $responseProphet = $this->prophesize('\Psr\Http\Message\ResponseInterface');
+ $responseProphet->getStatusCode()->willReturn(400);
+ $responseProphet->getBody()->willReturn($json);
+ $responseProphet
+ ->getHeader(Argument::containingString('content-type'))
+ ->willReturn('application/json');
+ $guzzleProphet = $this->prophesize('\GuzzleHttp\ClientInterface');
+ $guzzleProphet->send(Argument::any())->willReturn($responseProphet->reveal());
+
+ $provider = new XeroProvider();
+ $provider->setHttpClient($guzzleProphet->reveal());
+ $provider->getParsedResponse($requestProphet->reveal());
+ }
+
+ /**
+ * Provides test arguments for ::testGetResponseMessage().
+ *
+ * @return array
+ */
+ public function provideResponseData()
+ {
+ return [
+ 'invalid client' => [['error' => 'invalid_client'], 'Invalid client credentials'],
+ 'unspported grant type' => [['error' => 'unsupported_grant_type'], 'Missing required grant_type parameter'],
+ 'invalid grant' => [['error' => 'invalid_grant'], 'Invalid, expired, or already used code'],
+ 'unauthorized' => [['error' => 'unauthorized_client'], 'Invalid callback URI'],
+ 'unknown' => [['error' => 'unknown'], 'Unknown error code unknown'],
+ 'when null' => [[], 'An unknown error occurred with this request'],
+ ];
+ }
} | Adds test coverage for #<I> | mradcliffe_xeroclient | train |
07ba40a53b886d633c58c28f9277d4784a6ed7c5 | diff --git a/code/SEO_FacebookDomainInsights_Member_DataExtension.php b/code/SEO_FacebookDomainInsights_Member_DataExtension.php
index <HASH>..<HASH> 100644
--- a/code/SEO_FacebookDomainInsights_Member_DataExtension.php
+++ b/code/SEO_FacebookDomainInsights_Member_DataExtension.php
@@ -1,7 +1,7 @@
<?php
/**
- * @todo Adds Facebook Domain Insight data to Member.
+ * Adds Facebook Domain Insight data to Member.
*
* @package silverstripe-seo
* @subpackage facebook-domain-insights
diff --git a/code/SEO_FacebookDomainInsights_SiteConfig_DataExtension.php b/code/SEO_FacebookDomainInsights_SiteConfig_DataExtension.php
index <HASH>..<HASH> 100644
--- a/code/SEO_FacebookDomainInsights_SiteConfig_DataExtension.php
+++ b/code/SEO_FacebookDomainInsights_SiteConfig_DataExtension.php
@@ -1,7 +1,7 @@
<?php
/**
- * @todo Adds Facebook Domain Insights settings to SiteConfig.
+ * Adds Facebook Domain Insights settings to SiteConfig.
*
* @package silverstripe-seo
* @subpackage facebook-domain-insights | More code spit & polish | Graphiques-Digitale_silverstripe-seo-facebook-domain-insights | train |