hash (stringlengths 40..40) | diff (stringlengths 131..114k) | message (stringlengths 7..980) | project (stringlengths 5..67) | split (stringclasses 1 value)
---|---|---|---|---|
4e89b6d25ce46c341c7e0c829f2d61e42a42f852 | diff --git a/src/YouTube.js b/src/YouTube.js
index <HASH>..<HASH> 100644
--- a/src/YouTube.js
+++ b/src/YouTube.js
@@ -99,7 +99,7 @@ class YouTube extends React.Component {
}
onReset() {
- if (this._internalPlayer) {
+ if (this._internalPlayer && this._internalPlayer.getIframe()) {
this._internalPlayer.removeEventListener('onReady', this._playerReadyHandle);
this._internalPlayer.removeEventListener('onError', this._playerErrorHandle);
this._internalPlayer.removeEventListener('onStateChange', this._stateChangeHandle); | Only destroy the youtube player if it is initialized
If you are very quickly creating and destroying YouTube components, the iframe might not be initialized by the time we want to destroy it, resulting in this error: `Uncaught TypeError: this._internalPlayer.removeEventListener is not a function`.
This doesn't seem perfect, but I couldn't think of a better check. | troybetz_react-youtube | train |
4144207699997d23ecb69fc733bd4fbf1c28a329 | diff --git a/src/ContentfulService.php b/src/ContentfulService.php
index <HASH>..<HASH> 100644
--- a/src/ContentfulService.php
+++ b/src/ContentfulService.php
@@ -18,12 +18,13 @@ class ContentfulService implements ContentfulServiceContract
/**
- * @param string $apiKey
- * @param string $spaceId
+ * ContentfulService constructor.
+ *
+ * @param Client $client
*/
- public function __construct($apiKey, $spaceId)
+ public function __construct(Client $client)
{
- $this->client = new Client($apiKey, $spaceId);
+ $this->client = $client;
}
diff --git a/src/ContentfulServiceProvider.php b/src/ContentfulServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/ContentfulServiceProvider.php
+++ b/src/ContentfulServiceProvider.php
@@ -2,6 +2,7 @@
namespace Nord\Lumen\Contentful;
+use Contentful\Delivery\Client;
use Illuminate\Contracts\Config\Repository as ConfigRepository;
use Illuminate\Contracts\Container\Container;
use Illuminate\Support\ServiceProvider;
@@ -33,8 +34,10 @@ class ContentfulServiceProvider extends ServiceProvider
*/
protected function registerBindings(Container $container, ConfigRepository $config)
{
- $container->singleton(ContentfulServiceContract::class, function() use ($config) {
- return new ContentfulService($config->get('contentful.api_key'), $config->get('contentful.space_id'));
+ $container->singleton(ContentfulServiceContract::class, function () use ($config) {
+ $client = new Client($config->get('contentful.api_key'), $config->get('contentful.space_id'));
+
+ return new ContentfulService($client);
});
$container->alias(ContentfulServiceContract::class, ContentfulService::class); | Inject the Client into ContentfulService
This makes it mockable in projects that use this library | digiaonline_lumen-contentful | train |
60b50d1b6012aed45628ff6e1d3cb360143a4168 | diff --git a/controller/src/main/java/org/jboss/as/controller/AbstractAddStepHandler.java b/controller/src/main/java/org/jboss/as/controller/AbstractAddStepHandler.java
index <HASH>..<HASH> 100644
--- a/controller/src/main/java/org/jboss/as/controller/AbstractAddStepHandler.java
+++ b/controller/src/main/java/org/jboss/as/controller/AbstractAddStepHandler.java
@@ -271,9 +271,9 @@ public class AbstractAddStepHandler implements OperationStepHandler {
for (RuntimeCapability capability : capabilitySet) {
if (capability.isDynamicallyNamed()) {
- context.registerCapability(capability.fromBaseCapability(context.getCurrentAddressValue()), null);
+ context.registerCapability(capability.fromBaseCapability(context.getCurrentAddressValue()));
} else {
- context.registerCapability(capability, null);
+ context.registerCapability(capability);
}
}
diff --git a/controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java b/controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java
index <HASH>..<HASH> 100644
--- a/controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java
+++ b/controller/src/main/java/org/jboss/as/controller/ServiceRemoveStepHandler.java
@@ -22,6 +22,10 @@
package org.jboss.as.controller;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.Set;
+
import org.jboss.as.controller.capability.RuntimeCapability;
import org.jboss.dmr.ModelNode;
import org.jboss.msc.service.ServiceName;
@@ -39,7 +43,7 @@ public class ServiceRemoveStepHandler extends AbstractRemoveStepHandler {
private static final RuntimeCapability[] NO_CAPABILITIES = new RuntimeCapability[0];
private final ServiceName baseServiceName;
private final AbstractAddStepHandler addOperation;
- private final RuntimeCapability[] unavailableCapabilities;
+ private final Set<RuntimeCapability> unavailableCapabilities;
/**
* Creates a {@code ServiceRemoveStepHandler}.
@@ -52,7 +56,7 @@ public class ServiceRemoveStepHandler extends AbstractRemoveStepHandler {
super(unavailableCapabilities);
this.baseServiceName = baseServiceName;
this.addOperation = addOperation;
- this.unavailableCapabilities = unavailableCapabilities;
+ this.unavailableCapabilities = new LinkedHashSet<>(Arrays.asList(unavailableCapabilities));
}
/**
@@ -103,7 +107,9 @@ public class ServiceRemoveStepHandler extends AbstractRemoveStepHandler {
context.removeService(serviceName(name, address));
}
- for (RuntimeCapability<?> capability : unavailableCapabilities) {
+ Set<RuntimeCapability> capabilitySet = unavailableCapabilities.isEmpty() ? context.getResourceRegistration().getCapabilities() : unavailableCapabilities;
+
+ for (RuntimeCapability<?> capability : capabilitySet) {
if (capability.getCapabilityServiceValueType() != null) {
ServiceName sname;
            if (capability.isDynamicallyNamed()) { | Minor API improvement for working with capabilities
- takes capabilities defined on the resource into account | wildfly_wildfly-core | train |
5dd02b1cba0e99e95e7c25560ff4c0999dc15489 | diff --git a/danceschool/core/models.py b/danceschool/core/models.py
index <HASH>..<HASH> 100644
--- a/danceschool/core/models.py
+++ b/danceschool/core/models.py
@@ -658,7 +658,7 @@ class Event(EmailRecipientMixin, PolymorphicModel):
@property
def firstOccurrenceTime(self):
if self.firstOccurrence:
- return self.firstOccurrence.startTime
+ return timezone.localtime(self.firstOccurrence.startTime)
return None
firstOccurrenceTime.fget.short_description = _('Begins')
@@ -670,7 +670,7 @@ class Event(EmailRecipientMixin, PolymorphicModel):
@property
def nextOccurrenceTime(self):
if self.nextOccurrence:
- return self.nextOccurrence.startTime
+ return timezone.localtime(self.nextOccurrence.startTime)
return None
nextOccurrenceTime.fget.short_description = _('Next occurs')
@@ -682,7 +682,7 @@ class Event(EmailRecipientMixin, PolymorphicModel):
@property
def lastOccurrenceTime(self):
if self.lastOccurrence:
- return self.lastOccurrence.endTime
+ return timezone.localtime(self.lastOccurrence.endTime)
return None
lastOccurrenceTime.fget.short_description = _('Ends') | Use localized timezones for determining day of week, etc. | django-danceschool_django-danceschool | train |
59d5019204be37958fd23d7cceefe2b59ca917bd | diff --git a/java/server/test/org/openqa/grid/internal/utils/SelfRegisteringRemoteTest.java b/java/server/test/org/openqa/grid/internal/utils/SelfRegisteringRemoteTest.java
index <HASH>..<HASH> 100644
--- a/java/server/test/org/openqa/grid/internal/utils/SelfRegisteringRemoteTest.java
+++ b/java/server/test/org/openqa/grid/internal/utils/SelfRegisteringRemoteTest.java
@@ -87,7 +87,7 @@ public class SelfRegisteringRemoteTest {
// there should be three servlets on the remote's map -- The resource servlet, the
// help servlet, and the one we added above.
- assertEquals(3, remote.getNodeServlets().size());
+ assertEquals(5, remote.getNodeServlets().size());
assertEquals(ResourceServlet.class, remote.getNodeServlets().get("/resources/*"));
assertEquals(DisplayHelpServlet.class,
remote.getNodeServlets().get("/extra/DisplayHelpServlet/*")); | Fixing a grid test: two new status servlets were added recently | SeleniumHQ_selenium | train |
15e46334b5152e7b7d135cf24daefc360b193002 | diff --git a/test_xbee.py b/test_xbee.py
index <HASH>..<HASH> 100755
--- a/test_xbee.py
+++ b/test_xbee.py
@@ -270,5 +270,32 @@ class TestReadFromDevice(unittest.TestCase):
data = xbee.wait_for_frame()
self.assertEqual(data, '\x05')
+class TestNotImplementedFeatures(unittest.TestCase):
+ """
+ In order to properly use the XBee class for most situations,
+ it must be subclassed with the proper attributes definined. If
+ this is not the case, then a NotImplemented exception should be
+ raised as appropriate.
+ """
+
+ def setUp(self):
+ """
+ Set up a base class XBee object which does not have api_commands
+ or api_responses defined
+ """
+ self.xbee = XBee(None)
+
+ def test_build_command(self):
+ """
+ build_command should raise NotImplemented
+ """
+ self.assertRaises(NotImplementedError, self.xbee.build_command, "at")
+
+ def test_split_response(self):
+ """
+ split_command should raise NotImplemented
+ """
+ self.assertRaises(NotImplementedError, self.xbee.split_response, "\00")
+
if __name__ == '__main__':
unittest.main()
diff --git a/xbee.py b/xbee.py
index <HASH>..<HASH> 100644
--- a/xbee.py
+++ b/xbee.py
@@ -183,8 +183,11 @@ class XBee(object):
Each field will be written out in the order they are defined
in the command definition.
"""
-
- cmd_spec = self.api_commands[cmd]
+ try:
+ cmd_spec = self.api_commands[cmd]
+ except AttributeError:
+ raise NotImplementedError("API command specifications could not be found; use a derived class which defines 'api_commands'.")
+
packet = ''
for field in cmd_spec:
@@ -238,6 +241,8 @@ class XBee(object):
packet_id = data[0]
try:
packet = self.api_responses[packet_id]
+ except AttributeError:
+ raise NotImplementedError("API response specifications could not be found; use a derived class which defines 'api_responses'.")
except KeyError:
raise KeyError(
"Unrecognized response packet with id byte %s" | Added tests and implementation to ensure that NotImplementedError is raised if the base XBee class is instantiated and mistakenly used as a derived, device-specific class | niolabs_python-xbee | train |
86cdd04671d53be33cc1a6d180662466b457a40f | diff --git a/src/main/java/com/xtremelabs/robolectric/Robolectric.java b/src/main/java/com/xtremelabs/robolectric/Robolectric.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/xtremelabs/robolectric/Robolectric.java
+++ b/src/main/java/com/xtremelabs/robolectric/Robolectric.java
@@ -3,6 +3,7 @@ package com.xtremelabs.robolectric;
import android.accounts.AccountManager;
import android.animation.Animator;
import android.animation.AnimatorSet;
+import android.animation.ObjectAnimator;
import android.app.*;
import android.appwidget.AppWidgetHost;
import android.appwidget.AppWidgetHostView;
@@ -15,9 +16,9 @@ import android.content.res.AssetManager;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.content.res.TypedArray;
+import android.database.ContentObserver;
import android.database.CursorWrapper;
import android.database.MergeCursor;
-import android.database.ContentObserver;
import android.database.sqlite.*;
import android.graphics.*;
import android.graphics.drawable.*;
@@ -511,7 +512,7 @@ public class Robolectric {
@SuppressWarnings("rawtypes")
public static ShadowAsyncTask shadowOf(AsyncTask instance){
- return (ShadowAsyncTask) Robolectric.shadowOf_( instance );
+ return (ShadowAsyncTask) Robolectric.shadowOf_(instance);
}
public static ShadowAudioManager shadowOf(AudioManager instance) {
@@ -842,6 +843,10 @@ public class Robolectric {
return (ShadowNotificationManager) Robolectric.shadowOf_(other);
}
+ public static ShadowObjectAnimator shadowOf(ObjectAnimator instance) {
+ return (ShadowObjectAnimator) shadowOf_(instance);
+ }
+
public static ShadowPagerAdapter shadowOf(PagerAdapter instance) {
return (ShadowPagerAdapter) shadowOf_(instance);
}
diff --git a/src/main/java/com/xtremelabs/robolectric/shadows/ShadowObjectAnimator.java b/src/main/java/com/xtremelabs/robolectric/shadows/ShadowObjectAnimator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/xtremelabs/robolectric/shadows/ShadowObjectAnimator.java
+++ b/src/main/java/com/xtremelabs/robolectric/shadows/ShadowObjectAnimator.java
@@ -29,6 +29,7 @@ public class ShadowObjectAnimator extends ShadowValueAnimator {
private Class<?> animationType;
private static final Map<Object, Map<String, ObjectAnimator>> mapsForAnimationTargets = new HashMap<Object, Map<String, ObjectAnimator>>();
private boolean isRunning;
+ private boolean cancelWasCalled;
@Implementation
public static ObjectAnimator ofFloat(Object target, String propertyName, float... values) {
@@ -126,6 +127,19 @@ public class ShadowObjectAnimator extends ShadowValueAnimator {
return isRunning;
}
+ @Implementation
+ public void cancel() {
+ cancelWasCalled = true;
+ }
+
+ public boolean cancelWasCalled() {
+ return cancelWasCalled;
+ }
+
+ public void resetCancelWasCalled() {
+ cancelWasCalled = false;
+ }
+
public static Map<String, ObjectAnimator> getAnimatorsFor(Object target) {
return getAnimatorMapFor(target);
}
diff --git a/src/main/java/com/xtremelabs/robolectric/shadows/ShadowValueAnimator.java b/src/main/java/com/xtremelabs/robolectric/shadows/ShadowValueAnimator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/xtremelabs/robolectric/shadows/ShadowValueAnimator.java
+++ b/src/main/java/com/xtremelabs/robolectric/shadows/ShadowValueAnimator.java
@@ -23,7 +23,7 @@ public class ShadowValueAnimator extends ShadowAnimator {
public TimeInterpolator getInterpolator() {
return interpolator;
}
-
+
@Implementation
public static ValueAnimator ofInt (int... values){
return new ValueAnimator(); | Added code to ShadowObjectAnimator to detect when "cancel()" gets called. | robolectric_robolectric | train |
23a05f73bd3cdf780ef90580dc6eef2242fc6578 | diff --git a/ginga/colors.py b/ginga/colors.py
index <HASH>..<HASH> 100644
--- a/ginga/colors.py
+++ b/ginga/colors.py
@@ -755,7 +755,12 @@ def recalc_color_list():
color_list.sort()
def lookup_color(name, format='tuple'):
- color = color_dict[name]
+ color = None
+ try:
+ color = color_dict[name]
+ except KeyError:
+ raise KeyError("%s color does not exist in color_dict"%(name))
+
if format == 'tuple':
return color
    elif format == 'hash': | Added the exception handling logic in lookup_color() | ejeschke_ginga | train |
3893b05fda6e442ab7c98ce4ecedda15308d1715 | diff --git a/server/src/com/thoughtworks/go/server/security/OauthAuthenticationFilter.java b/server/src/com/thoughtworks/go/server/security/OauthAuthenticationFilter.java
index <HASH>..<HASH> 100644
--- a/server/src/com/thoughtworks/go/server/security/OauthAuthenticationFilter.java
+++ b/server/src/com/thoughtworks/go/server/security/OauthAuthenticationFilter.java
@@ -44,7 +44,9 @@ public class OauthAuthenticationFilter extends SpringSecurityFilter {
String header = request.getHeader(AUTHORIZATION);//Token token="ACCESS_TOKEN"
if (header != null) {
- logger.debug("Oauth authorization header: " + header);
+ if (logger.isDebugEnabled()) {
+ logger.debug("Oauth authorization header: " + header);
+ }
Matcher matcher = OAUTH_TOKEN_PATTERN.matcher(header);
if (matcher.matches()) {
String token = matcher.group(1);
@@ -53,7 +55,9 @@ public class OauthAuthenticationFilter extends SpringSecurityFilter {
Authentication authResult = authenticationManager.authenticate(authenticationToken);
SecurityContextHolder.getContext().setAuthentication(authResult);
} catch (AuthenticationException e) {
- logger.debug("Oauth authentication request for token: " + token + " failed: " + e.toString());
+ if (logger.isDebugEnabled()) {
+ logger.debug("Oauth authentication request for token: " + token, e);
+ }
SecurityContextHolder.getContext().setAuthentication(null);
}
} | Enclosing debug logs with isDebugEnabled to prevent String object construction when unnecessary. | gocd_gocd | train |
65d30618878fe90dbc4e1e85b15fd31cd7bb2b0c | diff --git a/test/format_test.rb b/test/format_test.rb
index <HASH>..<HASH> 100644
--- a/test/format_test.rb
+++ b/test/format_test.rb
@@ -60,6 +60,9 @@ class FormatTest < Test::Unit::TestCase
format = Format.new(one_channel, 16, 44100)
assert_equal(2, format.block_align)
+
+ format = Format.new(one_channel, 32, 44100)
+ assert_equal(4, format.block_align)
end
[2, :stereo].each do |two_channels|
@@ -68,6 +71,9 @@ class FormatTest < Test::Unit::TestCase
format = Format.new(two_channels, 16, 44100)
assert_equal(4, format.block_align)
+
+ format = Format.new(two_channels, 32, 44100)
+ assert_equal(8, format.block_align)
end
end | Adding more tests for Format.block_align | jstrait_wavefile | train |
a8728246dbda35c46612bea1ccd3ad680c377bbc | diff --git a/cake/tests/cases/libs/model/db_acl.test.php b/cake/tests/cases/libs/model/db_acl.test.php
index <HASH>..<HASH> 100644
--- a/cake/tests/cases/libs/model/db_acl.test.php
+++ b/cake/tests/cases/libs/model/db_acl.test.php
@@ -147,12 +147,10 @@ class AclNodeTest extends CakeTestCase {
$this->assertEqual($result, $expected);
$result = Set::extract($Aco->node('Controller2/action3'), '{n}.DbAcoTest.id');
- $expected = array(6, 1);
- $this->assertEqual($result, $expected);
+ $this->assertFalse($result);
$result = Set::extract($Aco->node('Controller2/action3/record5'), '{n}.DbAcoTest.id');
- $expected = array(6, 1);
- $this->assertEqual($result, $expected);
+ $this->assertFalse($result);
}
function testNodeAliasParenting() {
@@ -175,5 +173,4 @@ class AclNodeTest extends CakeTestCase {
$this->assertEqual($result, $expected);
}
}
-
?>
\ No newline at end of file | Updated the expectations of the two failing tests. Both pass now. Fixes #<I>
git-svn-id: <URL> | cakephp_cakephp | train |
1f2b784735bf9b635d4d4f7d146263a906c74c87 | diff --git a/lib/ping-promise.js b/lib/ping-promise.js
index <HASH>..<HASH> 100644
--- a/lib/ping-promise.js
+++ b/lib/ping-promise.js
@@ -83,7 +83,8 @@ function probe(addr, config) {
if (p.match(/^win/)) {
// this is my solution on Chinese Windows8 64bit
result = false;
- for (var line in lines) {
+ for (var i = 0; i < lines.length; i++) {
+ var line = lines[i];
if (line.search(/TTL=[0-9]+/i) > 0) {
result = true;
                    break; | refs #<I>: Fix a bug that causes alive in Windows to always be false | danielzzz_node-ping | train |
d638bf5b67f1afe9c23b3cd32f2e9d7ae7e17b78 | diff --git a/spyderlib/widgets/sourcecode/syntaxhighlighters.py b/spyderlib/widgets/sourcecode/syntaxhighlighters.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/sourcecode/syntaxhighlighters.py
+++ b/spyderlib/widgets/sourcecode/syntaxhighlighters.py
@@ -331,8 +331,10 @@ class PythonSH(BaseSH):
self.setFormat(start, end-start, self.formats[key])
if key == "comment":
if text.lstrip().startswith(CELL_SEPARATORS):
+ title = to_text_string(text).strip()
+ title = title.lstrip("# ")
oedata = OutlineExplorerData()
- oedata.text = to_text_string(text).strip()
+ oedata.text = title
oedata.fold_level = start
oedata.def_type = OutlineExplorerData.CELL
oedata.def_name = text.strip() | Remove comment marker from the source outline for cells | spyder-ide_spyder | train |
f0d72477b3c92481440ee05c53a4fb1fd89614f8 | diff --git a/rootpy/tree/chain.py b/rootpy/tree/chain.py
index <HASH>..<HASH> 100644
--- a/rootpy/tree/chain.py
+++ b/rootpy/tree/chain.py
@@ -4,6 +4,7 @@ import time
from ..io import open as ropen, DoesNotExist
from .filtering import EventFilterList
from .. import rootpy_globals
+from ..util.extras import humanize_bytes
from .. import log; log = log[__name__]
@@ -205,6 +206,9 @@ class _BaseTreeChain(object):
self._buffer = self._tree._buffer
if self._use_cache:
# enable TTreeCache for this tree
+ log.info(("enabling a %s TTreeCache for the current tree "
+ "(%d learning entries)") %
+ (humanize_bytes(self._cache_size), self._learn_entries))
self._tree.SetCacheSize(self._cache_size)
self._tree.SetCacheLearnEntries(self._learn_entries)
self._tree.read_branches_on_demand(self._read_branches_on_demand)
diff --git a/rootpy/util/extras.py b/rootpy/util/extras.py
index <HASH>..<HASH> 100644
--- a/rootpy/util/extras.py
+++ b/rootpy/util/extras.py
@@ -16,3 +16,34 @@ def iter_ROOT_classes():
yield getattr(QROOT, class_name)
except AttributeError:
pass
+
+
+def humanize_bytes(bytes, precision=1):
+
+ abbrevs = (
+ (1<<50L, 'PB'),
+ (1<<40L, 'TB'),
+ (1<<30L, 'GB'),
+ (1<<20L, 'MB'),
+ (1<<10L, 'kB'),
+ (1, 'bytes')
+ )
+ if bytes == 1:
+ return '1 byte'
+ for factor, suffix in abbrevs:
+ if bytes >= factor:
+ break
+ return '%.*f %s' % (precision, bytes / factor, suffix)
+
+
+def print_table(table, sep = ' '):
+
+ # Reorganize data by columns
+ cols = zip(*table)
+ # Compute column widths by taking maximum length of values per column
+ col_widths = [ max(len(value) for value in col) for col in cols ]
+ # Create a suitable format string
+ format = sep.join(['%%-%ds' % width for width in col_widths ])
+ # Print each row using the computed format
+ for row in table:
+ print format % tuple(row)
diff --git a/scripts/rootpy b/scripts/rootpy
index <HASH>..<HASH> 100755
--- a/scripts/rootpy
+++ b/scripts/rootpy
@@ -2,6 +2,7 @@
from __future__ import division
from rootpy.extern import argparse
+from rootpy.util.extras import humanize_bytes, print_table
import rootpy
import ROOT
import fnmatch
@@ -44,35 +45,6 @@ def make_chain(args, **kwargs):
chain.Add(filename)
return chain
-def humanize_bytes(bytes, precision=1):
-
- abbrevs = (
- (1<<50L, 'PB'),
- (1<<40L, 'TB'),
- (1<<30L, 'GB'),
- (1<<20L, 'MB'),
- (1<<10L, 'kB'),
- (1, 'bytes')
- )
- if bytes == 1:
- return '1 byte'
- for factor, suffix in abbrevs:
- if bytes >= factor:
- break
- return '%.*f %s' % (precision, bytes / factor, suffix)
-
-def print_table(table, sep = ' '):
-
- # Reorganize data by columns
- cols = zip(*table)
- # Compute column widths by taking maximum length of values per column
- col_widths = [ max(len(value) for value in col) for col in cols ]
- # Create a suitable format string
- format = sep.join(['%%-%ds' % width for width in col_widths ])
- # Print each row using the computed format
- for row in table:
- print format % tuple(row)
-
class formatter_class(argparse.ArgumentDefaultsHelpFormatter,
argparse.RawTextHelpFormatter): | move humanize_bytes and print_table to util.extras and log info if TTreeCache is enabled | rootpy_rootpy | train |
2455fa5be42f581c5de398b4ad0e86588d71ca32 | diff --git a/commands/issue.go b/commands/issue.go
index <HASH>..<HASH> 100644
--- a/commands/issue.go
+++ b/commands/issue.go
@@ -135,12 +135,14 @@ func listIssues(cmd *Command, args *Args) {
os.Exit(0)
}
-func formatIssue(issue github.Issue, format string, colorize bool) string {
- var assigneeLogin string
- if a := issue.Assignee; a != nil {
- assigneeLogin = a.Login
+func maybeUserLogin(u *github.User) string {
+ if u == nil {
+ return ""
}
+ return u.Login
+}
+func formatIssue(issue github.Issue, format string, colorize bool) string {
var stateColorSwitch string
if colorize {
issueColor := 32
@@ -177,8 +179,8 @@ func formatIssue(issue github.Issue, format string, colorize bool) string {
"t": issue.Title,
"l": strings.Join(labelStrings, " "),
"b": issue.Body,
- "u": issue.User.Login,
- "a": assigneeLogin,
+ "u": maybeUserLogin(issue.User),
+ "a": maybeUserLogin(issue.Assignee),
}
return ui.Expand(format, placeholders, colorize)
diff --git a/commands/issue_test.go b/commands/issue_test.go
index <HASH>..<HASH> 100644
--- a/commands/issue_test.go
+++ b/commands/issue_test.go
@@ -45,7 +45,6 @@ func TestFormatIssue(t *testing.T) {
Number: 42,
Title: "Just an Issue",
State: "closed",
- User: &github.User{Login: "pcorpet"},
},
format: format,
colorize: true,
@@ -57,7 +56,6 @@ func TestFormatIssue(t *testing.T) {
Number: 42,
Title: "An issue with labels",
State: "open",
- User: &github.User{Login: "pcorpet"},
Labels: []github.IssueLabel{
{Name: "bug", Color: "800000"},
{Name: "reproduced", Color: "55ff55"},
@@ -73,7 +71,6 @@ func TestFormatIssue(t *testing.T) {
Number: 42,
Title: "Just an Issue",
State: "open",
- User: &github.User{Login: "pcorpet"},
},
format: format,
colorize: false,
@@ -85,7 +82,6 @@ func TestFormatIssue(t *testing.T) {
Number: 42,
Title: "An issue with labels",
State: "open",
- User: &github.User{Login: "pcorpet"},
Labels: []github.IssueLabel{
{Name: "bug", Color: "880000"},
{Name: "reproduced", Color: "55ff55"}, | Account for nil User as well. | github_hub | train |
fa020a5ec1561896981058e31a704cebdd0f5035 | diff --git a/src/Utils.php b/src/Utils.php
index <HASH>..<HASH> 100644
--- a/src/Utils.php
+++ b/src/Utils.php
@@ -16,17 +16,6 @@ class Utils
}
/**
- * Does File Contain
- *
- * Determine if the file contains a string
- * @return boolean
- */
- public static function doesFileContain($filename, $str)
- {
- return strpos(file_get_contents($filename), $str) !== false;
- }
-
- /**
* Is Array Indexed
*
* Determine if the array is indexed | Remove unused doesFileContain | soberwp_controller | train |
efe53aab8a008a7a34b650b22695d38c12c3f9e7 | diff --git a/cf/net/paginated_resources.go b/cf/net/paginated_resources.go
index <HASH>..<HASH> 100644
--- a/cf/net/paginated_resources.go
+++ b/cf/net/paginated_resources.go
@@ -17,9 +17,9 @@ type PaginatedResources struct {
resourceType reflect.Type
}
-func (this PaginatedResources) Resources() ([]interface{}, error) {
- slicePtr := reflect.New(reflect.SliceOf(this.resourceType))
- err := json.Unmarshal([]byte(this.ResourcesBytes), slicePtr.Interface())
+func (pr PaginatedResources) Resources() ([]interface{}, error) {
+ slicePtr := reflect.New(reflect.SliceOf(pr.resourceType))
+ err := json.Unmarshal([]byte(pr.ResourcesBytes), slicePtr.Interface())
slice := reflect.Indirect(slicePtr)
contents := make([]interface{}, 0, slice.Len()) | Don't use 'this' as a receiver name | cloudfoundry_cli | train |
59144eef21ba873a058c3b046f3068b8029671a7 | diff --git a/src/terra/Command/Environment/EnvironmentProxyEnable.php b/src/terra/Command/Environment/EnvironmentProxyEnable.php
index <HASH>..<HASH> 100644
--- a/src/terra/Command/Environment/EnvironmentProxyEnable.php
+++ b/src/terra/Command/Environment/EnvironmentProxyEnable.php
@@ -20,7 +20,7 @@ class EnvironmentProxyEnable extends Command
protected function execute(InputInterface $input, OutputInterface $output)
{
$output->writeln('Hello Terra!');
- $cmd = 'docker run -d -p 80:80 -v /var/run/docker.sock:/tmp/docker.sock:ro jwilder/nginx-proxy';
+ $cmd = 'docker run -d -p 80:80 -v /var/run/docker.sock:/tmp/docker.sock:ro --security-opt label:disable jwilder/nginx-proxy';
$process = new Process($cmd);
$process->setTimeout(null); | Fixing URL proxy being blocked when using SELinux by adding a label to skip security. | terra-ops_terra-cli | train |
4b78574d31b5f19fa269a28a8e18c3a14ff6f54c | diff --git a/modernrpc/handlers/jsonhandler.py b/modernrpc/handlers/jsonhandler.py
index <HASH>..<HASH> 100644
--- a/modernrpc/handlers/jsonhandler.py
+++ b/modernrpc/handlers/jsonhandler.py
@@ -55,10 +55,8 @@ class JSONRPCHandler(RPCHandler):
if not isinstance(body, dict):
raise RPCInvalidRequest('Payload object must be a struct')
- if 'id' in body:
- self.request_id = body['id']
- else:
- raise RPCInvalidRequest('Missing parameter "id"')
+ # Store current request id, or None if request is a notification
+ self.request_id = body.get('id')
if 'jsonrpc' not in body:
raise RPCInvalidRequest('Missing parameter "jsonrpc"')
@@ -70,8 +68,14 @@ class JSONRPCHandler(RPCHandler):
return body['method'], body.get('params', [])
- @staticmethod
- def json_http_response(data, http_response_cls=HttpResponse):
+ def is_notification_request(self):
+ return self.request_id is None
+
+ def json_http_response(self, data, http_response_cls=HttpResponse):
+
+ if self.is_notification_request():
+ return http_response_cls()
+
response = http_response_cls(data)
response['Content-Type'] = 'application/json'
return response | JSON-RPC: allow unspecified request id
This effectively allows "notification" requests, according to the standard | alorence_django-modern-rpc | train |
3db902ea2d98c5294f0f7ea203af8cd6a463a16a | diff --git a/parfait-spring/src/test/java/com/custardsource/parfait/spring/SelfStartingMonitoringViewTest.java b/parfait-spring/src/test/java/com/custardsource/parfait/spring/SelfStartingMonitoringViewTest.java
index <HASH>..<HASH> 100644
--- a/parfait-spring/src/test/java/com/custardsource/parfait/spring/SelfStartingMonitoringViewTest.java
+++ b/parfait-spring/src/test/java/com/custardsource/parfait/spring/SelfStartingMonitoringViewTest.java
@@ -9,6 +9,8 @@ import com.custardsource.parfait.dxm.InMemoryByteBufferFactory;
import com.custardsource.parfait.dxm.PcpMmvWriter;
import com.custardsource.parfait.dxm.PcpWriter;
import com.custardsource.parfait.pcp.PcpMonitorBridge;
+import com.google.common.base.Charsets;
+import com.google.common.primitives.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
@@ -101,13 +103,10 @@ public class SelfStartingMonitoringViewTest {
}
/**
- * TODO this probably isn't the best/most accurate way to assert these conditions, but it does kinda work..
- *
- * use Guava's Bytes.indexOf(byte[], byte[]) and cast the expectedString to the proper Charset for proper comparison
- * because this really only works because the PCP_CHARSET is currently US_ASCII
+ * we cast the string to US-ASCII charset because that's what PCP uses and then find it in the ByteBuffer array
+ * to ensure it's there.
*/
- private void assertBufferContainsExpectedStrings(ByteBuffer buffer, final CharSequence expectedString) {
- final String stringVersion = new String(buffer.array(), PcpMmvWriter.PCP_CHARSET);
- assertTrue(stringVersion.contains(expectedString));
+ private void assertBufferContainsExpectedStrings(ByteBuffer buffer, final String expectedString) {
+ assertTrue(Bytes.indexOf(buffer.array(), expectedString.getBytes(Charsets.US_ASCII)) >= 0);
}
} | Add more accurate byteBuffer scan with proper Charset handling based on Cowan's feedback | performancecopilot_parfait | train |
c4a2cd8d998cb4f827bdb9f3b0f450da2b13a16a | diff --git a/peyotl/api/taxomachine.py b/peyotl/api/taxomachine.py
index <HASH>..<HASH> 100644
--- a/peyotl/api/taxomachine.py
+++ b/peyotl/api/taxomachine.py
@@ -84,6 +84,9 @@ class _TaxomachineAPIWrapper(_WSWrapper):
raise ValueError('"{}" is not a valid context name'.format(context_name))
if not (isinstance(names, list) or isinstance(names, tuple)):
names = [names]
+ for name in names:
+ if len(name) < 2:
+ raise ValueError('Name "{}" found. Names must have at least 2 characters!'.format(name))
if id_list and len(id_list) != len(names):
raise ValueError('"id_list must be the same size as "names"')
data = {'names': names}
@@ -96,13 +99,13 @@ class _TaxomachineAPIWrapper(_WSWrapper):
data['contextName'] = context_name
else:
data['context_name'] = context_name
- data['do_approximate_matching'] = bool(fuzzy_matching)
- if id_list:
- data['ids'] = list(id_list)
- if include_deprecated:
- data['include_deprecated'] = True
- if include_dubious:
- data['include_dubious'] = True
+ data['do_approximate_matching'] = bool(fuzzy_matching)
+ if id_list:
+ data['ids'] = list(id_list)
+ if include_deprecated:
+ data['include_deprecated'] = True
+ if include_dubious:
+ data['include_dubious'] = True
return self.json_http_post(uri, data=anyjson.dumps(data))
def autocomplete(self, name, context_name=None, include_dubious=False): | bug fix in non-default search and catching bogus names before sending them to taxomachine | OpenTreeOfLife_peyotl | train |
5aa882639865b8750079aed2c6b2a381edb2de3f | diff --git a/tests/TalesStringTest.php b/tests/TalesStringTest.php
index <HASH>..<HASH> 100644
--- a/tests/TalesStringTest.php
+++ b/tests/TalesStringTest.php
@@ -30,8 +30,7 @@ class TalesStringTest extends PHPTAL_TestCase {
function testSubPathSimple()
{
$res = PHPTAL_Php_TalesInternal::string('hello $name how are you ?');
- $rgm = preg_match('/\'hello \'.*?\$ctx->name.*?\' how are you \?\'$/', $res);
- $this->assertEquals(1, $rgm);
+ $this->assertRegExp('/\'hello \'.*?\$ctx->name.*?\' how are you \?\'$/', $res);
}
function testSubPath()
@@ -43,8 +42,7 @@ class TalesStringTest extends PHPTAL_TestCase {
function testSubPathExtended()
{
$res = PHPTAL_Php_TalesInternal::string('hello ${user/name} how are you ?');
- $rgm = preg_match('/\'hello \'.*?\$ctx->user, \'name\'.*?\' how are you \?\'$/', $res);
- $this->assertEquals(1, $rgm);
+ $this->assertRegExp('/\'hello \'.*?\$ctx->user, \'name\'.*?\' how are you \?\'$/', $res);
}
function testQuote()
@@ -59,8 +57,8 @@ class TalesStringTest extends PHPTAL_TestCase {
function testDoubleVar()
{
$res = PHPTAL_Php_TalesInternal::string('hello $foo $bar');
- $this->assertEquals(1, preg_match('/ctx->foo/', $res), '$foo not interpolated');
- $this->assertEquals(1, preg_match('/ctx->bar/', $res), '$bar not interpolated');
+ $this->assertRegExp('/ctx->foo/', $res, '$foo not interpolated');
+ $this->assertRegExp('/ctx->bar/', $res, '$bar not interpolated');
}
function testDoubleDotComa() | Changed test to use PHPUnit's regex | phptal_PHPTAL | train |
8fd309a5ea551e68ef22b616ac043c58fea0bda0 | diff --git a/core/model/fieldtypes/HTMLText.php b/core/model/fieldtypes/HTMLText.php
index <HASH>..<HASH> 100755
--- a/core/model/fieldtypes/HTMLText.php
+++ b/core/model/fieldtypes/HTMLText.php
@@ -25,7 +25,17 @@ class HTMLText extends Text {
*/
function LimitCharacters($limit = 20, $add = "...") {
$value = trim(strip_tags($this->value));
- return (strlen($value) > $limit) ? substr($value, 0, $limit) . $add : $value;
+
+ // Content html text to plan text before sub string-ing
+ // to cutting off part of the html entity character
+ // For example, & because &am
+ $value = html_entity_decode($value);
+ $value = (strlen($value) > $limit) ? substr($value, 0, $limit) . $add : $value;
+
+ // Convert plan text back to html entities
+ $value = htmlentities($value);
+
+ return $value;
}
/**
diff --git a/tests/fieldtypes/HTMLTextTest.php b/tests/fieldtypes/HTMLTextTest.php
index <HASH>..<HASH> 100644
--- a/tests/fieldtypes/HTMLTextTest.php
+++ b/tests/fieldtypes/HTMLTextTest.php
@@ -11,7 +11,8 @@ class HTMLTextTest extends SapphireTest {
function testLimitCharacters() {
$cases = array(
'The little brown fox jumped over the lazy cow.' => 'The little brown fox...',
- '<p>This is some text in a paragraph.</p>' => 'This is some text in...'
+ '<p>This is some text in a paragraph.</p>' => 'This is some text in...',
+ 'This text contains & in it' => 'This text contains &...'
);
        foreach($cases as $originalValue => $expectedValue) { | BUGFIX: In some circumstances, parts of the entities were cut off, thus displaying the LimitCharacters-ed text incorrectly | silverstripe_silverstripe-framework | train |
b775ce22ae4e8f5ee77504e9d9fc82de92150647 | diff --git a/django_extensions/management/commands/runserver_plus.py b/django_extensions/management/commands/runserver_plus.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/commands/runserver_plus.py
+++ b/django_extensions/management/commands/runserver_plus.py
@@ -44,9 +44,7 @@ class Command(BaseCommand):
from django.core.handlers.wsgi import WSGIHandler
try:
from werkzeug import run_simple, DebuggedApplication
- except ImportError, e:
- raise e
- except:
+ except ImportError:
raise CommandError("Werkzeug is required to use runserver_plus. Please visit http://werkzeug.pocoo.org/download")
# usurp django's handler | fixed message when werkzeug is not found for runserver_plus | django-extensions_django-extensions | train |
0da2b30c8100891ba2357e5a8e4ea384c4d92886 | diff --git a/lib/encoding-length.js b/lib/encoding-length.js
index <HASH>..<HASH> 100644
--- a/lib/encoding-length.js
+++ b/lib/encoding-length.js
@@ -3,7 +3,7 @@ const isPlainObject = require('is-plain-object')
module.exports = encodingLengthCommon
-function encodingLength(obj, schema) {
+function encodingLength(obj, schema, context) {
if (!isPlainObject(obj)) {
throw new TypeError('Argument #1 should be a plain object.')
}
@@ -12,12 +12,26 @@ function encodingLength(obj, schema) {
throw new TypeError('Argument #2 should be a plain object.')
}
+ if (context === undefined) {
+ context = {
+ node: obj,
+ }
+ }
+
return Object.keys(schema).reduce(
- (counter, key) => counter + encodingLengthCommon(obj[key], schema[key]),
+ (counter, key) => counter + encodingLengthCommon(obj[key], schema[key], context),
0
)
}
-function encodingLengthCommon(item, type) {
- return isType(type) ? type.encodingLength(item) : encodingLength(item, type)
+function encodingLengthCommon(item, type, context) {
+ if (context === undefined) {
+ context = {
+ node: item,
+ }
+ }
+
+ return isType(type)
+ ? type.encodingLength(item, context)
+ : encodingLength(item, type, context)
}
diff --git a/test/encoding-length.js b/test/encoding-length.js
index <HASH>..<HASH> 100644
--- a/test/encoding-length.js
+++ b/test/encoding-length.js
@@ -11,9 +11,13 @@ describe('encodingLength', () => {
a: 1,
}
+ const context = {
+ node: obj,
+ }
+
const expectedSize = 5
- schema.a.encodingLength.withArgs(obj.a).returns(expectedSize)
+ schema.a.encodingLength.withArgs(obj.a, context).returns(expectedSize)
common.plug(schema.a)
expect(encodingLength(obj, schema)).toBe(expectedSize)
@@ -35,10 +39,14 @@ describe('encodingLength', () => {
},
}
+ const context = {
+ node: obj,
+ }
+
const expectedSize = 5
- schema.a.encodingLength.withArgs(obj.a).returns(expectedSize)
- schema.b.c.encodingLength.withArgs(obj.b.c).returns(expectedSize)
+ schema.a.encodingLength.withArgs(obj.a, context).returns(expectedSize)
+ schema.b.c.encodingLength.withArgs(obj.b.c, context).returns(expectedSize)
common.plug(schema.a)
    common.plug(schema.b.c) | `encodingLength` supports context too. | reklatsmasters_binary-data | train |
88167f39a6437e282ff75a6ec30e8081d6d50441 | diff --git a/p2p/client_identity.go b/p2p/client_identity.go
index <HASH>..<HASH> 100644
--- a/p2p/client_identity.go
+++ b/p2p/client_identity.go
@@ -7,8 +7,9 @@ import (
// ClientIdentity represents the identity of a peer.
type ClientIdentity interface {
- String() string // human readable identity
- Pubkey() []byte // 512-bit public key
+ String() string // human readable identity
+ Pubkey() []byte // 512-bit public key
+ PrivKey() []byte // 512-bit private key
}
type SimpleClientIdentity struct {
@@ -17,10 +18,11 @@ type SimpleClientIdentity struct {
customIdentifier string
os string
implementation string
+ privkey []byte
pubkey []byte
}
-func NewSimpleClientIdentity(clientIdentifier string, version string, customIdentifier string, pubkey []byte) *SimpleClientIdentity {
+func NewSimpleClientIdentity(clientIdentifier string, version string, customIdentifier string, privkey []byte, pubkey []byte) *SimpleClientIdentity {
clientIdentity := &SimpleClientIdentity{
clientIdentifier: clientIdentifier,
version: version,
@@ -28,6 +30,7 @@ func NewSimpleClientIdentity(clientIdentifier string, version string, customIden
os: runtime.GOOS,
implementation: runtime.Version(),
pubkey: pubkey,
+ privkey: privkey,
}
return clientIdentity
@@ -50,8 +53,12 @@ func (c *SimpleClientIdentity) String() string {
c.implementation)
}
+func (c *SimpleClientIdentity) Privkey() []byte {
+ return c.privkey
+}
+
func (c *SimpleClientIdentity) Pubkey() []byte {
- return []byte(c.pubkey)
+ return c.pubkey
}
func (c *SimpleClientIdentity) SetCustomIdentifier(customIdentifier string) {
diff --git a/p2p/client_identity_test.go b/p2p/client_identity_test.go
index <HASH>..<HASH> 100644
--- a/p2p/client_identity_test.go
+++ b/p2p/client_identity_test.go
@@ -1,13 +1,22 @@
package p2p
import (
+ "bytes"
"fmt"
"runtime"
"testing"
)
func TestClientIdentity(t *testing.T) {
- clientIdentity := NewSimpleClientIdentity("Ethereum(G)", "0.5.16", "test", []byte("pubkey"))
+ clientIdentity := NewSimpleClientIdentity("Ethereum(G)", "0.5.16", "test", []byte("privkey"), []byte("pubkey"))
+ key := clientIdentity.Privkey()
+ if !bytes.Equal(key, []byte("privkey")) {
+ t.Errorf("Expected Privkey to be %x, got %x", key, []byte("privkey"))
+ }
+ key = clientIdentity.Pubkey()
+ if !bytes.Equal(key, []byte("pubkey")) {
+ t.Errorf("Expected Pubkey to be %x, got %x", key, []byte("pubkey"))
+ }
clientString := clientIdentity.String()
expected := fmt.Sprintf("Ethereum(G)/v0.5.16/test/%s/%s", runtime.GOOS, runtime.Version())
if clientString != expected { | add privkey to clientIdentity + tests | ethereum_go-ethereum | train |
e16f6ed267a470dcf412768a88a76b9710f7ef52 | diff --git a/src/components/record/recordPreview/index.js b/src/components/record/recordPreview/index.js
index <HASH>..<HASH> 100644
--- a/src/components/record/recordPreview/index.js
+++ b/src/components/record/recordPreview/index.js
@@ -3,7 +3,7 @@ import * as Rx from 'rx';
let image_enhancer = require('imports?$=jquery!../../utils/jquery-plugins/imageEnhancer');
require('phraseanet-common/src/components/tooltip');
const previewRecordService = (services) => {
- const { configService, localeService, appEvents } = services;
+ const {configService, localeService, appEvents} = services;
const url = configService.get('baseUrl');
let $bodyContainer = null;
let $previewContainer = null;
@@ -255,6 +255,7 @@ const previewRecordService = (services) => {
options.current.width = parseInt($('#PREVIEWIMGCONT input[name=width]').val(), 10);
options.current.height = parseInt($('#PREVIEWIMGCONT input[name=height]').val(), 10);
options.current.tot = data.tot;
+ console.log('setting up current pos to', relativePos)
options.current.pos = relativePos;
if ($('#PREVIEWBOX img.record.zoomable').length > 0) {
@@ -374,7 +375,10 @@ const previewRecordService = (services) => {
function getPrevious() {
if (options.mode === 'RESULT') {
let posAsk = parseInt(options.current.pos, 10) - 1;
- posAsk = (posAsk < 0) ? ((parseInt(options.navigation.tot, 10) - 1)) : posAsk;
+ if (options.navigation.page === 1) {
+ // may go to last result
+ posAsk = (posAsk < 0) ? ((parseInt(options.navigation.tot, 10) - 1)) : posAsk;
+ }
_openPreview('RESULT', posAsk, '', false);
} else {
if (!$('#PREVIEWCURRENT li.selected').is(':first-child')) { | PHRAS-<I> - fix backward navigation in preview | alchemy-fr_Phraseanet-production-client | train |
8af1c70a8be17543eb33e9bfbbcdd8371e3201cc | diff --git a/cli/setup.go b/cli/setup.go
index <HASH>..<HASH> 100644
--- a/cli/setup.go
+++ b/cli/setup.go
@@ -16,7 +16,7 @@ import (
const (
RootFlag = "root"
HomeFlag = "home"
- DebugFlag = "debug"
+ TraceFlag = "trace"
OutputFlag = "output"
EncodingFlag = "encoding"
)
@@ -36,7 +36,7 @@ func PrepareBaseCmd(cmd *cobra.Command, envPrefix, defautRoot string) Executor {
// also, default must be empty, so we can detect this unset and fall back
// to --root / TM_ROOT / TMROOT
cmd.PersistentFlags().String(HomeFlag, "", "root directory for config and data")
- cmd.PersistentFlags().Bool(DebugFlag, false, "print out full stack trace on errors")
+ cmd.PersistentFlags().Bool(TraceFlag, false, "print out full stack trace on errors")
cmd.PersistentPreRunE = concatCobraCmdFuncs(bindFlagsLoadViper, cmd.PersistentPreRunE)
return Executor{cmd}
}
@@ -92,7 +92,7 @@ func (e Executor) Execute() error {
err := e.Command.Execute()
if err != nil {
// TODO: something cooler with log-levels
- if viper.GetBool(DebugFlag) {
+ if viper.GetBool(TraceFlag) {
fmt.Printf("ERROR: %+v\n", err)
} else {
fmt.Println("ERROR:", err.Error())
diff --git a/cli/setup_test.go b/cli/setup_test.go
index <HASH>..<HASH> 100644
--- a/cli/setup_test.go
+++ b/cli/setup_test.go
@@ -184,7 +184,7 @@ func TestSetupUnmarshal(t *testing.T) {
}
}
-func TestSetupDebug(t *testing.T) {
+func TestSetupTrace(t *testing.T) {
assert, require := assert.New(t), require.New(t)
cases := []struct {
@@ -193,22 +193,22 @@ func TestSetupDebug(t *testing.T) {
long bool
expected string
}{
- {nil, nil, false, "Debug flag = false"},
- {[]string{"--debug"}, nil, true, "Debug flag = true"},
+ {nil, nil, false, "Trace flag = false"},
+ {[]string{"--trace"}, nil, true, "Trace flag = true"},
{[]string{"--no-such-flag"}, nil, false, "unknown flag: --no-such-flag"},
- {nil, map[string]string{"DBG_DEBUG": "true"}, true, "Debug flag = true"},
+ {nil, map[string]string{"DBG_TRACE": "true"}, true, "Trace flag = true"},
}
for idx, tc := range cases {
i := strconv.Itoa(idx)
// test command that store value of foobar in local variable
- debug := &cobra.Command{
- Use: "debug",
+ trace := &cobra.Command{
+ Use: "trace",
RunE: func(cmd *cobra.Command, args []string) error {
- return errors.Errorf("Debug flag = %t", viper.GetBool(DebugFlag))
+ return errors.Errorf("Trace flag = %t", viper.GetBool(TraceFlag))
},
}
- cmd := PrepareBaseCmd(debug, "DBG", "/qwerty/asdfgh") // some missing dir..
+ cmd := PrepareBaseCmd(trace, "DBG", "/qwerty/asdfgh") // some missing dir..
viper.Reset()
args := append([]string{cmd.Use}, tc.args...)
@@ -219,7 +219,7 @@ func TestSetupDebug(t *testing.T) {
assert.Equal(desired, msg[0], i)
if tc.long && assert.True(len(msg) > 2, i) {
// the next line starts the stack trace...
- assert.Contains(msg[1], "TestSetupDebug", i)
+ assert.Contains(msg[1], "TestSetupTrace", i)
assert.Contains(msg[2], "setup_test.go", i)
}
} | Renamed --debug to --trace, used for light-client and basecoin | tendermint_tendermint | train |
a0f777196254ef0256b016c011c1f6ae004e3e5d | diff --git a/core/vm/instructions_test.go b/core/vm/instructions_test.go
index <HASH>..<HASH> 100644
--- a/core/vm/instructions_test.go
+++ b/core/vm/instructions_test.go
@@ -525,12 +525,14 @@ func TestOpMstore(t *testing.T) {
mem.Resize(64)
pc := uint64(0)
v := "abcdef00000000000000abba000000000deaf000000c0de00100000000133700"
- stack.pushN(*new(uint256.Int).SetBytes(common.Hex2Bytes(v)), *new(uint256.Int))
+ stack.push(new(uint256.Int).SetBytes(common.Hex2Bytes(v)))
+ stack.push(new(uint256.Int))
opMstore(&pc, evmInterpreter, &ScopeContext{mem, stack, nil})
if got := common.Bytes2Hex(mem.GetCopy(0, 32)); got != v {
t.Fatalf("Mstore fail, got %v, expected %v", got, v)
}
- stack.pushN(*new(uint256.Int).SetUint64(0x1), *new(uint256.Int))
+ stack.push(new(uint256.Int).SetUint64(0x1))
+ stack.push(new(uint256.Int))
opMstore(&pc, evmInterpreter, &ScopeContext{mem, stack, nil})
if common.Bytes2Hex(mem.GetCopy(0, 32)) != "0000000000000000000000000000000000000000000000000000000000000001" {
t.Fatalf("Mstore failed to overwrite previous value")
@@ -553,7 +555,8 @@ func BenchmarkOpMstore(bench *testing.B) {
bench.ResetTimer()
for i := 0; i < bench.N; i++ {
- stack.pushN(*value, *memStart)
+ stack.push(value)
+ stack.push(memStart)
opMstore(&pc, evmInterpreter, &ScopeContext{mem, stack, nil})
}
}
@@ -572,7 +575,8 @@ func BenchmarkOpKeccak256(bench *testing.B) {
bench.ResetTimer()
for i := 0; i < bench.N; i++ {
- stack.pushN(*uint256.NewInt(32), *start)
+ stack.push(uint256.NewInt(32))
+ stack.push(start)
opKeccak256(&pc, evmInterpreter, &ScopeContext{mem, stack, nil})
}
}
diff --git a/core/vm/stack.go b/core/vm/stack.go
index <HASH>..<HASH> 100644
--- a/core/vm/stack.go
+++ b/core/vm/stack.go
@@ -54,10 +54,6 @@ func (st *Stack) push(d *uint256.Int) {
// NOTE push limit (1024) is checked in baseCheck
st.data = append(st.data, *d)
}
-func (st *Stack) pushN(ds ...uint256.Int) {
- // FIXME: Is there a way to pass args by pointers.
- st.data = append(st.data, ds...)
-}
func (st *Stack) pop() (ret uint256.Int) {
ret = st.data[len(st.data)-1] | core/vm: remove stack.pushN (#<I>) | ethereum_go-ethereum | train |
c38a1a498f8979150c389c83edeba7d3493c9fd4 | diff --git a/bin/require-command.js b/bin/require-command.js
index <HASH>..<HASH> 100755
--- a/bin/require-command.js
+++ b/bin/require-command.js
@@ -55,12 +55,12 @@ switch (opts.command) {
case 'compile':
var example = 'require compile ./path/to/file.js --level 2'
if (opts.level === null) {
- console.log('Please specify a compilation level, e.g.')
+ console.log('Specify a compilation level, e.g.')
console.log(example)
process.exit(1)
}
- if (opts.paths.length != 1) {
- console.log('Please specify a single file to compile, e.g.')
+ if (!opts.file) {
+ console.log('Specify a single file to compile, e.g.')
console.log(example)
process.exit(1)
} | We switched from using opts.paths[0] to opts.file | marcuswestin_require | train |
2c77bbfb4b743904c675ad8474e4e7788e24ab89 | diff --git a/src/Model/Behavior/UploadBehavior.php b/src/Model/Behavior/UploadBehavior.php
index <HASH>..<HASH> 100644
--- a/src/Model/Behavior/UploadBehavior.php
+++ b/src/Model/Behavior/UploadBehavior.php
@@ -69,8 +69,13 @@ class UploadBehavior extends Behavior
}
$file = $entity->get($virtualField);
- if ((int)$file['error'] === UPLOAD_ERR_NO_FILE) {
+
+ $error = $this->_triggerErrors($file);
+
+ if ($error === false) {
continue;
+ } elseif (is_string($error)) {
+ throw new \ErrorException($error);
}
if (!isset($fieldOption['path'])) {
@@ -103,6 +108,53 @@ class UploadBehavior extends Behavior
}
/**
+ * Trigger upload errors.
+ *
+ * @param array $file The file to check.
+ *
+ * @return string|int|void
+ */
+ protected function _triggerErrors($file)
+ {
+ if (!empty($file['error'])) {
+ switch ((int)$file['error']) {
+ case UPLOAD_ERR_INI_SIZE:
+ $message = __('The uploaded file exceeds the upload_max_filesize directive in php.ini : {0}', ini_get('upload_max_filesize'));
+ break;
+
+ case UPLOAD_ERR_FORM_SIZE:
+ $message = __('The uploaded file exceeds the MAX_FILE_SIZE directive that was specified in the HTML form.');
+ break;
+
+ case UPLOAD_ERR_NO_FILE:
+ $message = false;
+ break;
+
+ case UPLOAD_ERR_PARTIAL:
+ $message = __('The uploaded file was only partially uploaded.');
+ break;
+
+ case UPLOAD_ERR_NO_TMP_DIR:
+ $message = __('Missing a temporary folder.');
+ break;
+
+ case UPLOAD_ERR_CANT_WRITE:
+ $message = __('Failed to write file to disk.');
+ break;
+
+ case UPLOAD_ERR_EXTENSION:
+ $message = __('A PHP extension stopped the file upload.');
+ break;
+
+ default:
+ $message = __('Unknown upload error.');
+ }
+
+ return $message;
+ }
+ }
+
+ /**
* Move the temporary source file to the destination file.
*
* @param \Cake\ORM\Entity $entity The entity that is going to be saved.
diff --git a/tests/bootstrap.php b/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/tests/bootstrap.php
+++ b/tests/bootstrap.php
@@ -8,7 +8,7 @@ use Cake\Log\Log;
require_once 'vendor/autoload.php';
// Path constants to a few helpful things.
-define('DS', DIRECTORY_SEPARATOR);
+//define('DS', DIRECTORY_SEPARATOR);
define('ROOT', dirname(__DIR__) . DS);
define('CAKE_CORE_INCLUDE_PATH', ROOT . 'vendor' . DS . 'cakephp' . DS . 'cakephp');
define('CORE_PATH', ROOT . 'vendor' . DS . 'cakephp' . DS . 'cakephp' . DS); | Trigger all upload errors.
Ref #8 | Xety_Cake3-Upload | train |
abbee41b7852480ff8552474a147cc5912fe755a | diff --git a/forms/ReadonlyField.php b/forms/ReadonlyField.php
index <HASH>..<HASH> 100644
--- a/forms/ReadonlyField.php
+++ b/forms/ReadonlyField.php
@@ -11,10 +11,43 @@ class ReadonlyField extends FormField {
protected $readonly = true;
+ /**
+ * Include a hidden field in the HTML for the readonly field
+ * @var boolean
+ */
+ protected $includeHiddenField = false;
+
+ /**
+ * If true, a hidden field will be included in the HTML for the readonly field.
+ *
+ * This can be useful if you need to pass the data through on the form submission, as
+ * long as it's okay than an attacker could change the data before it's submitted.
+ *
+ * This is disabled by default as it can introduce security holes if the data is not
+ * allowed to be modified by the user.
+ *
+ * @param boolean $includeHiddenField
+ */
+ public function setIncludeHiddenField($includeHiddenField) {
+ $this->includeHiddenField = $includeHiddenField;
+ }
+
public function performReadonlyTransformation() {
return clone $this;
}
+ public function Field($properties = array()) {
+ // Include a hidden field in the HTML
+ if($this->includeHiddenField && $this->readonly) {
+ $hidden = clone $this;
+ $hidden->setReadonly(false);
+ return parent::Field($properties) . $hidden->Field($properties);
+
+ } else {
+ return parent::Field($properties);
+ }
+ }
+
public function Value() {
if($this->value) return $this->dontEscape ? $this->value : Convert::raw2xml($this->value);
else return '<i>(' . _t('FormField.NONE', 'none') . ')</i>';
@@ -25,7 +58,7 @@ class ReadonlyField extends FormField {
parent::getAttributes(),
array(
'type' => 'hidden',
- 'value' => null,
+ 'value' => $this->readonly ? null : $this->value,
)
);
} | NEW: Add ReadonlyField::setIncludeHiddenField()
The new config setter restores the <I> behaviour of including <input type="hidden"> with a field. Although this option has security flaws as a default, it is useful in a few circumstances and, if nothing else, is handy to make upgrading sites easier. | silverstripe_silverstripe-framework | train |
b77092bf813acc3eb7e342403967cb8b7185beeb | diff --git a/lib/src/main/java/com/github/kevinsawicki/http/HttpRequest.java b/lib/src/main/java/com/github/kevinsawicki/http/HttpRequest.java
index <HASH>..<HASH> 100644
--- a/lib/src/main/java/com/github/kevinsawicki/http/HttpRequest.java
+++ b/lib/src/main/java/com/github/kevinsawicki/http/HttpRequest.java
@@ -118,6 +118,11 @@ public class HttpRequest {
public static final String HEADER_HOST = "Host";
/**
+ * 'Location' header name
+ */
+ public static final String HEADER_LOCATION = "Location";
+
+ /**
* 'Server' header name
*/
public static final String HEADER_SERVER = "Server";
@@ -1043,6 +1048,15 @@ public class HttpRequest {
}
/**
+ * Get the 'Location' header from the response
+ *
+ * @return location
+ */
+ public String location() {
+ return header(HEADER_LOCATION);
+ }
+
+ /**
* Set the 'Authentication' header to given value
*
* @param value | Add helper to get 'Location' header response value | kevinsawicki_http-request | train |
fc3e3dc245a13bda728ab9e3aadc8c5da5a06ae9 | diff --git a/wafer/schedule/admin.py b/wafer/schedule/admin.py
index <HASH>..<HASH> 100644
--- a/wafer/schedule/admin.py
+++ b/wafer/schedule/admin.py
@@ -9,17 +9,23 @@ from wafer.pages.models import Page
# These are functions to simplify testing
def validate_slots():
"""Find any slots that overlap"""
- pass
+ overlaps = []
+ return overlaps
-def validate_talks():
- """Find pending / rejected talks that are assigned to slots"""
- pass
+def validate_items():
+ """Find errors in the schedule. Check for:
+ - pending / rejected talks in the schedule
+ - items with both talks and pages assigned
+ """
+ validation = []
+ return validation
def find_duplicate_schedule_items():
"""Find talks / pages assigned to mulitple schedule items"""
- pass
+ duplicates = []
+ return duplicates
def find_clashes():
diff --git a/wafer/schedule/tests/test_schedule.py b/wafer/schedule/tests/test_schedule.py
index <HASH>..<HASH> 100644
--- a/wafer/schedule/tests/test_schedule.py
+++ b/wafer/schedule/tests/test_schedule.py
@@ -3,7 +3,7 @@ from django.utils.timezone import utc
import datetime as D
from wafer.schedule.models import Venue, Slot, ScheduleItem
-from wafer.schedule.admin import (validate_slots, validate_talks,
+from wafer.schedule.admin import (validate_slots, validate_items,
find_duplicate_schedule_items,
find_clashes) | Rename function to better reflect aim - expand description to match | CTPUG_wafer | train |
0bb13c0f040516e62158979e7c6665e8b678eafb | diff --git a/command/agent/config_parse_test.go b/command/agent/config_parse_test.go
index <HASH>..<HASH> 100644
--- a/command/agent/config_parse_test.go
+++ b/command/agent/config_parse_test.go
@@ -286,7 +286,19 @@ func TestConfig_Parse(t *testing.T) {
if (err != nil) != tc.Err {
t.Fatalf("file: %s\n\n%s", tc.File, err)
}
- require.EqualValues(actual, tc.Result)
+
+ //panic(fmt.Sprintf("first: %+v \n second: %+v", actual.TLSConfig, tc.Result.TLSConfig))
+ require.EqualValues(removeHelperAttributes(actual), tc.Result)
})
}
}
+
+// In order to compare the Config struct after parsing, and from generating what
+// is expected in the test, we need to remove helper attributes that are
+// instantiated in the process of parsing the configuration
+func removeHelperAttributes(c *Config) *Config {
+ if c.TLSConfig != nil {
+ c.TLSConfig.KeyLoader = nil
+ }
+ return c
+} | fix up test failure due to keyloader instantiated on tls config during parsing | hashicorp_nomad | train |
822cb3b72954056b6a9952f21e090be5a46fd50a | diff --git a/src/main/java/com/stratio/qa/specs/MarathonSpec.java b/src/main/java/com/stratio/qa/specs/MarathonSpec.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/stratio/qa/specs/MarathonSpec.java
+++ b/src/main/java/com/stratio/qa/specs/MarathonSpec.java
@@ -254,16 +254,16 @@ public class MarathonSpec extends BaseGSpec {
// Set REST connection
commonspec.setCCTConnection(null, null);
position = position != null ? position : 0;
- String ip = getHostIPFromMarathon(internalIP != null, serviceId, taskName, position);
+ String ip = getHostIPFromMarathon(internalIP != null, serviceId, position);
Assert.assertNotNull(ip, "Error obtaining IP");
ThreadProperty.set(envVar, ip);
}
- private String getHostIPFromMarathon(boolean internalIp, String serviceId, String taskName, int position) throws Exception {
+ private String getHostIPFromMarathon(boolean internalIp, String serviceId, int position) throws Exception {
AppResponse app = this.commonspec.marathonClient.getApp(serviceId);
Collection<Task> tasks = app.getApp().getTasks();
Task task = tasks.stream()
- .filter(t -> t.getId().contains(taskName.replaceAll("\\.", "_")))
+ .filter(t -> t.getAppId().equals(serviceId))
.filter(t -> t.getState().equals("TASK_RUNNING"))
.skip(position)
.findFirst() | [SAAS-<I>] Fix step to get IP (internal and external) from Marathon in DCOS <I> (#<I>) | Stratio_bdt | train |
4dfbb05e7f2de50a767c33c7258730e7ebec8999 | diff --git a/src/Router.js b/src/Router.js
index <HASH>..<HASH> 100644
--- a/src/Router.js
+++ b/src/Router.js
@@ -203,8 +203,8 @@ function Router(declarativeStates) {
function notFound(state) {
log('State not found: {0}', state);
- if (options.notFound)
- setState(leafStates[options.notFound] || options.notFound);
+ if (options.notFound)
+ setState(leafStates[options.notFound] || options.notFound, {});
else throw new Error ('State "' + state + '" could not be found');
} | Fix: When entering the notFound state, an RTE is thrown because it has null params | AlexGalays_abyssa-js | train |
5d0a9c08037811ea153abfcd5e58f5f7db8f9751 | diff --git a/cmd/client-s3.go b/cmd/client-s3.go
index <HASH>..<HASH> 100644
--- a/cmd/client-s3.go
+++ b/cmd/client-s3.go
@@ -53,7 +53,6 @@ type s3Client struct {
}
const (
- amazonHostName = "s3.amazonaws.com"
amazonHostNameAccelerated = "s3-accelerate.amazonaws.com"
googleHostName = "storage.googleapis.com"
@@ -109,11 +108,6 @@ func newFactory() func(config *Config) (Client, *probe.Error) {
isS3AcceleratedEndpoint := isAmazonAccelerated(hostName)
if s3Clnt.virtualStyle {
- // If Amazon URL replace it with 's3.amazonaws.com'
- if isAmazon(hostName) || isAmazonAccelerated(hostName) {
- hostName = amazonHostName
- }
-
// If Google URL replace it with 'storage.googleapis.com'
if isGoogle(hostName) {
hostName = googleHostName | Make sure to preserve region specific URLs (#<I>)
This is to ensure that restricted access
to certain buckets is allowed on certain
regions.
This is a continuation of last commit | minio_mc | train |
e30574bb8f9cd677c2dcc6c9bb5ec36378de3f61 | diff --git a/test/support/matchers.rb b/test/support/matchers.rb
index <HASH>..<HASH> 100644
--- a/test/support/matchers.rb
+++ b/test/support/matchers.rb
@@ -56,14 +56,14 @@ module Minitest
msg = message(msg) do
"Expected #{mu_pp(original)} to include #{mu_pp(given)} in order"
end
- assert _includes_in_order(original, given), msg
+ assert _includes_in_order(given, original), msg
end
def refute_includes_in_order(given, original, msg = nil)
msg = message(msg) do
"Expected #{mu_pp(original)} to not include #{mu_pp(given)} in order"
end
- refute _includes_in_order(original, given), msg
+ refute _includes_in_order(given, original), msg
end
def assert_location(file, line)
@@ -80,7 +80,7 @@ module Minitest
private
- def _includes_in_order(original_collection, given_collection)
+ def _includes_in_order(given_collection, original_collection)
given_collection.each_with_index do |given_item, i|
index = case given_item
when String | Switch param order for consistency
It's already hard enough to learn the order of minitest matchers, let
alone if we use different orders. | deivid-rodriguez_byebug | train |
7be97168840431090d73552d8c4477e6447a2e9e | diff --git a/src/Packages/Starter/app/Http/Requests/UserInviteRequest.php b/src/Packages/Starter/app/Http/Requests/UserInviteRequest.php
index <HASH>..<HASH> 100644
--- a/src/Packages/Starter/app/Http/Requests/UserInviteRequest.php
+++ b/src/Packages/Starter/app/Http/Requests/UserInviteRequest.php
@@ -14,7 +14,7 @@ class UserInviteRequest extends FormRequest
*/
public function authorize()
{
- if (Auth::user()->can('admin')) {
+ if (Auth::user()->isTeamAdmin($this->route('id'))) {
return true;
} | Change auth on team invite request to check if the user is team admin rather that system admin | GrafiteInc_Builder | train |
357cb2c97fb2098d4a973d43c7c4fc6c1acb9b46 | diff --git a/cdkmod/cdk/__init__.py b/cdkmod/cdk/__init__.py
index <HASH>..<HASH> 100644
--- a/cdkmod/cdk/__init__.py
+++ b/cdkmod/cdk/__init__.py
@@ -28,16 +28,17 @@ Options:
import subprocess, zipfile
from os.path import dirname, basename, join, abspath, isfile, isdir, expanduser
-from os import mkdir, unlink
+from os import mkdir, unlink, listdir
from shutil import copy
import ConfigParser as cp
from docopt import docopt
LOCATION = abspath(dirname(__file__))
-
PREFS_DIR = expanduser("~/.cdk")
PREFS_FILE = join(PREFS_DIR, "prefs")
+THEMES_DIR = join(LOCATION, "custom", "deck.js", "themes")
+
def set_default_theme(theme):
"""
@@ -89,14 +90,13 @@ def install_theme(path_to_theme):
pref_init()
# cp the file
filename = basename(path_to_theme)
- themes = join(LOCATION, "custom", "deck.js", "themes")
- dest = join(themes, filename)
+ dest = join(THEMES_DIR, filename)
copy(path_to_theme, dest)
# unzip
zf = zipfile.ZipFile(dest)
# should make sure zipfile contains only themename folder which doesn't conflict
# with existing themename. Or some kind of sanity check
- zf.extractall(themes) # plus this is a potential security flaw pre 2.7.4
+ zf.extractall(THEMES_DIR) # plus this is a potential security flaw pre 2.7.4
# remove the copied zipfile
unlink(dest)
@@ -150,7 +150,10 @@ def main():
# Am I going to need validation? No Schema for the moment...
if args['FILE']:
# Great! Run asciidoc with appropriate flags
- cmd = create_command(pick_theme(args['--theme']), args['--bare'])
+ theme = pick_theme(args['--theme'])
+ if theme not in listdir(THEMES_DIR):
+ exit('Selected theme "%s" not found. Check ~/.cdk/prefs' % theme)
+ cmd = create_command(theme, args['--bare'])
run_command(cmd, args)
# other commands
elif args['--install-theme']: | Complain if selected theme doesn't exist | twitter_cdk | train |
36b04a3ef1ca9a865a1f0f7a65824d457342c0bf | diff --git a/test/unit/common_definitions.py b/test/unit/common_definitions.py
index <HASH>..<HASH> 100644
--- a/test/unit/common_definitions.py
+++ b/test/unit/common_definitions.py
@@ -59,7 +59,7 @@ class URLTesterTestBaseMixin(object):
self._test_any_match_returns_false(self.valid_urls)
-class UrlTesterTestMixin(URLTesterTestBaseMixin):
+class URLTesterTestMixin(URLTesterTestBaseMixin):
''' A class providing pre-generated tests for classes
having any_match, filter_matching and lookup_matching
methods '''
@@ -120,7 +120,7 @@ def get_hosts(urls):
return [urlparse(u).hostname for u in urls]
-class HostListTestMixin(UrlTesterTestMixin):
+class HostListTestMixin(URLTesterTestMixin):
''' A common test case for all classes that represent
a host list stored locally or by a remote service '''
valid_host_input = [
diff --git a/test/unit/test_clients.py b/test/unit/test_clients.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_clients.py
+++ b/test/unit/test_clients.py
@@ -13,7 +13,7 @@ from spam_lists.clients import (
)
from test.compat import unittest, Mock, patch
from test.unit.common_definitions import (
- HostListTestMixin, host_list_host_factory, UrlTesterTestMixin
+ HostListTestMixin, host_list_host_factory, URLTesterTestMixin
)
@@ -213,12 +213,12 @@ def create_gsb_post(expected_401, spam_urls, classification):
return post
-class GoogleSafeBrowsingTest(UrlTesterTestMixin, unittest.TestCase):
+class GoogleSafeBrowsingTest(URLTesterTestMixin, unittest.TestCase):
# pylint: disable=too-many-public-methods
''' Tests for GoogleSafeBrowsing class
This class adds an additional test method to the ones provided
- by UrlTesterTestMixin: test_unathorized_query_with. This method
+ by URLTesterTestMixin: test_unathorized_query_with. This method
is used to test methods of GoogleSafeBrowsing class for expected
behaviour while calling Google Safe Browsing lookup API with
an unathorized API key | Rename UrlTesterTestMixin to URLTesterTestMixin | piotr-rusin_spam-lists | train |
b722d5165f2af62c34d95326cace18256fcba932 | diff --git a/src/lib/KevinGH/Amend/Command.php b/src/lib/KevinGH/Amend/Command.php
index <HASH>..<HASH> 100644
--- a/src/lib/KevinGH/Amend/Command.php
+++ b/src/lib/KevinGH/Amend/Command.php
@@ -165,6 +165,8 @@
*/
protected function replace($temp)
{
+ $perms = fileperms($_SERVER['argv'][0]) & 511;
+
if (false === @ rename($temp, $_SERVER['argv'][0]))
{
$error = error_get_last();
@@ -175,5 +177,15 @@
$error['message']
));
}
+
+ if (false === @ chmod($_SERVER['argv'][0], $perms))
+ {
+ $error = error_get_last();
+
+ throw new RuntimeException(sprintf(
+ 'Unable to copy permissions: %s',
+ $error['message']
+ ));
+ }
}
}
\ No newline at end of file
diff --git a/src/tests/KevinGH/Amend/CommandTest.php b/src/tests/KevinGH/Amend/CommandTest.php
index <HASH>..<HASH> 100644
--- a/src/tests/KevinGH/Amend/CommandTest.php
+++ b/src/tests/KevinGH/Amend/CommandTest.php
@@ -11,7 +11,8 @@
namespace KevinGH\Amend;
- use Mock\Command,
+ use Exception,
+ Mock\Command,
Symfony\Component\Console\Application,
Symfony\Component\Console\Command\Command as _Command,
Symfony\Component\Console\Tester\CommandTester;
@@ -139,6 +140,8 @@
)
))));
+ chmod($_SERVER['argv'][0], 0755);
+
$this->tester->execute(array(
'command' => self::NAME
));
@@ -148,6 +151,8 @@
$this->tester->getDisplay()
);
+ $this->assertEquals(0755, fileperms($_SERVER['argv'][0]) & 511);
+
$this->assertEquals('1', file_get_contents($_SERVER['argv'][0]));
}
@@ -188,10 +193,37 @@
* @expectedException RuntimeException
* @expectedExceptionMessage Unable to replace with update
*/
- public function testReplace()
+ public function testReplaceRenameFail()
{
$method = $this->method($this->command, 'replace');
$method('/does/not/exist');
}
+
+ /**
+ * @expectedException RuntimeException
+ * @expectedExceptionMessage Unable to copy permissions
+ */
+ public function testReplaceChmodFail()
+ {
+ if ($this->redeclare('chmod', '', 'return false;'))
+ {
+ return;
+ }
+
+ try
+ {
+ $method = $this->method($this->command, 'replace');
+
+ $method($this->file());
+ }
+
+ catch (Exception $e)
+ {
+ }
+
+ $this->restore('chmod');
+
+ if (isset($e)) throw $e;
+ }
}
\ No newline at end of file | Preserving permissions of replaced file. | deployphp_phar-update | train |
d5b386ac1b36d2fa082e2fc967a7719538026522 | diff --git a/pymc/Matplot.py b/pymc/Matplot.py
index <HASH>..<HASH> 100755
--- a/pymc/Matplot.py
+++ b/pymc/Matplot.py
@@ -420,7 +420,7 @@ def plot(data, name, format='png', suffix='', path='./', common_scale=True, data
# Current subplot number
_num = i % _rows + 1
# Final subplot of current figure?
- _last = not (_num + 1) % (_rows * 2) or (i==len(tdata)-1)
+ _last = (_num==_rows) or (i==len(tdata)-1)
plot(tdata[i], name+'_'+str(i), format=format, path=path, common_scale=common_scale, datarange=datarange, suffix=suffix, new=_new, last=_last, rows=_rows, num=_num) | make Matplot.plot save all figures in a multifigure plot | pymc-devs_pymc | train |
84b5477536461fd53e7664b19933c6cda1d597db | diff --git a/oscrypto/_openssl/asymmetric.py b/oscrypto/_openssl/asymmetric.py
index <HASH>..<HASH> 100644
--- a/oscrypto/_openssl/asymmetric.py
+++ b/oscrypto/_openssl/asymmetric.py
@@ -692,7 +692,7 @@ def load_public_key(source):
source must be a byte string, unicode string or
asn1crypto.keys.PublicKeyInfo object, not %s
''',
- type_name(public_key)
+ type_name(source)
))
if public_key.algorithm == 'dsa': | Line <I>: corrected parameter name (#<I>)
Resolves the following: UnboundLocalError: local variable 'public_key' referenced before assignment | wbond_oscrypto | train |
3e7822980f3172108fa765762a192e35eaa7cef4 | diff --git a/build/export-exchanges.js b/build/export-exchanges.js
index <HASH>..<HASH> 100644
--- a/build/export-exchanges.js
+++ b/build/export-exchanges.js
@@ -109,7 +109,7 @@ function exportSupportedAndCertifiedExchanges (exchanges, { allExchangesPaths, c
// ----------------------------------------------------------------------------
// list all supported exchanges
- const exchangesNotListedInDocs = [ 'okcoinusd', 'okex3' ]
+ const exchangesNotListedInDocs = []
function makeTableData (exchanges) {
return ( | build/export-exchanges okcoinusd → okcoin, okex3 → okex #<I> | ccxt_ccxt | train |
c5ad9b25bf67d381e217a0183448f74bd20b584d | diff --git a/src/imagetilesource.js b/src/imagetilesource.js
index <HASH>..<HASH> 100644
--- a/src/imagetilesource.js
+++ b/src/imagetilesource.js
@@ -72,6 +72,13 @@
$.TileSource.apply(this, [options]);
};
+
+ /* IE8 fix for tileSources type: 'image' */
+ $.getNatural = function (DOMelement) {
+ var img = new Image();
+ img.src = DOMelement.src;
+ return { width: img.width, height: img.height };
+ };
$.extend($.ImageTileSource.prototype, $.TileSource.prototype, /** @lends OpenSeadragon.ImageTileSource.prototype */{
/**
@@ -114,8 +121,8 @@
}
$.addEvent(image, 'load', function () {
- _this.width = image.naturalWidth;
- _this.height = image.naturalHeight;
+ _this.width = $.getNatural(image).width;
+ _this.height = $.getNatural(image).height;
_this.aspectRatio = _this.width / _this.height;
_this.dimensions = new $.Point(_this.width, _this.height);
_this._tileWidth = _this.width;
@@ -202,8 +209,8 @@
_buildLevels: function () {
var levels = [{
url: this._image.src,
- width: this._image.naturalWidth,
- height: this._image.naturalHeight
+ width: $.getNatural(this._image).width,
+ height: $.getNatural(this._image).height
}];
if (!this.buildPyramid || !$.supportsCanvas || !this.useCanvas) { | IE8 fix for tileSources type: 'image' | openseadragon_openseadragon | train |
ccc60bde94c6dd7630d81ba5882b674e0d29498d | diff --git a/andes/models/distributed/dgprct.py b/andes/models/distributed/dgprct.py
index <HASH>..<HASH> 100644
--- a/andes/models/distributed/dgprct.py
+++ b/andes/models/distributed/dgprct.py
@@ -4,7 +4,7 @@ Distributed energy resource protection model base.
from andes.core.param import IdxParam, NumParam, ExtParam
from andes.core.model import Model, ModelData
from andes.core.var import Algeb, ExtAlgeb
-from andes.core.service import ConstService, EventFlag, ExtService
+from andes.core.service import ConstService, EventFlag, ExtService, VarService, ExtendedEvent
from andes.core.discrete import Limiter
from andes.core.block import Integrator
@@ -249,6 +249,28 @@ class DGPRCTBaseModel(Model):
self.ueflag = EventFlag(u=self.ue, tex_name='z^{ue}')
+ # --- debug
+
+ self.uevs = VarService(v_str='ue',
+ info='Voltage before Xc compensation',
+ tex_name='ue VS'
+ )
+ self.uee = EventFlag(u=self.uevs, tex_name='z^{ue}')
+ self.ueee = ExtendedEvent(self.uevs, v_disabled=1)
+
+ self.ob = Algeb(v_str='0',
+ e_str='uevs - ob',
+ info='uevs flag',
+ tex_name=r'ob uevs',
+ )
+ self.ob2 = Algeb(v_str='0',
+ e_str='ueee - ob2',
+ info='lock flag',
+ tex_name=r'ob ueee',
+ )
+
+ # --- debug end
+
# lock DG frequency signal
# fflag option 1: leave source signal online in protection | Added some observations, and will remove later. | cuihantao_andes | train |
32e2028e62d7c3273c7303c79fa8700a4e30a58c | diff --git a/packages/neos-ui-guest-frame/src/InlineUI/index.js b/packages/neos-ui-guest-frame/src/InlineUI/index.js
index <HASH>..<HASH> 100644
--- a/packages/neos-ui-guest-frame/src/InlineUI/index.js
+++ b/packages/neos-ui-guest-frame/src/InlineUI/index.js
@@ -3,11 +3,15 @@ import PropTypes from 'prop-types';
import {connect} from 'react-redux';
import {$transform, $get, $contains} from 'plow-js';
import {actions, selectors} from '@neos-project/neos-ui-redux-store';
+import {neos} from '@neos-project/neos-ui-decorators';
import NodeToolbar from './NodeToolbar/index';
import style from './style.css';
+@neos(globalRegistry => ({
+ nodeTypesRegistry: globalRegistry.get('@neos-project/neos-ui-contentrepository')
+}))
@connect($transform({
focused: $get('cr.nodes.focused'),
focusedNode: selectors.CR.Nodes.focusedSelector,
@@ -22,6 +26,7 @@ export default class InlineUI extends PureComponent {
static propTypes = {
focused: PropTypes.object,
focusedNode: PropTypes.object,
+ nodeTypesRegistry: PropTypes.object,
destructiveOperationsAreDisabled: PropTypes.bool.isRequired,
requestScrollIntoView: PropTypes.func.isRequired,
shouldScrollIntoView: PropTypes.bool.isRequired,
@@ -32,7 +37,12 @@ export default class InlineUI extends PureComponent {
render() {
const focused = this.props.focused.toJS();
const focusedNodeContextPath = focused.contextPath;
- const {shouldScrollIntoView, requestScrollIntoView, destructiveOperationsAreDisabled, clipboardMode, clipboardNodeContextPath} = this.props;
+ const {nodeTypesRegistry, focusedNode, shouldScrollIntoView, requestScrollIntoView, destructiveOperationsAreDisabled, clipboardMode, clipboardNodeContextPath} = this.props;
+ const isDocument = nodeTypesRegistry.hasRole($get('nodeType', focusedNode), 'document');
+ // Don't render toolbar for the document nodes
+ if (isDocument) {
+ return null;
+ }
const isCut = focusedNodeContextPath === clipboardNodeContextPath && clipboardMode === 'Move';
const isCopied = focusedNodeContextPath === clipboardNodeContextPath && clipboardMode === 'Copy';
const canBeDeleted = $get('policy.canRemove', this.props.focusedNode); | BUGFIX: don't show NodeToolbar for document nodes | neos_neos-ui | train |
fb522959999baa1a14dc7790744852c9e90ca1c8 | diff --git a/src/main/java/com/lazerycode/jmeter/configuration/JMeterProcessJVMSettings.java b/src/main/java/com/lazerycode/jmeter/configuration/JMeterProcessJVMSettings.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/lazerycode/jmeter/configuration/JMeterProcessJVMSettings.java
+++ b/src/main/java/com/lazerycode/jmeter/configuration/JMeterProcessJVMSettings.java
@@ -12,8 +12,9 @@ import java.util.List;
* {@code
* <configuration>
* <jMeterProcessJVMSettings>
- * <xms>true</xms>
- * <xmx>true</xmx>
+ * <javaRuntime>{env.JAVA_HOME}/bin/java</javaRuntime>
+ * <xms>512</xms>
+ * <xmx>1024</xmx>
* <arguments>
* <argument>foo</argument>
* </arguments>
@@ -28,6 +29,7 @@ public class JMeterProcessJVMSettings {
private int xms = 512;
private int xmx = 512;
+ private String java = "java";
private List<String> arguments = new ArrayList<String>();
public int getXms() {
@@ -53,4 +55,12 @@ public class JMeterProcessJVMSettings {
public void setArguments(List<String> arguments) {
this.arguments = arguments;
}
+
+ public void setJavaRuntime(String java) {
+ this.java = java;
+ }
+
+ public String getJavaRuntime() {
+ return this.java;
+ }
}
diff --git a/src/main/java/com/lazerycode/jmeter/testrunner/JMeterProcessBuilder.java b/src/main/java/com/lazerycode/jmeter/testrunner/JMeterProcessBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/lazerycode/jmeter/testrunner/JMeterProcessBuilder.java
+++ b/src/main/java/com/lazerycode/jmeter/testrunner/JMeterProcessBuilder.java
@@ -14,6 +14,7 @@ public class JMeterProcessBuilder {
private int initialHeapSizeInMegaBytes;
private int maximumHeapSizeInMegaBytes;
private String workingDirectory;
+ private String javaRuntime;
private List<String> userSuppliedArguments;
private List<String> mainClassArguments = new ArrayList<String>();
@@ -24,6 +25,7 @@ public class JMeterProcessBuilder {
this.initialHeapSizeInMegaBytes = settings.getXms();
this.maximumHeapSizeInMegaBytes = settings.getXmx();
this.userSuppliedArguments = settings.getArguments();
+ this.javaRuntime = settings.getJavaRuntime();
}
public void setWorkingDirectory(File workingDirectory) throws MojoExecutionException {
@@ -41,7 +43,6 @@ public class JMeterProcessBuilder {
}
private String[] constructArgumentsList() {
- String javaRuntime = "java";
String mainClass = "ApacheJMeter.jar";
List<String> argumentsList = new ArrayList<String>(); | Allowing injection of Java to Jmeter | jmeter-maven-plugin_jmeter-maven-plugin | train |
3cd70f00e4f4b79ef7a15470bc7bf2f3e7a3142c | diff --git a/google-cloud-spanner/lib/google/cloud/spanner/pool.rb b/google-cloud-spanner/lib/google/cloud/spanner/pool.rb
index <HASH>..<HASH> 100644
--- a/google-cloud-spanner/lib/google/cloud/spanner/pool.rb
+++ b/google-cloud-spanner/lib/google/cloud/spanner/pool.rb
@@ -59,6 +59,7 @@ module Google
end
def checkout_session
+ action = nil
@mutex.synchronize do
loop do
read_session = session_queue.shift
@@ -66,13 +67,18 @@ module Google
write_transaction = transaction_queue.shift
return write_transaction.session if write_transaction
- return new_session! if can_allocate_more_sessions?
+ if can_allocate_more_sessions?
+ action = :new
+ break
+ end
fail SessionLimitError if @fail
@resource.wait @mutex
end
end
+
+ return new_session! if action == :new
end
def checkin_session session
@@ -99,20 +105,31 @@ module Google
end
def checkout_transaction
+ action = nil
@mutex.synchronize do
loop do
write_transaction = transaction_queue.shift
return write_transaction if write_transaction
read_session = session_queue.shift
- return read_session.create_transaction if read_session
+ if read_session
+ action = read_session
+ break
+ end
- return new_transaction! if can_allocate_more_sessions?
+ if can_allocate_more_sessions?
+ action = :new
+ break
+ end
fail SessionLimitError if @fail
@resource.wait @mutex
end
end
+ if action.is_a? Google::Cloud::Spanner::Session
+ return action.create_transaction
+ end
+ return new_transaction! if action == :new
end
def checkin_transaction tx
@@ -154,16 +171,26 @@ module Google
@transaction_queue = []
ensure_valid_thread!
# init session queue
- @min.times { session_queue << new_session! }
+ @min.times do
+ s = new_session!
+ @mutex.synchronize do
+ session_queue << s
+ end
+ end
# init transaction queue
(@min * @write_ratio).round.times do
- transaction_queue << checkout_session.create_transaction
+ tx = checkout_session.create_transaction
+ @mutex.synchronize do
+ transaction_queue << tx
+ end
end
end
def new_session!
session = @client.create_new_session
- all_sessions << session
+ @mutex.synchronize do
+ all_sessions << session
+ end
session
end
@@ -172,6 +199,7 @@ module Google
end
def can_allocate_more_sessions?
+ # This is expected to be called from within a synchronize block
all_sessions.size < @max
end | Create new sessions/transactions outside of mutex | googleapis_google-cloud-ruby | train |
b0409b55c673c63c366f6a15217cca2991ef8fc9 | diff --git a/lib/ronin/program/commands/install.rb b/lib/ronin/program/commands/install.rb
index <HASH>..<HASH> 100644
--- a/lib/ronin/program/commands/install.rb
+++ b/lib/ronin/program/commands/install.rb
@@ -30,30 +30,31 @@ module Ronin
command :install
- options('URI [options]') do |opts|
- opts.settings.media = nil
- opts.settings.uri = nil
+ def define_options(opts)
+ opts.usage = 'URI [options]'
opts.options do
opts.on('-m','--media [MEDIA]','Spedify the media-type of the repository') do |media|
- options.settings.media = media
+ @media = media
end
end
- opts.arguments do
- opts.arg('URI','The URI of the repository to install')
- end
+ opts.arguments {
+ 'URI' => 'The URI of the repository to install'
+ }
opts.summary('Installs the repository located at the specified URI')
end
def arguments(args)
- unless args.length==1
- fail('install: only one repository URI maybe specified')
+ unless args.length == 1
+ fail('only one repository URI maybe specified')
end
+ uri = args.first
+
Cache::Overlay.save_cache do
- Cache::Overlay.install(:uri => args.first, :media => options.settings.media) do |repo|
+ Cache::Overlay.install(:uri => uri, :media => @media) do |repo|
puts "Overlay #{repo} has been installed."
end
end | Refactored the InstallCommand. | ronin-ruby_ronin | train |
84dc163089881dad55ba1cf9d9bbbe1ee573bf78 | diff --git a/builtin/providers/google/resource_compute_snapshot.go b/builtin/providers/google/resource_compute_snapshot.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/google/resource_compute_snapshot.go
+++ b/builtin/providers/google/resource_compute_snapshot.go
@@ -40,24 +40,24 @@ func resourceComputeSnapshot() *schema.Resource {
Computed: true,
},
- "sourcedisk_encryption_key_raw": &schema.Schema{
+ "source_disk_encryption_key_raw": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Sensitive: true,
},
- "sourcedisk_encryption_key_sha256": &schema.Schema{
+ "source_disk_encryption_key_sha256": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
- "sourcedisk_id": &schema.Schema{
+ "source_disk_id": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
- "sourcedisk": &schema.Schema{
+ "source_disk": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
@@ -102,7 +102,7 @@ func resourceComputeSnapshotCreate(d *schema.ResourceData, meta interface{}) err
snapshot.SnapshotEncryptionKey.RawKey = v.(string)
}
- if v, ok := d.GetOk("sourcedisk_encryption_key_raw"); ok {
+ if v, ok := d.GetOk("source_disk_encryption_key_raw"); ok {
snapshot.SourceDiskEncryptionKey = &compute.CustomerEncryptionKey{}
snapshot.SourceDiskEncryptionKey.RawKey = v.(string)
}
diff --git a/builtin/providers/google/resource_compute_snapshot_test.go b/builtin/providers/google/resource_compute_snapshot_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/google/resource_compute_snapshot_test.go
+++ b/builtin/providers/google/resource_compute_snapshot_test.go
@@ -152,7 +152,7 @@ resource "google_compute_snapshot" "foobar" {
name = "%s"
disk = "${google_compute_disk.foobar.name}"
zone = "us-central1-a"
- sourcedisk_encryption_key_raw = "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0="
+ source_disk_encryption_key_raw = "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0="
snapshot_encryption_key_raw = "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0="
}`, diskName, snapshotName)
} | Review by @paddyforan: Rename sourcedisk to source_disk | hashicorp_terraform | train |
7249ed0af042cf3b902b2a84ff218533ae628fc7 | diff --git a/packages/shared-components/src/components/Steps/styles/StepContent.js b/packages/shared-components/src/components/Steps/styles/StepContent.js
index <HASH>..<HASH> 100644
--- a/packages/shared-components/src/components/Steps/styles/StepContent.js
+++ b/packages/shared-components/src/components/Steps/styles/StepContent.js
@@ -1,10 +1,12 @@
import styled from 'styled-components';
import StepContainer from './StepContainer';
+import themeGet from 'extensions/themeGet';
export default styled.div`
${StepContainer} > & {
flex: 1 0 0;
- margin-top: 0.5em;
- margin-left: 1em;
+ margin-top: auto;
+ margin-bottom: auto;
+ margin-left: ${themeGet('spacing.small')};
}
`;
diff --git a/packages/shared-components/src/components/Steps/styles/StepNumber.js b/packages/shared-components/src/components/Steps/styles/StepNumber.js
index <HASH>..<HASH> 100644
--- a/packages/shared-components/src/components/Steps/styles/StepNumber.js
+++ b/packages/shared-components/src/components/Steps/styles/StepNumber.js
@@ -3,7 +3,7 @@ import icons from 'components/Icon/icons';
import get from 'extensions/themeGet';
import StepContainer from './StepContainer';
-const NUMBER_SIZE = '2.5em';
+const CIRCLE_SIZE = 32;
export default styled.div.attrs({
number: props => (props.complete ? icons('checkmark') : props.number),
@@ -11,21 +11,25 @@ export default styled.div.attrs({
props.complete ? get('fonts.icon')(props) : get('fonts.default')(props),
numberColor: props =>
props.complete
- ? get('colors.positive.dark')(props)
+ ? get('colors.text.inverted')(props)
: get('colors.text.default')(props),
numberBorderColor: props =>
props.complete
- ? get('colors.positive.border')(props)
- : get('colors.shadow.light')(props),
+ ? get('colors.positive.default')(props)
+ : get('colors.border.light')(props),
})`
&:before {
display: inline-block;
+ font-size: ${(CIRCLE_SIZE * 2) / 3}px;
content: "${props => props.number}";
font-family: ${props => props.numberFont};
- width: ${NUMBER_SIZE};
- height: ${NUMBER_SIZE};
- line-height: ${NUMBER_SIZE};
- background-color: ${get('colors.background.default')};
+ width: ${CIRCLE_SIZE}px;
+ height: ${CIRCLE_SIZE}px;
+ line-height: ${CIRCLE_SIZE}px;
+ background-color: ${props =>
+ props.complete
+ ? get('colors.positive.default')(props)
+ : get('colors.background.default')(props)};
border-radius: 2em;
border-width: ${get('thicknesses.normal')};
border-style: solid; | recolor and fix the size of steps | Bandwidth_shared-components | train |
1261c641907237787f20886d999d4eb467b3a59d | diff --git a/component-4-comp-2-lookup.js b/component-4-comp-2-lookup.js
index <HASH>..<HASH> 100644
--- a/component-4-comp-2-lookup.js
+++ b/component-4-comp-2-lookup.js
@@ -7,7 +7,7 @@
** file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
-/* top-level API: lookup component by path */
+/* lookup component by path */
_cs.lookup = function (base, path) {
/* handle special calling conventions */
if (arguments.length === 1) {
@@ -51,50 +51,10 @@ _cs.lookup = function (base, path) {
comp = base;
}
- /* lookup component(s) at "comp", reachable via path segment "path[i]" */
- var _lookup = function (result, comp, path, i) {
- if (i >= path.length)
- /* stop recursion */
- result.push(comp);
- else if (path[i] === ".")
- /* CASE 1: current component (= no-op) */
- _lookup(result, comp, path, i + 1);
- else if (path[i] === "..") {
- /* CASE 2: parent component */
- if (comp.parent() !== null)
- _lookup(result, comp.parent(), path, i + 1); /* RECURSION */
- }
- else if (path[i] === "*") {
- /* CASE 3: all child components */
- var children = comp.children();
- for (var j = 0; j < children.length; j++)
- _lookup(result, children[j], path, i + 1); /* RECURSION */
- }
- else if (path[i] === "") {
- /* CASE 4: all descendent components */
- var nodes = comp.walk_down(function (depth, node, nodes, depth_first) {
- if (!depth_first)
- nodes.push(node);
- return nodes;
- }, []);
- for (var j = 0; j < nodes.length; j++)
- _lookup(result, nodes[j], path, i + 1); /* RECURSION */
- }
- else {
- /* CASE 5: a specific child component */
- var children = comp.children();
- for (var j = 0; j < children.length; j++) {
- if (children[j].name() === path[i]) {
- _lookup(result, children[j], path, i + 1); /* RECURSION */
- break;
- }
- }
- }
- };
if (path !== "") {
/* lookup components */
var comps = []
- _lookup(comps, comp, path.split("/"), 0);
+ _cs.lookup_step(comps, comp, path.split("/"), 0);
/* post-process component result set */
if (comps.length === 0)
@@ -133,3 +93,44 @@ _cs.lookup = function (base, path) {
return comp;
};
+/* lookup component(s) at "comp", reachable via path segment "path[i]" */
+_cs.lookup_step = function (result, comp, path, i) {
+ if (i >= path.length)
+ /* stop recursion */
+ result.push(comp);
+ else if (path[i] === ".")
+ /* CASE 1: current component (= no-op) */
+ _cs.lookup_step(result, comp, path, i + 1); /* RECURSION */
+ else if (path[i] === "..") {
+ /* CASE 2: parent component */
+ if (comp.parent() !== null)
+ _cs.lookup_step(result, comp.parent(), path, i + 1); /* RECURSION */
+ }
+ else if (path[i] === "*") {
+ /* CASE 3: all child components */
+ var children = comp.children();
+ for (var j = 0; j < children.length; j++)
+ _cs.lookup_step(result, children[j], path, i + 1); /* RECURSION */
+ }
+ else if (path[i] === "") {
+ /* CASE 4: all descendent components */
+ var nodes = comp.walk_down(function (depth, node, nodes, depth_first) {
+ if (!depth_first)
+ nodes.push(node);
+ return nodes;
+ }, []);
+ for (var j = 0; j < nodes.length; j++)
+ _cs.lookup_step(result, nodes[j], path, i + 1); /* RECURSION */
+ }
+ else {
+ /* CASE 5: a specific child component */
+ var children = comp.children();
+ for (var j = 0; j < children.length; j++) {
+ if (children[j].name() === path[i]) {
+ _cs.lookup_step(result, children[j], path, i + 1); /* RECURSION */
+ break;
+ }
+ }
+ }
+};
+ | refactor lookup step function into own function | rse_componentjs | train |
97add4620afa35f203e5295fbfac20e9ed6524d0 | diff --git a/packages/internal-test-helpers/lib/module-for.js b/packages/internal-test-helpers/lib/module-for.js
index <HASH>..<HASH> 100644
--- a/packages/internal-test-helpers/lib/module-for.js
+++ b/packages/internal-test-helpers/lib/module-for.js
@@ -62,6 +62,8 @@ export default function moduleFor(description, TestClass, ...mixins) {
function generateTest(name) {
if (name.indexOf('@test ') === 0) {
QUnit.test(name.slice(5), assert => context[name](assert));
+ } else if (name.indexOf('@only ') === 0) {
+ QUnit.only(name.slice(5), assert => context[name](assert));
} else if (name.indexOf('@skip ') === 0) {
QUnit.skip(name.slice(5), assert => context[name](assert));
} else { | Support @only => QUnit.only | emberjs_ember.js | train |
c3ee8ecc4d211d456a4afc092ca2af679ed8bbc1 | diff --git a/picopt/detect_format.py b/picopt/detect_format.py
index <HASH>..<HASH> 100644
--- a/picopt/detect_format.py
+++ b/picopt/detect_format.py
@@ -28,7 +28,6 @@ def is_format_selected(image_format, formats, progs):
was selected by the command line arguments"""
intersection = formats & Settings.formats
mode = is_program_selected(progs)
-
result = (image_format in intersection) and mode
return result
@@ -75,7 +74,6 @@ def get_image_format(filename):
def detect_file(filename):
"""decides what to do with the file"""
image_format = get_image_format(filename)
- print(Settings.formats)
if image_format in Settings.formats:
return image_format | remove debugging print from detect_format | ajslater_picopt | train |
2fdc949f40b27ad934bb67fc8e55755560d7231a | diff --git a/src/zh-TW/validation.php b/src/zh-TW/validation.php
index <HASH>..<HASH> 100644
--- a/src/zh-TW/validation.php
+++ b/src/zh-TW/validation.php
@@ -22,29 +22,29 @@ return [
'array' => ':attribute 必須為陣列。',
'before' => ':attribute 必須要在 :date 之前。',
'between' => [
- 'numeric' => ':attribute 必須介乎 :min 至 :max 之間。',
- 'file' => ':attribute 必須介乎 :min 至 :max kb 之間。 ',
- 'string' => ':attribute 必須介乎 :min 至 :max 個字元之間。',
+ 'numeric' => ':attribute 必須介於 :min 至 :max 之間。',
+ 'file' => ':attribute 必須介於 :min 至 :max kb 之間。 ',
+ 'string' => ':attribute 必須介於 :min 至 :max 個字元之間。',
'array' => ':attribute: 必須有 :min - :max 個元素。',
],
'boolean' => ':attribute 必須為bool值。',
- 'confirmed' => ':attribute 確認欄位的輸入並不相符。',
+ 'confirmed' => ':attribute 確認欄位的輸入不一致。',
'date' => ':attribute 並非一個有效的日期。',
- 'date_format' => ':attribute 與 :format 格式不相符。',
+ 'date_format' => ':attribute 不符合 :format 的格式。',
'different' => ':attribute 與 :other 必須不同。',
'digits' => ':attribute 必須是 :digits 位數字。',
- 'digits_between' => ':attribute 必須介乎 :min 至 :max 位數字。',
+ 'digits_between' => ':attribute 必須介於 :min 至 :max 位數字。',
'dimensions' => ':attribute 圖片尺寸不正確。',
'distinct' => ':attribute 已經存在。',
'email' => ':attribute 的格式無效。',
'exists' => '所選擇的 :attribute 選項無效。',
- 'file' => ':attribute 必須是文件。',
+ 'file' => ':attribute 必須是一個檔案。',
'filled' => ':attribute 不能留空。',
'image' => ':attribute 必須是一張圖片。',
'in' => '所選擇的 :attribute 選項無效。',
'in_array' => ':attribute 沒有在 :other 中。',
'integer' => ':attribute 必須是一個整數。',
- 'ip' => ':attribute 必須是一個有效的 IP 地址。',
+ 'ip' => ':attribute 必須是一個有效的 IP 位址。',
'json' => ':attribute 必須是正確的 JSON 字串。',
'max' => [
'numeric' => ':attribute 不能大於 :max。',
@@ -120,7 +120,7 @@ return [
'date' => '日期',
'day' => '天',
'description' => '描述',
- 'email' => '郵箱',
+ 'email' => '電子郵件',
'excerpt' => '摘要',
'first_name' => '名',
'gender' => '性別',
@@ -138,7 +138,7 @@ return [
'size' => '大小',
'time' => '時間',
'title' => '標題',
- 'username' => '使用者名',
+ 'username' => '使用者名字',
'year' => '年',
], | Update validation.php
Hello, just updated some translations of Taiwanese Chinese | caouecs_Laravel-lang | train |
6a55053422b26bdf7f0cdf0323823b65caee2dc2 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,6 +14,7 @@ setup(
'python-swiftclient>=1.4.0',
'python-keystoneclient>=0.2.3',
'six',
+ 'python-magic>=0.4.10',
],
zip_safe=False,
classifiers=[
diff --git a/swift/storage.py b/swift/storage.py
index <HASH>..<HASH> 100644
--- a/swift/storage.py
+++ b/swift/storage.py
@@ -1,5 +1,6 @@
import hmac
import mimetypes
+import magic
import os
import re
from datetime import datetime
@@ -49,6 +50,7 @@ class SwiftStorage(Storage):
auth_token_duration = setting('SWIFT_AUTH_TOKEN_DURATION', 60 * 60 * 23)
os_extra_options = setting('SWIFT_EXTRA_OPTIONS', {})
auto_overwrite = setting('SWIFT_AUTO_OVERWRITE', False)
+ content_type_from_fd = setting('SWIFT_CONTENT_TYPE_FROM_FD', False)
_token_creation_time = 0
_token = ''
name_prefix = setting('SWIFT_NAME_PREFIX')
@@ -161,7 +163,12 @@ class SwiftStorage(Storage):
if self.name_prefix:
name = self.name_prefix + name
- content_type = mimetypes.guess_type(name)[0]
+ if self.content_type_from_fd:
+ content_type = magic.from_buffer(content.read(1024), mime=True)
+ # Go back to the beginning of the file
+ content.seek(0)
+ else:
+ content_type = mimetypes.guess_type(name)[0]
swiftclient.put_object(self.storage_url,
self.token,
self.container_name, | added option to get mimetype from file content | dennisv_django-storage-swift | train |
590c377e7816212adb7b76b4aab127a4a7badcae | diff --git a/pythonforandroid/bootstraps/pygame/build/build.py b/pythonforandroid/bootstraps/pygame/build/build.py
index <HASH>..<HASH> 100755
--- a/pythonforandroid/bootstraps/pygame/build/build.py
+++ b/pythonforandroid/bootstraps/pygame/build/build.py
@@ -68,7 +68,7 @@ def render(template, dest, **kwargs):
template = environment.get_template(template)
text = template.render(**kwargs)
- f = file(dest, 'wb')
+ f = open(dest, 'wb')
f.write(text.encode('utf-8'))
f.close()
@@ -224,9 +224,9 @@ def make_package(args):
args.numeric_version = str(version_code)
- args.name = args.name.decode('utf-8')
- if args.icon_name:
- args.icon_name = args.icon_name.decode('utf-8')
+ # args.name = args.name.decode('utf-8')
+ # if args.icon_name:
+ # args.icon_name = args.icon_name.decode('utf-8')
versioned_name = (args.name.replace(' ', '').replace('\'', '') +
'-' + args.version)
@@ -306,8 +306,8 @@ def make_package(args):
subprocess.call([ANDROID, 'update', 'project', '-p', '.', '-t',
'android-{}'.format(args.sdk_version)])
except (OSError, IOError):
- print 'An error occured while calling', ANDROID, 'update'
- print 'Your PATH must include android tools.'
+ print('An error occured while calling', ANDROID, 'update')
+ print('Your PATH must include android tools.')
sys.exit(-1)
# Delete the old assets.
@@ -346,7 +346,7 @@ def make_package(args):
if args.add_jar:
for jarname in args.add_jar:
if not os.path.exists(jarname):
- print 'Requested jar does not exist: {}'.format(jarname)
+ print('Requested jar does not exist: {}'.format(jarname))
sys.exit(-1)
shutil.copy(jarname, 'libs')
@@ -355,8 +355,8 @@ def make_package(args):
for arg in args.command:
subprocess.check_call([ANT, arg])
except (OSError, IOError):
- print 'An error occured while calling', ANT
- print 'Did you install ant on your system ?'
+ print('An error occured while calling', ANT)
+ print('Did you install ant on your system ?')
sys.exit(-1)
def parse_args(args=None): | Further fixes for running under py3 | kivy_python-for-android | train |
4a1549bdb3dd2f35a29238ebb7d0be33cb078140 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -7,8 +7,12 @@ module.exports = postcss.plugin('postcss-parent-selector', function(opts) {
return function(root, result) {
root.walkRules(rule => {
+ if (rule.parent && rule.parent.type === 'atrule' &&
+ rule.parent.name.indexOf('keyframes') !== -1) {
+ return
+ }
rule.selectors = rule.selectors.map(selector => {
- return selector.split(/,[\s]* /g).map( selector => {git co
+ return selector.split(/,[\s]* /g).map( selector => {
var newsSelector = `${opts.selector} ${selector}`
return newsSelector
}); | fixed error where the parent class was being added to @keyframe contents | domwashburn_postcss-parent-selector | train |
a96f34a47a0398f64ec4b1d14c4bd5ba26cd9505 | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -58,30 +58,32 @@ function displayCoverageReport (display) {
gulp.task('browserify', function (cb) {
function browserifyBuild (isStandalone, useDebug) {
- return new Promise(function (resolve, reject) {
- var b = browserify('./index.js', {
- debug: useDebug,
- standalone: 'JsonRefs'
- });
+ return function () {
+ return new Promise(function (resolve, reject) {
+ var b = browserify('./index.js', {
+ debug: useDebug,
+ standalone: 'JsonRefs'
+ });
- if (!isStandalone) {
- // Expose Bower modules so they can be required
- exposify.config = {
- 'path-loader': 'PathLoader',
- 'traverse': 'traverse'
- };
+ if (!isStandalone) {
+ // Expose Bower modules so they can be required
+ exposify.config = {
+ 'path-loader': 'PathLoader',
+ 'traverse': 'traverse'
+ };
- b.transform('exposify');
- }
+ b.transform('exposify');
+ }
- b.bundle()
- .pipe(source('json-refs' + (isStandalone ? '-standalone' : '') + (!useDebug ? '-min' : '') + '.js'))
- .pipe($.if(!useDebug, buffer()))
- .pipe($.if(!useDebug, $.uglify()))
- .pipe(gulp.dest('browser/'))
- .on('error', reject)
- .on('end', resolve);
- });
+ b.bundle()
+ .pipe(source('json-refs' + (isStandalone ? '-standalone' : '') + (!useDebug ? '-min' : '') + '.js'))
+ .pipe($.if(!useDebug, buffer()))
+ .pipe($.if(!useDebug, $.uglify()))
+ .pipe(gulp.dest('browser/'))
+ .on('error', reject)
+ .on('end', resolve);
+ });
+ };
}
Promise.resolve() | Fixed Gulp build process for browser testing | whitlockjc_json-refs | train |
43a0d82f8dc9e40e89d07f5cc6bf9ede27918c68 | diff --git a/www/src/pages/components/modal.js b/www/src/pages/components/modal.js
index <HASH>..<HASH> 100644
--- a/www/src/pages/components/modal.js
+++ b/www/src/pages/components/modal.js
@@ -69,7 +69,7 @@ export default withLayout(function ModalSection({ data }) {
</LinkedHeading>
<p>
Below is a <em>static</em> modal dialog (without the positioning) to
- demostrate the look and feel of the Modal.
+ demonstrate the look and feel of the Modal.
</p>
<ReactPlayground codeText={ModalStatic} /> | Fix spelling. (#<I>)
"demostrate" -> "demonstrate". | react-bootstrap_react-bootstrap | train |
79831f9a40be3f26ec1408fe343987e5bf229699 | diff --git a/loam/manager.py b/loam/manager.py
index <HASH>..<HASH> 100644
--- a/loam/manager.py
+++ b/loam/manager.py
@@ -314,19 +314,20 @@ class ConfigurationManager:
for sub, opt, meta in self.defaults_():
self[sub][opt] = meta.default
- def create_config_(self, update=False):
+ def create_config_(self, index=0, update=False):
"""Create config file.
- Create a config file at the first path in :attr:`config_files_`.
+ Create config file in :attr:`config_files_[index]`.
Parameters:
+ index(int): index of config file.
update (bool): if set to True and :attr:`config_files_` already
exists, its content is read and all the options it sets are
kept in the produced config file.
"""
- if not self.config_files_:
+ if not self.config_files_[index:]:
return
- path = self.config_files_[0]
+ path = self.config_files_[index]
if not path.parent.exists():
path.parent.mkdir(parents=True)
conf_dict = {}
diff --git a/loam/tools.py b/loam/tools.py
index <HASH>..<HASH> 100644
--- a/loam/tools.py
+++ b/loam/tools.py
@@ -80,7 +80,10 @@ def config_conf_section():
config_dict = OrderedDict((
('create',
ConfOpt(None, True, None, {'action': 'store_true'},
- False, 'create new config file')),
+ False, 'create most global config file')),
+ ('create_local',
+ ConfOpt(None, True, None, {'action': 'store_true'},
+ False, 'create most local config file')),
('update',
ConfOpt(None, True, None, {'action': 'store_true'},
False, 'add missing entries to config file')),
@@ -150,9 +153,11 @@ def config_cmd_handler(conf, config='config'):
:func:`config_conf_section` function.
"""
if conf[config].create or conf[config].update:
- conf.create_config_(conf[config].update)
+ conf.create_config_(update=conf[config].update)
+ if conf[config].create_local:
+ conf.create_config_(index=-1, update=conf[config].update)
if conf[config].edit:
if not conf.config_files_[0].is_file():
- conf.create_config_(conf[config].update)
+ conf.create_config_(update=conf[config].update)
call(shlex.split('{} {}'.format(conf[config].editor,
conf.config_files_[0]))) | create_config_ method learned the index argument | amorison_loam | train |
9989dab261fd9673a0996de029c6c2cb7e6e9a3c | diff --git a/ensutils.js b/ensutils.js
index <HASH>..<HASH> 100644
--- a/ensutils.js
+++ b/ensutils.js
@@ -905,4 +905,5 @@ var resolverContract = web3.eth.contract([
"payable": false,
"type": "fallback"
}
-]);
\ No newline at end of file
+]);
+var publicResolver = resolverContract.at('0x71e122fc87aa184b966dfaaa81f9f37f45da9bae'); | Add publicResolver instance available on testnet | ensdomains_ens | train |
21f265528074690eaa287037b5d41066b5cb4dcf | diff --git a/validator_watcher_copy_test.go b/validator_watcher_copy_test.go
index <HASH>..<HASH> 100644
--- a/validator_watcher_copy_test.go
+++ b/validator_watcher_copy_test.go
@@ -1,5 +1,4 @@
-// +build go1.3
-// +build !plan9,!solaris,!windows
+// +build go1.3,!plan9,!solaris,!windows
// Turns out you can't copy over an existing file on Windows.
diff --git a/validator_watcher_test.go b/validator_watcher_test.go
index <HASH>..<HASH> 100644
--- a/validator_watcher_test.go
+++ b/validator_watcher_test.go
@@ -1,5 +1,4 @@
-// +build go1.3
-// +build !plan9,!solaris
+// +build go1.3,!plan9,!solaris
package main
diff --git a/watcher.go b/watcher.go
index <HASH>..<HASH> 100644
--- a/watcher.go
+++ b/watcher.go
@@ -1,5 +1,4 @@
-// +build go1.3
-// +build !plan9,!solaris
+// +build go1.3,!plan9,!solaris
package main
diff --git a/watcher_unsupported.go b/watcher_unsupported.go
index <HASH>..<HASH> 100644
--- a/watcher_unsupported.go
+++ b/watcher_unsupported.go
@@ -1,5 +1,4 @@
-// +build go1.1
-// +build plan9,solaris
+// +build !go1.3 plan9 solaris
package main
@@ -7,7 +6,7 @@ import (
"log"
)
-func WatchForUpdates(filename string, action func()) bool {
+func WatchForUpdates(filename string, done <-chan bool, action func()) bool {
log.Printf("file watching not implemented on this platform")
return false
} | Fix unsupported WatchForUpdates and build tags
Closes #<I>.
The `go<I>` constraint has been updated to `!go<I>` per
<URL>` in `watcher_unsupported.go` was stale. | bitly_oauth2_proxy | train |
7aa83cb7b8903422bb2530042f11be7366b261bb | diff --git a/src/pixi/renderers/canvas/CanvasRenderer.js b/src/pixi/renderers/canvas/CanvasRenderer.js
index <HASH>..<HASH> 100644
--- a/src/pixi/renderers/canvas/CanvasRenderer.js
+++ b/src/pixi/renderers/canvas/CanvasRenderer.js
@@ -240,10 +240,11 @@ PIXI.CanvasRenderer.prototype.render = function(stage)
}
// remove frame updates.. // removeing for now...
- //if(PIXI.Texture.frameUpdates.length > 0)
- //{
- // PIXI.Texture.frameUpdates.length = 0;
- //}
+ // TODO remove this eventually!
+ if(PIXI.Texture.frameUpdates.length > 0)
+ {
+ PIXI.Texture.frameUpdates.length = 0;
+ }
};
/** | Added frame clear back in
for now.. | pixijs_pixi.js | train |
a61f5ac5e28ccb566bf463baa312aca032e748ec | diff --git a/lib/dapplerc.js b/lib/dapplerc.js
index <HASH>..<HASH> 100644
--- a/lib/dapplerc.js
+++ b/lib/dapplerc.js
@@ -13,9 +13,9 @@ module.exports = class DappleRC {
static writeSync(path, data) {
return fs.writeYamlSync(path, data);
}
-
- validateSelf() {
- var valid = tv4.validate( this.data, dapplercSchema );
+
+ static validate( rc ) {
+ var valid = tv4.validate( rc, dapplercSchema );
if( !valid ) {
// TODO: implement a custom error reporter which is displaying a
// human readable message, the caused data which is foud ad `dataPath`
@@ -26,6 +26,10 @@ module.exports = class DappleRC {
"error in data: "+tv4.error.dataPath + "\n"+
"error in schema: "+tv4.error.schemaPath);
}
+ }
+
+ validateSelf() {
+ DappleRC.validate( this.data );
}
constructor(opts) {
diff --git a/lib/dapplerc_prompter.js b/lib/dapplerc_prompter.js
index <HASH>..<HASH> 100644
--- a/lib/dapplerc_prompter.js
+++ b/lib/dapplerc_prompter.js
@@ -3,6 +3,8 @@
var DappleRC = require('../lib/dapplerc.js');
var deasync = require('deasync');
var inquirer = require('inquirer');
+var defaults = require('json-schema-defaults');
+var dapplercSchema = require('../specs/dapplerc.json');
module.exports = class DappleRCPrompter {
static prompt() {
@@ -36,24 +38,23 @@ module.exports = class DappleRCPrompter {
var rc;
var done = false;
inquirer.prompt(questions, function(res) {
- rc = {
- environments: {
- default: {
- ethereum: "internal",
- ipfs: {
- host: res.ipfs_host,
- port: res.ipfs_port
- }
- },
- live: {
- ethereum: {
- host: res.eth_host,
- port: res.eth_port
- },
- },
- evm: "default"
- }
- };
+
+ rc = defaults( dapplercSchema );
+
+ // TODO - add ipfs to default environment?
+ // why is ipfs bound to an environment anyway?
+
+ rc.environments.live = {
+ ethereum: {
+ host: res.eth_host,
+ port: res.eth_port
+ },
+ ipfs: {
+ host: res.ipfs_host,
+ port: res.ipfs_port
+ }
+ }
+
done = true;
});
deasync.loopWhile(function() {return !done;})
diff --git a/specs/dapplerc.json b/specs/dapplerc.json
index <HASH>..<HASH> 100644
--- a/specs/dapplerc.json
+++ b/specs/dapplerc.json
@@ -5,6 +5,12 @@
"environments": {
"title": "global environment specifications",
"type": "object",
+ "default": {
+ "default": {
+ "ethereum": "internal"
+ },
+ "evm": "default"
+ },
"patternProperties": {
"^((?!evm)\\w)+$": {
"type": "object",
@@ -44,7 +50,8 @@
},
"properties": {
"evm": {
- "type": "string"
+ "type": "string",
+ "default": "default"
}
}
} | generate and validate dapplerc with json-schema | dapphub_dapple | train |
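
The commit seeds the generated dapplerc from the JSON-schema defaults (json-schema-defaults) and validates it with tv4. A rough Python counterpart of the two steps, assuming the jsonschema package as the validator and using a toy schema rather than the real dapplerc spec:

import jsonschema

def defaults_from(schema):
    if "default" in schema:
        return schema["default"]
    if schema.get("type") == "object":
        return {key: defaults_from(sub)
                for key, sub in schema.get("properties", {}).items()
                if "default" in sub or sub.get("type") == "object"}
    return None

SCHEMA = {
    "type": "object",
    "properties": {
        "environments": {
            "type": "object",
            "default": {"default": {"ethereum": "internal"}, "evm": "default"},
        },
    },
}

rc = defaults_from(SCHEMA)
jsonschema.validate(rc, SCHEMA)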
4c08c7acbe5b39a96042651426884a79b1e29d5f | diff --git a/pyaas/web/application.py b/pyaas/web/application.py
index <HASH>..<HASH> 100644
--- a/pyaas/web/application.py
+++ b/pyaas/web/application.py
@@ -62,6 +62,10 @@ class Application(tornado.web.Application):
if pyaas.args.debug:
self.settings['debug'] = True
+ self.patterns.append(
+ ( r'/src/(.*)', pyaas.web.handlers.Source ),
+ )
+
authModules = pyaas.module.PyaasModule.CLASSES.get('AuthModule', None)
if authModules:
for (name,authModule) in authModules.items():
diff --git a/pyaas/web/handlers/index.py b/pyaas/web/handlers/index.py
index <HASH>..<HASH> 100644
--- a/pyaas/web/handlers/index.py
+++ b/pyaas/web/handlers/index.py
@@ -1,7 +1,11 @@
+import os
+
import tornado.web
-from pyaas.web.handlers import Base
+import pyaas
+
+from . import Base
class Index(Base):
@@ -21,3 +25,21 @@ class Protected(Base):
def get(self, template=None):
template = template + '.html' if template else self.template
self.render(template)
+
+
+class Source(Base):
+ def get(self, src=''):
+ srcdir = os.path.join(pyaas.prefix, 'src')
+ src = os.path.join(srcdir, src)
+ src = os.path.abspath(src)
+
+ if not src.startswith(srcdir):
+ raise tornado.web.HTTPError(404, 'Path escape attempt detected')
+
+ try:
+ data = open(src, 'rb').read()
+ except:
+ raise tornado.web.HTTPError(404, 'File not found')
+
+ self.set_header('Content-Type', 'text/plain')
+ self.write(data) | with debug enabled expose src directory for javascript maps | moertle_pyaas | train |
7dee0e50ce8a6c84499bb9670bf469016010801a | diff --git a/lib/PathParser.js b/lib/PathParser.js
index <HASH>..<HASH> 100755
--- a/lib/PathParser.js
+++ b/lib/PathParser.js
@@ -105,6 +105,12 @@ class PathParser {
const handler = this._handler;
const methodName = PathParser.METHODNAME[mode];
+ // convert types for arcs
+ if (mode === "a" || mode === "A") {
+ params[3] = params[3] !== 0;
+ params[4] = params[4] !== 0;
+ }
+
if (handler !== null && typeof handler[methodName] === "function") {
handler[methodName](...params);
} | Cast arc flags to booleans | thelonious_kld-path-parser | train |
98174429abd5865ed698f29bec59299290fb7e05 | diff --git a/public/js/editors/libraries.js b/public/js/editors/libraries.js
index <HASH>..<HASH> 100644
--- a/public/js/editors/libraries.js
+++ b/public/js/editors/libraries.js
@@ -108,8 +108,8 @@ var libraries = [
{
'url': [
'//code.jquery.com/jquery.min.js',
- '//maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css',
- '//maxcdn.bootstrapcdn.com/bootstrap/3.3.2/js/bootstrap.min.js'
+ '//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css',
+ '//maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js'
],
'label': 'Bootstrap Latest',
'group': 'Bootstrap' | Update Bootstrap *latest* library to version <I>
This updates urls of Bootstrap CDNs to latest <I> version urls.
Please see:
<URL> | jsbin_jsbin | train |
f6c2e8aec7d0b9d7606f9734b183b2cc8c5a0634 | diff --git a/lib/macho/load_commands.rb b/lib/macho/load_commands.rb
index <HASH>..<HASH> 100644
--- a/lib/macho/load_commands.rb
+++ b/lib/macho/load_commands.rb
@@ -106,29 +106,17 @@ module MachO
:LC_LINKER_OPTIMIZATION_HINT => "LinkeditDataCommand"
}
- # pagezero segment name
- SEG_PAGEZERO = "__PAGEZERO"
-
- # text segment name
- SEG_TEXT = "__TEXT"
-
- # data segment name
- SEG_DATA = "__DATA"
-
- # objective-c runtime segment
- SEG_OBJC = "__OBJC"
-
- # icon segment
- SEG_ICON = "__ICON"
-
- # link editor structures segment
- SEG_LINKEDIT = "__LINKEDIT"
-
- # unix stack segment
- SEG_UNIXSTACK = "__UNIXSTACK"
-
- # segment for self-modifying code with RWX permissions
- SEG_IMPORT = "__IMPORT"
+ # association of segment name symbols to names
+ SEGMENT_NAMES = {
+ :SEG_PAGEZERO => "__PAGEZERO",
+ :SEG_TEXT => "__TEXT",
+ :SEG_DATA => "__DATA",
+ :SEG_OBJC => "__OBJC",
+ :SEG_ICON => "__ICON",
+ :SEG_LINKEDIT => "__LINKEDIT",
+ :SEG_UNIXSTACK => "__UNIXSTACK",
+ :SEG_IMPORT => "__IMPORT"
+ }
# association of segment flag symbols to values
SEGMENT_FLAGS = {
diff --git a/lib/macho/sections.rb b/lib/macho/sections.rb
index <HASH>..<HASH> 100644
--- a/lib/macho/sections.rb
+++ b/lib/macho/sections.rb
@@ -47,20 +47,21 @@ module MachO
:S_ATTR_LOC_RELOC => 0x00000100
}
- # currently known section names
- # we don't use these anywhere right now, but they're good to have
- SECT_TEXT = "__text"
- SECT_FVMLIB_INIT0 = "__fvmlib_init0"
- SECT_FVMLIB_INIT1 = "__fvmlib_init1"
- SECT_DATA = "__data"
- SECT_BSS = "__bss"
- SECT_COMMON = "__common"
- SECT_OBJC_SYMBOLS = "__symbol_table"
- SECT_OBJC_MODULES = "__module_info"
- SECT_OBJC_STRINGS = "__selector_strs"
- SECT_OBJC_REFS = "__selector_refs"
- SECT_ICON_HEADER = "__header"
- SECT_ICON_TIFF = "__tiff"
+ # association of section name symbols to names
+ SECTION_NAMES = {
+ :SECT_TEXT => "__text",
+ :SECT_FVMLIB_INIT0 => "__fvmlib_init0",
+ :SECT_FVMLIB_INIT1 => "__fvmlib_init1",
+ :SECT_DATA => "__data",
+ :SECT_BSS => "__bss",
+ :SECT_COMMON => "__common",
+ :SECT_OBJC_SYMBOLS => "__symbol_table",
+ :SECT_OBJC_MODULES => "__module_info",
+ :SECT_OBJC_STRINGS => "__selector_strs",
+ :SECT_OBJC_REFS => "__selector_refs",
+ :SECT_ICON_HEADER => "__header",
+ :SECT_ICON_TIFF => "__tiff"
+ }
# Represents a section of a segment for 32-bit architectures.
class Section < MachOStructure | convert segment/section name constants into SEGMENT_NAMES and SECTION_NAMES hashes | Homebrew_ruby-macho | train |
ff7e5fb4f5df957aae1cec1f26fc543b6725e329 | diff --git a/example.py b/example.py
index <HASH>..<HASH> 100644
--- a/example.py
+++ b/example.py
@@ -14,7 +14,7 @@ import threading
import logging
from limpyd_jobs import STATUSES
-from limpyd_jobs.models import Queue, Job
+from limpyd_jobs.models import Queue, Job, Error
from limpyd_jobs.workers import Worker, logger
from limpyd import model, fields
from limpyd.contrib.database import PipelineDatabase
@@ -30,24 +30,29 @@ database = PipelineDatabase(host='localhost', port=6379, db=15)
QUEUE_NAME = 'update_fullname'
-class MyQueue(Queue):
+class ModelConfigMixin(object):
"""
- A queue that will store the dates of it's first and last successful job
+ A simple mixin to use with all our models, defining one for all the
+ database and namespace to use.
"""
database = database
namespace = 'limpyd-jobs-example'
+
+
+class MyQueue(ModelConfigMixin, Queue):
+ """
+ A queue that will store the dates of it's first and last successful job
+ """
first_job_date = fields.HashableField()
last_job_date = fields.HashableField()
jobs_counter = fields.HashableField()
-class MyJob(Job):
+class MyJob(ModelConfigMixin, Job):
"""
A job that will use Person's PK as identifier, and will store results of
callback in a new field
"""
- database = database
- namespace = 'limpyd-jobs-example'
result = fields.StringField() # to store the result of the task
queue_model = MyQueue
start = fields.HashableField(indexable=True)
@@ -59,14 +64,19 @@ class MyJob(Job):
return Person.get(self.identifier.hget())
-class Person(model.RedisModel):
+class MyError(ModelConfigMixin, Error):
+ """
+ The default Error model, but on our namespace and database
+ """
+ pass
+
+
+class Person(ModelConfigMixin, model.RedisModel):
"""
A simple model for which we want to compute fullname based on firstname and
lastname
"""
- database = database
cacheable = False
- namespace = 'limpyd-jobs-example'
firstname = fields.HashableField()
lastname = fields.HashableField()
@@ -84,6 +94,7 @@ class FullNameWorker(Worker):
# we use our own models
queue_model = MyQueue
job_model = MyJob
+ error_model = MyError
# useful logging level
logger_level = logging.INFO
@@ -177,13 +188,18 @@ class WorkerThread(threading.Thread):
workers.append(worker)
worker.run()
-# some clean
-for queue in MyQueue.collection().instances():
- queue.delete()
-for job in MyJob.collection().instances():
- job.delete()
-for person in Person.collection().instances():
- person.delete()
+
+def clean():
+ """
+ Clean data created by this script
+ """
+ for queue in MyQueue.collection().instances():
+ queue.delete()
+ for job in MyJob.collection().instances():
+ job.delete()
+ for person in Person.collection().instances():
+ person.delete()
+clean()
# create some persons
for name in ("Chandler Bing", "Rachel Green", "Ross Geller", "Joey Tribbiani",
@@ -248,3 +264,6 @@ for queue in MyQueue.collection().sort(by='priority').instances():
if queue.waiting.llen():
waiting_part = ' Still waiting: %s' % queue.waiting.lmembers()
print '\t[%s] (priority: %s).%s%s' % (name, priority, success_part, waiting_part)
+
+# final clean
+clean() | Add a mixin to define database and namespace in example.py
(and clean data at the end of the script) | limpyd_redis-limpyd-jobs | train |
d86e59c340c4cecacba41a1b2772cbd3ce8b7acc | diff --git a/src/main/java/me/legrange/mikrotik/ApiConnection.java b/src/main/java/me/legrange/mikrotik/ApiConnection.java
index <HASH>..<HASH> 100644
--- a/src/main/java/me/legrange/mikrotik/ApiConnection.java
+++ b/src/main/java/me/legrange/mikrotik/ApiConnection.java
@@ -133,16 +133,6 @@ public abstract class ApiConnection {
* @throws me.legrange.mikrotik.MikrotikApiException Thrown if there is a problem canceling the command */
public abstract void cancel(String tag) throws MikrotikApiException;
- /** get the command timeout. The command timeout is used to time out API
- * commands after a specific time.
- *
- * Note: This is not the same as the timeout value passed in the connect() and
- * connectTLS() methods. This timeout is specific to synchronous commands, that
- * timeout is applied to opening the API socket.
- *
- * @return The time out in milliseconds.
- */
- public abstract int getTimeout();
/** set the command timeout. The command timeout is used to time out API
* commands after a specific time.
diff --git a/src/main/java/me/legrange/mikrotik/impl/ApiConnectionImpl.java b/src/main/java/me/legrange/mikrotik/impl/ApiConnectionImpl.java
index <HASH>..<HASH> 100644
--- a/src/main/java/me/legrange/mikrotik/impl/ApiConnectionImpl.java
+++ b/src/main/java/me/legrange/mikrotik/impl/ApiConnectionImpl.java
@@ -98,11 +98,6 @@ public final class ApiConnectionImpl extends ApiConnection {
}
@Override
- public int getTimeout() {
- return timeout;
- }
-
- @Override
public void setTimeout(int timeout) throws MikrotikApiException {
if (timeout > 0) {
this.timeout = timeout; | Removed getTimeout() until somebody wants it. Don't want bloat | GideonLeGrange_mikrotik-java | train |
3d57e15ab5be3161309dece14b519604d1235dea | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -59,6 +59,7 @@ setup(name='bika.lims',
'collective.wtf',
'WeasyPrint==0.19.2',
'collective.progressbar',
+ 'z3c.unconfigure==1.0.1',
],
extras_require={
 'test': [ | Added z3c.unconfigure as a new installation requirement | senaite_senaite.core | train
2942cea294fc1fe18b6f6e934ea8c6cb09cabc37 | diff --git a/changes.txt b/changes.txt
index <HASH>..<HASH> 100644
--- a/changes.txt
+++ b/changes.txt
@@ -1,3 +1,6 @@
+0.0.9
+ - add support for settings/updating url attribute for message_threads
+
0.0.8
- add support for updating last request time for user through either last_request_at or last_impression_at
diff --git a/lib/intercom/message_thread.rb b/lib/intercom/message_thread.rb
index <HASH>..<HASH> 100644
--- a/lib/intercom/message_thread.rb
+++ b/lib/intercom/message_thread.rb
@@ -98,6 +98,17 @@ module Intercom
@attributes["read"]
end
+ # @return [String]
+ # @param [String] read the url that was being viewed when the comment was sent
+ def url=(url)
+ @attributes["url"] = url
+ end
+
+ # @return [String]
+ def url
+ @attributes["url"]
+ end
+
# @return [Array<Message>]
def messages
@attributes["messages"].map {|message_hash| Message.new(message_hash)}
diff --git a/lib/intercom/version.rb b/lib/intercom/version.rb
index <HASH>..<HASH> 100644
--- a/lib/intercom/version.rb
+++ b/lib/intercom/version.rb
@@ -1,3 +1,3 @@
module Intercom #:nodoc:
- VERSION = "0.0.8"
+ VERSION = "0.0.9"
end
diff --git a/spec/unit/intercom/message_thread_spec.rb b/spec/unit/intercom/message_thread_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/intercom/message_thread_spec.rb
+++ b/spec/unit/intercom/message_thread_spec.rb
@@ -34,10 +34,10 @@ describe "/v1/messages_threads" do
end
it "sets/gets allowed keys" do
- params = {"email" => "[email protected]", :user_id => "abc123", "thread_id" => "123", "body" => "hello world", "read" => true}
+ params = {"email" => "[email protected]", :user_id => "abc123", "thread_id" => "123", "body" => "hello world", "read" => true, "url" => "http://example.com"}
message_thread = Intercom::MessageThread.new(params)
message_thread.to_hash.keys.sort.must_equal params.keys.map(&:to_s).sort
- params.keys.each do | key|
+ params.keys.each do |key|
message_thread.send(key).must_equal params[key]
end
end | Add ability to set/update url for message_threads | intercom_intercom-ruby | train |
cd93114d556d76137a811d3f0492804c4662a4fb | diff --git a/caas/broker.go b/caas/broker.go
index <HASH>..<HASH> 100644
--- a/caas/broker.go
+++ b/caas/broker.go
@@ -19,6 +19,7 @@ import (
"github.com/juju/juju/core/network"
"github.com/juju/juju/core/status"
"github.com/juju/juju/core/watcher"
+ "github.com/juju/juju/docker"
"github.com/juju/juju/environs"
"github.com/juju/juju/storage"
)
@@ -229,6 +230,9 @@ type Broker interface {
// Application returns the broker interface for an Application
Application(string, DeploymentType) Application
+ // EnsureImageRepoSecret ensures the image pull secret gets created.
+ EnsureImageRepoSecret(docker.ImageRepoDetails) error
+
// ClusterMetadataChecker provides an API to query cluster metadata.
ClusterMetadataChecker
@@ -413,6 +417,9 @@ type OperatorConfig struct {
// OperatorImagePath is the docker registry URL for the image.
OperatorImagePath string
+ // OperatorImagePullSecretName is the secret name for image pulling.
+ OperatorImagePullSecretName string
+
// Version is the Juju version of the operator image.
Version version.Number
diff --git a/caas/kubernetes/provider/constants/constants.go b/caas/kubernetes/provider/constants/constants.go
index <HASH>..<HASH> 100644
--- a/caas/kubernetes/provider/constants/constants.go
+++ b/caas/kubernetes/provider/constants/constants.go
@@ -39,6 +39,9 @@ const (
// CAASProviderType is the provider type for k8s.
CAASProviderType = "kubernetes"
+
+ // CAASImageRepoSecretName is the name of the secret for image pull.
+ CAASImageRepoSecretName = "juju-image-pull-secret"
)
// DefaultPropagationPolicy returns the default propagation policy.
diff --git a/cmd/jujud/agent/model/manifolds.go b/cmd/jujud/agent/model/manifolds.go
index <HASH>..<HASH> 100644
--- a/cmd/jujud/agent/model/manifolds.go
+++ b/cmd/jujud/agent/model/manifolds.go
@@ -34,6 +34,7 @@ import (
"github.com/juju/juju/worker/caasenvironupgrader"
"github.com/juju/juju/worker/caasfirewaller"
"github.com/juju/juju/worker/caasfirewallersidecar"
+ "github.com/juju/juju/worker/caasmodelconfigmanager"
"github.com/juju/juju/worker/caasmodeloperator"
"github.com/juju/juju/worker/caasoperatorprovisioner"
"github.com/juju/juju/worker/caasunitprovisioner"
@@ -517,6 +518,14 @@ func CAASManifolds(config ManifoldsConfig) dependency.Manifolds {
ModelUUID: agentConfig.Model().Id(),
})),
+ caasmodelconfigmanagerName: ifResponsible(caasmodelconfigmanager.Manifold(caasmodelconfigmanager.ManifoldConfig{
+ APICallerName: apiCallerName,
+ BrokerName: caasBrokerTrackerName,
+ Logger: loggo.GetLogger("juju.worker.caasmodelconfigmanager"),
+ NewWorker: caasmodelconfigmanager.NewWorker,
+ NewFacade: caasmodelconfigmanager.NewFacade,
+ })),
+
caasOperatorProvisionerName: ifNotMigrating(caasoperatorprovisioner.Manifold(
caasoperatorprovisioner.ManifoldConfig{
AgentName: agentName,
@@ -691,6 +700,7 @@ const (
caasFirewallerNameLegacy = "caas-firewaller-legacy"
caasFirewallerNameSidecar = "caas-firewaller-embedded"
caasModelOperatorName = "caas-model-operator"
+ caasmodelconfigmanagerName = "caas-model-config-manager"
caasOperatorProvisionerName = "caas-operator-provisioner"
caasApplicationProvisionerName = "caas-application-provisioner"
caasUnitProvisionerName = "caas-unit-provisioner"
diff --git a/state/watcher.go b/state/watcher.go
index <HASH>..<HASH> 100644
--- a/state/watcher.go
+++ b/state/watcher.go
@@ -1899,7 +1899,7 @@ func (m *Machine) WatchInstanceData() NotifyWatcher {
return newEntityWatcher(m.st, instanceDataC, m.doc.DocID)
}
-// WatchControllerInfo returns a NotifyWatcher for the controllers collection
+// WatchControllerInfo returns a StringsWatcher for the controllers collection
func (st *State) WatchControllerInfo() StringsWatcher {
return newCollectionWatcher(st, colWCfg{col: controllerNodesC})
} | Add caasmodelconfigmanager worker to model manifolds; | juju_juju | train |
c995ceacd1434058d61eb2e1185139ddcde54b27 | diff --git a/tests/ImageTests.php b/tests/ImageTests.php
index <HASH>..<HASH> 100755
--- a/tests/ImageTests.php
+++ b/tests/ImageTests.php
@@ -202,6 +202,19 @@ class ImageTests extends \PHPUnit_Framework_TestCase
}
/**
+ * Testing Gaussian blur filter.
+ */
+ public function testGaussianBlur() {
+ $image = $this->open('monalisa.jpg')
+ ->gaussianBlur();
+ $secondImage = $this->open('monalisa.jpg')
+ ->gaussianBlur(5);
+
+ $this->assertTrue(file_exists($image));
+ $this->assertTrue(file_exists($secondImage));
+ }
+
+ /**
* Testing creating image from data.
*/
public function testData() | Add a simple test to check that filter is working. | Gregwar_Image | train |
22cdf78dfd42d76306950cfef83558e46a89551f | diff --git a/src/JsonSchema/Uri/Retrievers/AbstractRetriever.php b/src/JsonSchema/Uri/Retrievers/AbstractRetriever.php
index <HASH>..<HASH> 100644
--- a/src/JsonSchema/Uri/Retrievers/AbstractRetriever.php
+++ b/src/JsonSchema/Uri/Retrievers/AbstractRetriever.php
@@ -17,13 +17,7 @@ abstract class AbstractRetriever implements UriRetrieverInterface
* @var string
*/
protected $contentType;
-
- /**
- * {@inheritDoc}
- * @see \JsonSchema\Uri\Retrievers\UriRetrieverInterface::retrieve()
- */
- public abstract function retrieve($uri);
-
+
/**
* {@inheritDoc}
* @see \JsonSchema\Uri\Retrievers\UriRetrieverInterface::getContentType()
@@ -32,4 +26,4 @@ abstract class AbstractRetriever implements UriRetrieverInterface
{
return $this->contentType;
}
-}
\ No newline at end of file
+} | remove abstract declaration of 'retrieve' which duplicated interface declaration | justinrainbow_json-schema | train |
625f2f1fe5fb01022e82f934c17cc32ab5ed17cf | diff --git a/lib/mongoid/config.rb b/lib/mongoid/config.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/config.rb
+++ b/lib/mongoid/config.rb
@@ -135,11 +135,7 @@ module Mongoid #:nodoc
# @since 2.0.2
def purge!
master.collections.map do |collection|
- begin
- collection.drop if collection.name !~ /system/
- rescue Mongo::OperationError => e
- ::Logger.new($stdout).info(collection.name)
- end
+ collection.drop if collection.name !~ /system/
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -30,9 +30,13 @@ Mongoid.configure do |config|
config.logger = nil
end
+CLASSES = []
+
Dir[ File.join(MODELS, "*.rb") ].sort.each do |file|
name = File.basename(file, ".rb")
- autoload name.camelize.to_sym, name
+ class_name = name.camelize
+ CLASSES.push(class_name)
+ autoload class_name.to_sym, name
end
module Medical
@@ -66,12 +70,24 @@ Dir[ File.join(SUPPORT, "*.rb") ].each do |file|
require File.basename(file)
end
+def delete_all_documents
+ CLASSES.each do |class_name|
+ if const_defined?(class_name)
+ class_name.constantize.delete_all
+ end
+ end
+end
+
RSpec.configure do |config|
config.mock_with(:mocha)
config.before(:each) do
- Mongoid.purge!
+ if ENV["CI"]
+ delete_all_documents
+ else
+ Mongoid.purge!
+ end
Mongoid::IdentityMap.clear
end | When on travis, attempt to delete all instead of purge | mongodb_mongoid | train |
0132c5badd317f762218dd046a8dd55d3196b3df | diff --git a/spec/app.rb b/spec/app.rb
index <HASH>..<HASH> 100644
--- a/spec/app.rb
+++ b/spec/app.rb
@@ -48,7 +48,7 @@ describe 'app' do
before_time = Time.now
@app.after 0.01 do
after_time = Time.now
- (after_time - before_time).should.be >= 0.01
+ (after_time - before_time).should.be >= 0.008
(after_time - before_time).should.be.close 0.01, 0.005
resume
end
@@ -60,7 +60,7 @@ describe 'app' do
count = 0
timer = @app.every 0.01 do
after_time = Time.now
- (after_time - before_time).should.be >= 0.01
+ (after_time - before_time).should.be >= 0.008
(after_time - before_time).should.be.close 0.01, 0.005
count += 1
before_time = after_time # reset | add serious fuzz to the timer, but still testing for some accuracy | infinitered_rmq | train |
4659cf85109841bc5a73e31dc26d8bf9b96b3a9c | diff --git a/lib/model/type/integer.js b/lib/model/type/integer.js
index <HASH>..<HASH> 100644
--- a/lib/model/type/integer.js
+++ b/lib/model/type/integer.js
@@ -58,7 +58,7 @@ class ModelTypeInteger extends ModelType {
definition.max = min;
}
- const { min, max } = definition;
+ const { min, max, step } = definition;
if ( min !== undefined && ( typeof min === "object" || !NumberUtilities.ptnFloat.test( min ) ) ) {
errors.push( new TypeError( "invalid requirement on minimum value" ) );
@@ -67,6 +67,10 @@ class ModelTypeInteger extends ModelType {
if ( max !== undefined && ( typeof max === "object" || !NumberUtilities.ptnFloat.test( max ) ) ) {
errors.push( new TypeError( "invalid requirement on maximum value" ) );
}
+
+ if ( step !== undefined && ( !step || typeof step === "object" || !NumberUtilities.ptnFloat.test( step ) ) ) {
+ errors.push( new TypeError( "invalid requirement on value stepping" ) );
+ }
}
return errors;
@@ -92,9 +96,15 @@ class ModelTypeInteger extends ModelType {
}
// falls through
- case "number" :
+ case "number" : {
value = Math.round( parseFloat( value ) );
+
+ const { step, min } = requirements;
+ if ( step ) {
+ value = Math.round( value / step - ( min || 0 ) ) * step + ( min || 0 );
+ }
break;
+ }
default :
value = NaN;
diff --git a/lib/model/type/number.js b/lib/model/type/number.js
index <HASH>..<HASH> 100644
--- a/lib/model/type/number.js
+++ b/lib/model/type/number.js
@@ -58,7 +58,7 @@ class ModelTypeNumber extends ModelType {
definition.max = min;
}
- const { min, max } = definition;
+ const { min, max, step } = definition;
if ( min !== undefined && ( typeof min === "object" || !NumberUtilities.ptnFloat.test( min ) ) ) {
errors.push( new TypeError( "invalid requirement on minimum value" ) );
@@ -67,6 +67,10 @@ class ModelTypeNumber extends ModelType {
if ( max !== undefined && ( typeof max === "object" || !NumberUtilities.ptnFloat.test( max ) ) ) {
errors.push( new TypeError( "invalid requirement on maximum value" ) );
}
+
+ if ( step !== undefined && ( !step || typeof step === "object" || !NumberUtilities.ptnFloat.test( step ) ) ) {
+ errors.push( new TypeError( "invalid requirement on value stepping" ) );
+ }
}
return errors;
@@ -92,9 +96,15 @@ class ModelTypeNumber extends ModelType {
}
// falls through
- case "number" :
+ case "number" : {
value = parseFloat( value );
+
+ const { step, min } = requirements;
+ if ( step ) {
+ value = Math.round( value / step - ( min || 0 ) ) * step + ( min || 0 );
+ }
break;
+ }
default :
value = NaN; | adding support for defining stepping on numeric values | hitchyjs_odem | train |
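The stepping support added above coerces parsed numbers onto a step grid anchored at the configured minimum. A minimal Python sketch of that general idea (the function and sample values are invented, and the diff's own expression rounds `value / step - min` rather than `(value - min) / step`):

```python
# Illustrative only: snap a numeric value onto a step grid anchored at a minimum.
def snap_to_step(value, step=None, minimum=None):
    if not step:                      # no stepping requirement configured
        return value
    base = minimum or 0
    return round((value - base) / step) * step + base

print(snap_to_step(7.3, step=0.5))            # 7.5
print(snap_to_step(7.3, step=2, minimum=1))   # 7  (grid 1, 3, 5, 7, ...)
```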
e240ab9ad0f999f383e0ed674600d48705442c71 | diff --git a/src/session.js b/src/session.js
index <HASH>..<HASH> 100644
--- a/src/session.js
+++ b/src/session.js
@@ -27,7 +27,6 @@ class Session {
sub: this.subscribe.bind(this),
unsub: this.unsubscribe.bind(this),
connect: this.connect.bind(this),
- pong: this.pong.bind(this),
ping: this.ping.bind(this)
}; | Remove reference to old pong handler | mixmaxhq_publication-server | train |
daaa98ea5e4c45d3063b3cb1f14b75f15377de46 | diff --git a/rsocket-transport/src/main/java/io/scalecube/services/transport/rsocket/DefaultEventExecutorChooser.java b/rsocket-transport/src/main/java/io/scalecube/services/transport/rsocket/DefaultEventExecutorChooser.java
index <HASH>..<HASH> 100644
--- a/rsocket-transport/src/main/java/io/scalecube/services/transport/rsocket/DefaultEventExecutorChooser.java
+++ b/rsocket-transport/src/main/java/io/scalecube/services/transport/rsocket/DefaultEventExecutorChooser.java
@@ -7,7 +7,6 @@ import java.net.SocketAddress;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
-import java.util.concurrent.Executor;
import java.util.stream.StreamSupport;
/**
@@ -24,15 +23,21 @@ public class DefaultEventExecutorChooser implements EventExecutorChooser {
@Override
public EventExecutor getEventExecutor(Channel channel, Iterator<EventExecutor> iterator) {
+ EventExecutor[] executors =
+ StreamSupport.stream(
+ Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
+ .toArray(EventExecutor[]::new);
+
+ for (EventExecutor executor : executors) {
+ if (executor.inEventLoop()) {
+ return executor;
+ }
+ }
+
String channelId = channel.id().asLongText(); // globally unique id
SocketAddress localAddress = channel.localAddress(); // bound address
SocketAddress remoteAddress = channel.remoteAddress(); // remote ephemeral address
- Executor[] executors =
- StreamSupport.stream(
- Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
- .toArray(Executor[]::new);
-
try {
return (EventExecutor)
threadChooser.getWorker(channelId, localAddress, remoteAddress, executors); | Fixed bug in default event executor chooser | scalecube_scalecube-services | train |
a9482d75217d0ae1ad8220515ed22a9245a822fc | diff --git a/src/org/junit/internal/requests/ClassRequest.java b/src/org/junit/internal/requests/ClassRequest.java
index <HASH>..<HASH> 100644
--- a/src/org/junit/internal/requests/ClassRequest.java
+++ b/src/org/junit/internal/requests/ClassRequest.java
@@ -1,8 +1,7 @@
package org.junit.internal.requests;
-import java.lang.reflect.Constructor;
-
import org.junit.Ignore;
+import org.junit.internal.runners.InitializationError;
import org.junit.internal.runners.OldTestClassRunner;
import org.junit.internal.runners.TestClassRunner;
import org.junit.runner.Request;
@@ -11,6 +10,7 @@ import org.junit.runner.Runner;
import org.junit.runners.AllTests;
public class ClassRequest extends Request {
+ private static final String CONSTRUCTOR_ERROR_FORMAT= "Custom runner class %s should have a public constructor with signature %s(Class testClass)";
private final Class<?> fTestClass;
private boolean fCanUseSuiteMethod;
@@ -25,15 +25,20 @@ public class ClassRequest extends Request {
@Override
public Runner getRunner() {
- Class<? extends Runner> runnerClass= getRunnerClass(fTestClass);
+ return buildRunner(getRunnerClass(fTestClass));
+ }
+
+ public Runner buildRunner(Class<? extends Runner> runnerClass) {
try {
- Constructor<? extends Runner> constructor= runnerClass.getConstructor(Class.class); // TODO good error message if no such constructor
- return constructor.newInstance(new Object[] { fTestClass });
- } catch (StackOverflowError e) {
- throw new RuntimeException();
+ return runnerClass.getConstructor(Class.class).newInstance(new Object[] { fTestClass });
+ } catch (NoSuchMethodException e) {
+ String simpleName= runnerClass.getSimpleName();
+ InitializationError error= new InitializationError(String.format(
+ CONSTRUCTOR_ERROR_FORMAT, simpleName, simpleName));
+ return Request.errorReport(fTestClass, error).getRunner();
} catch (Exception e) {
return Request.errorReport(fTestClass, e).getRunner();
- }
+ }
}
Class<? extends Runner> getRunnerClass(final Class<?> testClass) {
diff --git a/src/org/junit/tests/AllTests.java b/src/org/junit/tests/AllTests.java
index <HASH>..<HASH> 100644
--- a/src/org/junit/tests/AllTests.java
+++ b/src/org/junit/tests/AllTests.java
@@ -6,7 +6,8 @@ import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
-// TODO (Feb 21, 2007 10:05:41 AM): organize these tests
+// These test files need to be cleaned. See
+// https://sourceforge.net/pm/task.php?func=detailtask&project_task_id=136507&group_id=15278&group_project_id=51407
@RunWith(Suite.class)
@SuiteClasses({
diff --git a/src/org/junit/tests/RunWithTest.java b/src/org/junit/tests/RunWithTest.java
index <HASH>..<HASH> 100644
--- a/src/org/junit/tests/RunWithTest.java
+++ b/src/org/junit/tests/RunWithTest.java
@@ -1,6 +1,6 @@
package org.junit.tests;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
@@ -57,4 +57,28 @@ public class RunWithTest {
JUnitCore.runClasses(SubExampleTest.class);
assertTrue(log.contains("run"));
}
+
+ public static class BadRunner extends Runner {
+ @Override
+ public Description getDescription() {
+ return null;
+ }
+
+ @Override
+ public void run(RunNotifier notifier) {
+ // do nothing
+ }
+ }
+
+ @RunWith(BadRunner.class)
+ public static class Empty {
+ }
+
+ @Test
+ public void characterizeErrorMessageFromBadRunner() {
+ assertEquals(
+ "Custom runner class BadRunner should have a public constructor with signature BadRunner(Class testClass)",
+ JUnitCore.runClasses(Empty.class).getFailures().get(0)
+ .getMessage());
+ }
} | Better error message when a custom runner has a wrongly-signatured constructor | junit-team_junit4 | train |
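The JUnit change above fails fast with an actionable message when a custom runner lacks a public `Runner(Class)` constructor. A rough Python analogue of the same validate-then-instantiate pattern, using `inspect` (all names here are invented, not JUnit API):

```python
import inspect

CONSTRUCTOR_ERROR = ("Custom runner class {name} should have a public constructor "
                     "with signature {name}(test_class)")

def build_runner(runner_cls, test_class):
    # Count required positional parameters of the constructor.
    required = [p for p in inspect.signature(runner_cls).parameters.values()
                if p.default is inspect.Parameter.empty
                and p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD)]
    if len(required) != 1:
        raise TypeError(CONSTRUCTOR_ERROR.format(name=runner_cls.__name__))
    return runner_cls(test_class)

class BadRunner:
    def __init__(self):               # takes no test class
        pass

# build_runner(BadRunner, object) -> TypeError with the friendly message above.
```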
884c20006a26f202d35f5dca846c38a0e8a67f5c | diff --git a/src/Bynder/Api/Impl/AssetBankManager.php b/src/Bynder/Api/Impl/AssetBankManager.php
index <HASH>..<HASH> 100644
--- a/src/Bynder/Api/Impl/AssetBankManager.php
+++ b/src/Bynder/Api/Impl/AssetBankManager.php
@@ -24,12 +24,12 @@ class AssetBankManager implements IAssetBankManager
/**
* @var IOauthRequestHandler Request handler used to communicate with the API.
*/
- private $requestHandler;
+ protected $requestHandler;
/**
* @var FileUploader Used for file uploading operations.
*/
- private $fileUploader;
+ protected $fileUploader;
/**
* Initialises a new instance of the class.
@@ -114,7 +114,7 @@ class AssetBankManager implements IAssetBankManager
*/
public function getMetaproperty($propertyId, $query = null)
{
- return $this->requestHandler->sendRequestAsync('GET', 'api/v4/metaproperties/' . $propertyId . '/',
+ return $this->requestHandler->sendRequestAsync('GET', 'api/v4/metaproperties/' . $propertyId . '/',
['query' => $query]
);
}
diff --git a/src/Bynder/Api/Impl/BynderApi.php b/src/Bynder/Api/Impl/BynderApi.php
index <HASH>..<HASH> 100644
--- a/src/Bynder/Api/Impl/BynderApi.php
+++ b/src/Bynder/Api/Impl/BynderApi.php
@@ -29,15 +29,15 @@ class BynderApi implements IBynderApi
/**
* @var string Base Url necessary for API calls.
*/
- private $baseUrl;
+ protected $baseUrl;
/**
* @var AssetBankManager Instance of the Asset bank manager.
*/
- private $assetBankManager;
+ protected $assetBankManager;
/**
* @var OauthRequestHandler Instance of the Oauth request handler.
*/
- private $requestHandler;
+ protected $requestHandler;
/**
* Initialises a new instance of the class.
@@ -279,7 +279,7 @@ class BynderApi implements IBynderApi
* @param $settings
* @return bool Whether the settings array is valid.
*/
- private static function validateSettings($settings)
+ protected static function validateSettings($settings)
{
if (!isset($settings['consumerKey']) || !isset($settings['consumerSecret'])) {
return false;
@@ -300,4 +300,4 @@ class BynderApi implements IBynderApi
return $this->requestHandler;
}
-}
\ No newline at end of file
+} | Issue <I> - add protected methods
Allows the Bynder SDK to be easily extended | Bynder_bynder-php-sdk | train
0cdd19db8bfed8387298f65b4ef2bbe40a6a2894 | diff --git a/test/integration/index.js b/test/integration/index.js
index <HASH>..<HASH> 100644
--- a/test/integration/index.js
+++ b/test/integration/index.js
@@ -85,33 +85,6 @@ export async function ready (mocha, native = true, withAccounts = false) {
})
}
-export const WindowPostMessageFake = (name) => ({
- name,
- messages: [],
- addEventListener (onEvent, listener) {
- this.listener = listener
- },
- removeEventListener (onEvent, listener) {
- return () => null
- },
- postMessage (msg) {
- this.messages.push(msg)
- setTimeout(() => { if (typeof this.listener === 'function') this.listener({ data: msg, origin: 'testOrigin', source: this }) }, 0)
- }
-})
-
-export const getFakeConnections = (direct = false) => {
- const waelletConnection = WindowPostMessageFake('wallet')
- const aeppConnection = WindowPostMessageFake('aepp')
- if (direct) {
- const waelletP = waelletConnection.postMessage
- const aeppP = aeppConnection.postMessage
- waelletConnection.postMessage = aeppP.bind(aeppConnection)
- aeppConnection.postMessage = waelletP.bind(waelletConnection)
- }
- return { waelletConnection, aeppConnection }
-}
-
export default {
BaseAe,
url,
@@ -120,5 +93,4 @@ export default {
configure,
ready,
plan,
- WindowPostMessageFake
}
diff --git a/test/integration/rpc.js b/test/integration/rpc.js
index <HASH>..<HASH> 100644
--- a/test/integration/rpc.js
+++ b/test/integration/rpc.js
@@ -16,8 +16,7 @@
*/
import { Aepp, Wallet, Node, RpcWallet, RpcAepp, MemoryAccount } from '../../es'
-import { compilerUrl, getFakeConnections, url, internalUrl, networkId, publicKey, genesisAccount } from './'
-
+import { compilerUrl, url, internalUrl, networkId, publicKey, genesisAccount } from './'
import { describe, it, before } from 'mocha'
import BrowserWindowMessageConnection from '../../es/utils/aepp-wallet-communication/connection/browser-window-message'
import { generateKeyPair, verify } from '../../es/utils/crypto'
@@ -669,6 +668,33 @@ describe('Aepp<->Wallet', function () {
})
})
+const WindowPostMessageFake = (name) => ({
+ name,
+ messages: [],
+ addEventListener (onEvent, listener) {
+ this.listener = listener
+ },
+ removeEventListener (onEvent, listener) {
+ return () => null
+ },
+ postMessage (msg) {
+ this.messages.push(msg)
+ setTimeout(() => { if (typeof this.listener === 'function') this.listener({ data: msg, origin: 'testOrigin', source: this }) }, 0)
+ }
+})
+
+const getFakeConnections = (direct = false) => {
+ const waelletConnection = WindowPostMessageFake('wallet')
+ const aeppConnection = WindowPostMessageFake('aepp')
+ if (direct) {
+ const waelletP = waelletConnection.postMessage
+ const aeppP = aeppConnection.postMessage
+ waelletConnection.postMessage = aeppP.bind(aeppConnection)
+ aeppConnection.postMessage = waelletP.bind(waelletConnection)
+ }
+ return { waelletConnection, aeppConnection }
+}
+
const getConnections = (direct) => {
global.chrome = { runtime: {} }
global.window = { location: { origin: '//test' }, chrome: global.chrome } | rpc tests: Inline dependency used in one place | aeternity_aepp-sdk-js | train |
62ff2bd5b5326a1abf38d8be4f8b7c31554959e5 | diff --git a/storage/src/main/java/org/openbase/jul/storage/registry/AbstractRegistry.java b/storage/src/main/java/org/openbase/jul/storage/registry/AbstractRegistry.java
index <HASH>..<HASH> 100644
--- a/storage/src/main/java/org/openbase/jul/storage/registry/AbstractRegistry.java
+++ b/storage/src/main/java/org/openbase/jul/storage/registry/AbstractRegistry.java
@@ -345,6 +345,7 @@ public class AbstractRegistry<KEY, ENTRY extends Identifiable<KEY>, MAP extends
pluginPool.beforeClear();
sandbox.clear();
entryMap.clear();
+ consistent = true;
} finally {
unlock();
}
@@ -441,7 +442,7 @@ public class AbstractRegistry<KEY, ENTRY extends Identifiable<KEY>, MAP extends
*
* @param registry the dependency of these registry.
*/
- public void registerDependency(final Registry registry) {
+ public void registerDependency(final Registry registry) throws CouldNotPerformException {
// check if already registered
if (dependingRegistryMap.containsKey(registry)) {
return;
diff --git a/storage/src/main/java/org/openbase/jul/storage/registry/RemoteRegistry.java b/storage/src/main/java/org/openbase/jul/storage/registry/RemoteRegistry.java
index <HASH>..<HASH> 100644
--- a/storage/src/main/java/org/openbase/jul/storage/registry/RemoteRegistry.java
+++ b/storage/src/main/java/org/openbase/jul/storage/registry/RemoteRegistry.java
@@ -139,6 +139,11 @@ public class RemoteRegistry<KEY, M extends GeneratedMessage, MB extends M.Builde
}
@Override
+ public void registerDependency(Registry registry) throws CouldNotPerformException {
+ throw new NotSupportedException("registerDependency", "method", this);
+ }
+
+ @Override
public boolean tryLockRegistry() throws RejectedException {
throw new RejectedException("RemoteRegistry not lockable!");
}
@@ -147,4 +152,5 @@ public class RemoteRegistry<KEY, M extends GeneratedMessage, MB extends M.Builde
public void unlockRegistry() {
// because remote registry does not support locks there is no need for any action here.
}
+
} | registration of dependencies on remoteRegistries now throws a notSupportedException | openbase_jul | train |
bcb822c3906a0ffad5d2bcd56f1aa262bd08cd70 | diff --git a/api-response.go b/api-response.go
index <HASH>..<HASH> 100644
--- a/api-response.go
+++ b/api-response.go
@@ -502,10 +502,11 @@ func writeErrorResponse(w http.ResponseWriter, req *http.Request, errorCode APIE
setCommonHeaders(w)
// write Header
w.WriteHeader(error.HTTPStatusCode)
- writeErrorResponseNoHeader(w, req, error, resource)
+ writeErrorResponseNoHeader(w, req, errorCode, resource)
}
-func writeErrorResponseNoHeader(w http.ResponseWriter, req *http.Request, error APIError, resource string) {
+func writeErrorResponseNoHeader(w http.ResponseWriter, req *http.Request, errorCode APIErrorCode, resource string) {
+ error := getAPIError(errorCode)
// Generate error response.
errorResponse := getAPIErrorResponse(error, resource)
encodedErrorResponse := encodeResponse(errorResponse)
diff --git a/object-handlers.go b/object-handlers.go
index <HASH>..<HASH> 100644
--- a/object-handlers.go
+++ b/object-handlers.go
@@ -974,6 +974,14 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
// Send 200 OK
setCommonHeaders(w)
w.WriteHeader(http.StatusOK)
+ // Xml headers need to be sent before we possibly send whitespace characters
+ // to the client.
+ _, err = w.Write([]byte(xml.Header))
+ if err != nil {
+ errorIf(err, "Unable to write XML header for complete multipart upload")
+ writeErrorResponseNoHeader(w, r, ErrInternalError, r.URL.Path)
+ return
+ }
doneCh := make(chan struct{})
// Signal that completeMultipartUpload is over via doneCh
@@ -992,7 +1000,7 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
writePartSmallErrorResponse(w, r, oErr)
default:
// Handle all other generic issues.
- writeErrorResponseNoHeader(w, r, getAPIError(toAPIErrorCode(err)), r.URL.Path)
+ writeErrorResponseNoHeader(w, r, toAPIErrorCode(err), r.URL.Path)
}
return
}
@@ -1001,7 +1009,12 @@ func (api objectAPIHandlers) CompleteMultipartUploadHandler(w http.ResponseWrite
location := getLocation(r)
// Generate complete multipart response.
response := generateCompleteMultpartUploadResponse(bucket, object, location, md5Sum)
- encodedSuccessResponse := encodeResponse(response)
+ encodedSuccessResponse, err := xml.Marshal(response)
+ if err != nil {
+ errorIf(err, "Unable to parse CompleteMultipartUpload response")
+ writeErrorResponseNoHeader(w, r, ErrInternalError, r.URL.Path)
+ return
+ }
// write success response.
w.Write(encodedSuccessResponse)
w.(http.Flusher).Flush() | Send XML header before the first of whitespace chars (#<I>)
* Sent XML header before the first of whitespace chars
XML parsing fails in aws cli due to unexpected whitespace character. To
fix this, we send the xml header before we send the first whitespace
character, if any.
* Fix race between sendWhiteSpaceChars and completeMultiUploadpart | minio_minio | train |
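The fix above makes sure the XML declaration reaches the client before any keep-alive whitespace that is streamed while the multipart completion is still running. A small Python sketch of that ordering (the writer, poll function and timings are invented, not minio's API):

```python
import time

XML_HEADER = '<?xml version="1.0" encoding="UTF-8"?>\n'

def stream_xml(write, poll_result, poll_interval=0.05):
    write(XML_HEADER)                 # header first, before any padding
    while True:
        body = poll_result()
        if body is not None:
            write(body)               # the real XML payload
            return
        write(" ")                    # keep-alive whitespace is harmless after the header
        time.sleep(poll_interval)

ready_at = time.monotonic() + 0.2
out = []
stream_xml(out.append,
           lambda: "<Result>done</Result>" if time.monotonic() >= ready_at else None)
print("".join(out))                   # header, some spaces, then the payload
```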
205b2bf06e895c51b58cd5e5773c1be9556f3f93 | diff --git a/template/app/view/comments/Index.js b/template/app/view/comments/Index.js
index <HASH>..<HASH> 100644
--- a/template/app/view/comments/Index.js
+++ b/template/app/view/comments/Index.js
@@ -2,7 +2,7 @@
* Container for recent comments listing.
*/
Ext.define('Docs.view.comments.Index', {
- extend: 'Ext.container.Container',
+ extend: 'Ext.panel.Panel',
alias: 'widget.commentindex',
cls: 'comment-index iScroll',
diff --git a/template/app/view/guides/Container.js b/template/app/view/guides/Container.js
index <HASH>..<HASH> 100644
--- a/template/app/view/guides/Container.js
+++ b/template/app/view/guides/Container.js
@@ -4,7 +4,7 @@
* Renders the guide and print button.
*/
Ext.define('Docs.view.guides.Container', {
- extend: 'Ext.container.Container',
+ extend: 'Ext.panel.Panel',
alias: 'widget.guidecontainer',
componentCls: 'guide-container',
diff --git a/template/app/view/videos/Container.js b/template/app/view/videos/Container.js
index <HASH>..<HASH> 100644
--- a/template/app/view/videos/Container.js
+++ b/template/app/view/videos/Container.js
@@ -4,7 +4,7 @@
* Renders the video itself and its title + description.
*/
Ext.define('Docs.view.videos.Container', {
- extend: 'Ext.container.Container',
+ extend: 'Ext.panel.Panel',
alias: 'widget.videocontainer',
componentCls: 'video-container', | Fix regression of guides page styles.
The main styles were restricted to panel body, but guides and others
were containers, not panels, so the styles did not get applied.
Now guides, videos, comments are all panels. | senchalabs_jsduck | train |
7d0c5a612f1c540cba8bcebe8088d0f525d1498e | diff --git a/test/test_xmlmap/test_core.py b/test/test_xmlmap/test_core.py
index <HASH>..<HASH> 100755
--- a/test/test_xmlmap/test_core.py
+++ b/test/test_xmlmap/test_core.py
@@ -17,7 +17,7 @@
#!/usr/bin/env python
from lxml import etree
-from os import path
+import os
import unittest
import tempfile
@@ -319,13 +319,12 @@ class TestXmlObject(unittest.TestCase):
self.assertEqual(init_values['bool'], obj.bool)
[email protected]('HTTP_PROXY' not in os.environ,
+ 'Schema validation test requires an HTTP_PROXY')
class TestLoadSchema(unittest.TestCase):
def test_load_schema(self):
- # NOTE: overriding HTTP_PROXY requirement so schema logic can
- # be tested even if a proxy is not set
- schema = xmlmap.loadSchema('http://www.w3.org/2001/xml.xsd',
- override_proxy_requirement=True)
+ schema = xmlmap.loadSchema('http://www.w3.org/2001/xml.xsd')
self.assert_(isinstance(schema, etree.XMLSchema),
'loadSchema should return an etree.XMLSchema object when successful')
@@ -350,7 +349,8 @@ class TestLoadSchema(unittest.TestCase):
def test_parse_error(self):
# test document that is loaded but can't be parsed as a schema
# valid xml non-schema doc
- xmldoc = path.join(path.dirname(path.abspath(__file__)), 'fixtures', 'heaney653.xml')
+ xmldoc = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'fixtures', 'heaney653.xml')
# confirm an exception is raised
self.assertRaises(etree.XMLSchemaParseError, xmlmap.loadSchema, xmldoc)
# inspect the exception for expected detail in error messages | skip all unit tests that require schemas when no http proxy is configured | emory-libraries_eulxml | train |
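The commit above swaps the proxy-override hack for `unittest.skipIf`, so the schema tests simply do not run when no HTTP proxy is configured. A self-contained example of the same pattern (the test body is invented):

```python
import os
import unittest

@unittest.skipIf('HTTP_PROXY' not in os.environ,
                 'schema download tests require an HTTP_PROXY')
class NetworkSchemaTests(unittest.TestCase):
    def test_proxy_is_set(self):
        self.assertTrue(os.environ['HTTP_PROXY'])

if __name__ == '__main__':
    unittest.main()
```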
4a57b076edc28bf602e9ab1e8f8ef38f2d553359 | diff --git a/client/hotrod-client/src/test/java/org/infinispan/client/hotrod/query/ReplicationIndexTest.java b/client/hotrod-client/src/test/java/org/infinispan/client/hotrod/query/ReplicationIndexTest.java
index <HASH>..<HASH> 100644
--- a/client/hotrod-client/src/test/java/org/infinispan/client/hotrod/query/ReplicationIndexTest.java
+++ b/client/hotrod-client/src/test/java/org/infinispan/client/hotrod/query/ReplicationIndexTest.java
@@ -45,7 +45,7 @@ public class ReplicationIndexTest extends MultiHotRodServersTest {
public static final String CACHE_NAME = "test-cache";
public static final String PROTO_FILE = "file.proto";
- public static final int ENTRIES = 10;
+ public static final int ENTRIES = 2;
private final AtomicInteger serverCount = new AtomicInteger(0);
diff --git a/query/src/main/java/org/infinispan/query/backend/TxQueryInterceptor.java b/query/src/main/java/org/infinispan/query/backend/TxQueryInterceptor.java
index <HASH>..<HASH> 100644
--- a/query/src/main/java/org/infinispan/query/backend/TxQueryInterceptor.java
+++ b/query/src/main/java/org/infinispan/query/backend/TxQueryInterceptor.java
@@ -54,13 +54,11 @@ public final class TxQueryInterceptor extends DDAsyncInterceptor {
oldValues = Collections.emptyMap();
}
AbstractCacheTransaction transaction = txCtx.getCacheTransaction();
- Set<Object> keys = transaction.getAffectedKeys();
- if (!ctx.isOriginLocal() || transaction.getModifications().stream().anyMatch(mod -> mod.hasAnyFlag(FlagBitSets.SKIP_INDEXING))) {
- keys = transaction.getModifications().stream()
- .filter(mod -> !mod.hasAnyFlag(FlagBitSets.SKIP_INDEXING))
- .flatMap(mod -> mod.getAffectedKeys().stream())
- .collect(Collectors.toSet());
- }
+ Set<Object> keys = transaction.getAllModifications().stream()
+ .filter(mod -> !mod.hasAnyFlag(FlagBitSets.SKIP_INDEXING))
+ .flatMap(mod -> mod.getAffectedKeys().stream())
+ .collect(Collectors.toSet());
+
for (Object key : keys) {
CacheEntry entry = txCtx.lookupEntry(key);
if (entry != null) {
diff --git a/remote-query/remote-query-server/src/main/java/org/infinispan/query/remote/impl/LifecycleManager.java b/remote-query/remote-query-server/src/main/java/org/infinispan/query/remote/impl/LifecycleManager.java
index <HASH>..<HASH> 100644
--- a/remote-query/remote-query-server/src/main/java/org/infinispan/query/remote/impl/LifecycleManager.java
+++ b/remote-query/remote-query-server/src/main/java/org/infinispan/query/remote/impl/LifecycleManager.java
@@ -116,7 +116,8 @@ public final class LifecycleManager implements ModuleLifecycle {
}
/**
- * Registers the interceptor in the cache before it gets started.
+ * Registers the interceptor in the ___protobuf_metadata cache before it gets started. Also creates query components
+ * for user caches.
*/
@Override
public void cacheStarting(ComponentRegistry cr, Configuration cfg, String cacheName) {
@@ -140,6 +141,12 @@ public final class LifecycleManager implements ModuleLifecycle {
SerializationContext serCtx = protobufMetadataManager.getSerializationContext();
RemoteQueryManager remoteQueryManager = buildQueryManager(cfg, serCtx, cr);
cr.registerComponent(remoteQueryManager, RemoteQueryManager.class);
+
+ if (cfg.indexing().index().isEnabled()) {
+ log.debugf("Wrapping the SearchWorkCreator for indexed cache %s", cacheName);
+ QueryInterceptor queryInterceptor = cr.getComponent(QueryInterceptor.class);
+ queryInterceptor.setSearchWorkCreator(new ProtobufValueWrapperSearchWorkCreator(queryInterceptor.getSearchWorkCreator(), serCtx));
+ }
}
}
@@ -168,21 +175,4 @@ public final class LifecycleManager implements ModuleLifecycle {
}
return querySerializers;
}
-
- @Override
- public void cacheStarted(ComponentRegistry cr, String cacheName) {
- GlobalComponentRegistry gcr = cr.getGlobalComponentRegistry();
- InternalCacheRegistry icr = gcr.getComponent(InternalCacheRegistry.class);
- if (!icr.isInternalCache(cacheName)) {
- Configuration cfg = cr.getComponent(Configuration.class);
- if (cfg.indexing().index().isEnabled()) {
- ProtobufMetadataManagerImpl protobufMetadataManager = (ProtobufMetadataManagerImpl) gcr.getComponent(ProtobufMetadataManager.class);
- SerializationContext serCtx = protobufMetadataManager.getSerializationContext();
-
- log.debugf("Wrapping the SearchWorkCreator for indexed cache %s", cacheName);
- QueryInterceptor queryInterceptor = cr.getComponent(QueryInterceptor.class);
- queryInterceptor.setSearchWorkCreator(new ProtobufValueWrapperSearchWorkCreator(queryInterceptor.getSearchWorkCreator(), serCtx));
- }
- }
- }
} | ISPN-<I> Data not indexed during state transfer in server mode
* move creation of ProtobufValueWrapperSearchWorkCreator to cacheStarting event | infinispan_infinispan | train |
db1b70fbbba63b91dbcaff414bfb7aa932a6635a | diff --git a/server.js b/server.js
index <HASH>..<HASH> 100644
--- a/server.js
+++ b/server.js
@@ -58,14 +58,14 @@ bundles.on('allLoaded', function(allbundles) {
}
if (!allbundles[i].bundleDependencies) {
- log.info("[server.js] Bundle %s has extension with no dependencies", allbundles[i].name);
+ log.debug("[server.js] Bundle %s has extension with no dependencies", allbundles[i].name);
loadExtension(allbundles[i]);
allbundles.splice(i, 1);
break;
}
if (bundleDepsSatisfied(allbundles[i])) {
- log.info("[server.js] Bundle %s has extension with satisfied dependencies", allbundles[i].name);
+ log.debug("[server.js] Bundle %s has extension with satisfied dependencies", allbundles[i].name);
loadExtension(allbundles[i]);
allbundles.splice(i, 1);
break; | whoops, accidentally committed some debugging changes | nodecg_nodecg | train |
1d636c61ac6791a7145fac53a75109c2b5adbf41 | diff --git a/face/geomajas-face-puregwt/client-impl/src/main/java/org/geomajas/puregwt/client/gfx/GfxUtilImpl.java b/face/geomajas-face-puregwt/client-impl/src/main/java/org/geomajas/puregwt/client/gfx/GfxUtilImpl.java
index <HASH>..<HASH> 100644
--- a/face/geomajas-face-puregwt/client-impl/src/main/java/org/geomajas/puregwt/client/gfx/GfxUtilImpl.java
+++ b/face/geomajas-face-puregwt/client-impl/src/main/java/org/geomajas/puregwt/client/gfx/GfxUtilImpl.java
@@ -105,7 +105,7 @@ public final class GfxUtilImpl implements GfxUtil {
private Path toPathPoint(Geometry point) {
if (point.getCoordinates() != null && point.getCoordinates().length == 1) {
Coordinate first = point.getCoordinates()[0];
- return new Path((int) first.getX(), (int) first.getY());
+ return new Path(first.getX(), first.getY());
}
return null;
}
@@ -113,10 +113,10 @@ public final class GfxUtilImpl implements GfxUtil {
private Path toPathLineString(Geometry lineString) {
if (lineString.getCoordinates() != null && lineString.getCoordinates().length > 0) {
Coordinate first = lineString.getCoordinates()[0];
- Path path = new Path((int) first.getX(), (int) first.getY());
+ Path path = new Path(first.getX(), first.getY());
for (int i = 1; i < lineString.getCoordinates().length; i++) {
Coordinate coordinate = lineString.getCoordinates()[i];
- path.lineTo((int) coordinate.getX(), (int) coordinate.getY());
+ path.lineTo(coordinate.getX(), coordinate.getY());
}
return path;
}
@@ -126,10 +126,10 @@ public final class GfxUtilImpl implements GfxUtil {
private Path toPathLinearRing(Geometry linearRing) {
if (linearRing.getCoordinates() != null && linearRing.getCoordinates().length > 0) {
Coordinate first = linearRing.getCoordinates()[0];
- Path path = new Path((int) first.getX(), (int) first.getY());
+ Path path = new Path(first.getX(), first.getY());
for (int i = 1; i < linearRing.getCoordinates().length - 1; i++) {
Coordinate coordinate = linearRing.getCoordinates()[i];
- path.lineTo((int) coordinate.getX(), (int) coordinate.getY());
+ path.lineTo(coordinate.getX(), coordinate.getY());
}
path.close();
path.getElement().getStyle().setProperty("fillRule", "evenOdd");
@@ -144,10 +144,10 @@ public final class GfxUtilImpl implements GfxUtil {
path.getElement().getStyle().setProperty("fillRule", "evenOdd");
for (int i = 1; i < polygon.getGeometries().length; i++) {
Geometry ring = polygon.getGeometries()[i];
- path.moveTo((int) ring.getCoordinates()[0].getX(), (int) ring.getCoordinates()[0].getY());
+ path.moveTo(ring.getCoordinates()[0].getX(), ring.getCoordinates()[0].getY());
for (int j = 1; j < ring.getCoordinates().length - 1; j++) {
Coordinate coordinate = ring.getCoordinates()[j];
- path.lineTo((int) coordinate.getX(), (int) coordinate.getY());
+ path.lineTo(coordinate.getX(), coordinate.getY());
}
path.close();
}
@@ -181,10 +181,10 @@ public final class GfxUtilImpl implements GfxUtil {
Path path = toPathLineString(multiLineString.getGeometries()[0]);
for (int i = 1; i < multiLineString.getGeometries().length; i++) {
Geometry lineString = multiLineString.getGeometries()[i];
- path.moveTo((int) lineString.getCoordinates()[0].getX(), (int) lineString.getCoordinates()[0].getY());
+ path.moveTo(lineString.getCoordinates()[0].getX(), lineString.getCoordinates()[0].getY());
for (int j = 1; j < lineString.getCoordinates().length; j++) {
Coordinate coordinate = lineString.getCoordinates()[j];
- path.lineTo((int) coordinate.getX(), (int) coordinate.getY());
+ path.lineTo(coordinate.getX(), coordinate.getY());
}
}
return path;
@@ -199,10 +199,10 @@ public final class GfxUtilImpl implements GfxUtil {
Geometry polygon = multiPolygon.getGeometries()[i];
for (int j = 0; j < polygon.getGeometries().length; j++) {
Geometry ring = polygon.getGeometries()[0];
- path.moveTo((int) ring.getCoordinates()[0].getX(), (int) ring.getCoordinates()[0].getY());
+ path.moveTo(ring.getCoordinates()[0].getX(), ring.getCoordinates()[0].getY());
for (int k = 1; k < ring.getCoordinates().length; k++) {
Coordinate coordinate = ring.getCoordinates()[k];
- path.lineTo((int) coordinate.getX(), (int) coordinate.getY());
+ path.lineTo(coordinate.getX(), coordinate.getY());
}
path.close();
} | PURE-<I>: When drawing an object on the map using a worldcontainer there are rounding errors when rendering | geomajas_geomajas-project-server | train |
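The geomajas fix drops the `(int)` casts so fractional world coordinates are no longer truncated when building paths. A tiny Python illustration of the error such truncation introduces (coordinates are invented):

```python
coords = [(10.7, 3.2), (10.9, 8.6), (15.4, 8.9)]

for x, y in coords:
    tx, ty = int(x), int(y)           # what the removed casts effectively did
    print(f"({x}, {y}) -> ({tx}, {ty})  lost: ({x - tx:.1f}, {y - ty:.1f})")
# Every vertex can shift by up to a whole unit, which shows up as visible
# rounding errors once the path is rendered at map scale.
```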
9f864b3f36d2db29cb86d49381796b204be89fe8 | diff --git a/lib/aruba/cucumber/command.rb b/lib/aruba/cucumber/command.rb
index <HASH>..<HASH> 100644
--- a/lib/aruba/cucumber/command.rb
+++ b/lib/aruba/cucumber/command.rb
@@ -80,7 +80,7 @@ When(/^I stop the command(?: started last)? if (output|stdout|stderr) contains:$
sleep 0.1
end
end
- rescue ChildProcess::TimeoutError, TimeoutError
+ rescue ChildProcess::TimeoutError, Timeout::Error
last_command_started.terminate
end
end | TimeoutError is deprecated in favor of Timeout::Error | cucumber_aruba | train |
b21c1020c390a37d5edf2c1ebba3a3d4ff3653f4 | diff --git a/components/PaginationBoxView.js b/components/PaginationBoxView.js
index <HASH>..<HASH> 100644
--- a/components/PaginationBoxView.js
+++ b/components/PaginationBoxView.js
@@ -11,6 +11,7 @@ var PaginationBoxView = React.createClass({
previousLabel: React.PropTypes.string,
nextLabel: React.PropTypes.string,
breakLabel: React.PropTypes.string,
+ clickCallback: React.PropTypes.func
},
getDefaultProps: function() {
return {
@@ -27,6 +28,10 @@ var PaginationBoxView = React.createClass({
},
handlePageSelected: function(index) {
this.setState({selected: index});
+
+ if (typeof(this.props.clickCallback) !== "undefined" && typeof(this.props.clickCallback === "function"))
+ this.props.clickCallback({selected: index});
+
return false;
},
handlePreviousPage: function() { | Add a callback when a new page is selected | AdeleD_react-paginate | train |
4ec53746cce52a29abd9b51fef72178018384797 | diff --git a/library/src/main/java/com/sksamuel/jqm4gwt/list/JQMListItem.java b/library/src/main/java/com/sksamuel/jqm4gwt/list/JQMListItem.java
index <HASH>..<HASH> 100644
--- a/library/src/main/java/com/sksamuel/jqm4gwt/list/JQMListItem.java
+++ b/library/src/main/java/com/sksamuel/jqm4gwt/list/JQMListItem.java
@@ -220,9 +220,24 @@ public class JQMListItem extends CustomFlowPanel implements HasText<JQMListItem>
return headerElem != null ? headerElem.getInnerText() : null;
}
- private void moveAnchorChildrenTo(Element elt) {
+ private void moveAnchorChildrenTo(Element elt, Element... excludes) {
+ List<Node> move = new ArrayList<Node>();
for (int k = 0; k < anchor.getChildCount(); k++) {
Node node = anchor.getChild(k);
+ if (excludes.length > 0) {
+ boolean exclude = false;
+ for (int n = 0; n < excludes.length; n++) {
+ if (node == excludes[n]) {
+ exclude = true;
+ break;
+ }
+ }
+ if (exclude) continue;
+ }
+ move.add(node);
+ }
+ for (int i = 0; i < move.size(); i++) {
+ Node node = move.get(i);
anchor.removeChild(node);
elt.appendChild(node);
}
@@ -270,7 +285,8 @@ public class JQMListItem extends CustomFlowPanel implements HasText<JQMListItem>
*/
public JQMListItem removeImage() {
if (imageElem != null) {
- getElement().removeChild(imageElem);
+ if (anchor != null) anchor.removeChild(imageElem);
+ else removeChild(imageElem);
imageElem = null;
}
getElement().removeClassName("ui-li-has-thumb");
@@ -383,7 +399,9 @@ public class JQMListItem extends CustomFlowPanel implements HasText<JQMListItem>
if (imageElem == null) {
imageElem = Document.get().createImageElement();
- insertFirstChild(imageElem); // must be first child according to jquery.mobile-1.4.2.css
+ // must be first child according to jquery.mobile-1.4.2.css
+ if (anchor != null) anchor.insertFirst(imageElem);
+ else insertFirstChild(imageElem);
}
imageElem.setAttribute("src", src);
@@ -594,7 +612,7 @@ public class JQMListItem extends CustomFlowPanel implements HasText<JQMListItem>
LiControlGroup grp = new LiControlGroup(fldSet, "jqm4gwt-li-controls");
groupRoot.add(grp);
- if (anchor != null) moveAnchorChildrenTo(fldSet);
+ if (anchor != null) moveAnchorChildrenTo(fldSet, imageElem/*exclude*/);
controlGroupRoot = groupRoot;
controlGroup = grp;
if (anchor != null) checkAnchorPanel(); | JQMListItem - image element should be under anchor (in controlgroup
mode). | jqm4gwt_jqm4gwt | train |
6a1472371f3a39b8f69fefe49e35671f44c3cec8 | diff --git a/USERS.md b/USERS.md
index <HASH>..<HASH> 100644
--- a/USERS.md
+++ b/USERS.md
@@ -49,6 +49,7 @@ Currently, the following organizations are **officially** using Argo CD:
1. [END.](https://www.endclothing.com/)
1. [Energisme](https://energisme.com/)
1. [Fave](https://myfave.com)
+1. [Fonoa](https://www.fonoa.com/)
1. [Future PLC](https://www.futureplc.com/)
1. [Garner](https://www.garnercorp.com)
1. [G DATA CyberDefense AG](https://www.gdata-software.com/)
diff --git a/controller/metrics/metrics.go b/controller/metrics/metrics.go
index <HASH>..<HASH> 100644
--- a/controller/metrics/metrics.go
+++ b/controller/metrics/metrics.go
@@ -6,8 +6,8 @@ import (
"fmt"
"net/http"
"os"
+ "regexp"
"strconv"
- "strings"
"time"
"github.com/argoproj/gitops-engine/pkg/health"
@@ -196,11 +196,14 @@ func NewMetricsServer(addr string, appLister applister.ApplicationLister, appFil
}, nil
}
+// Prometheus invalid labels, more info: https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels.
+var invalidPromLabelChars = regexp.MustCompile(`[^a-zA-Z0-9_]`)
+
func normalizeLabels(prefix string, appLabels []string) []string {
results := []string{}
for _, label := range appLabels {
//prometheus labels don't accept dash in their name
- curr := strings.ReplaceAll(label, "-", "_")
+ curr := invalidPromLabelChars.ReplaceAllString(label, "_")
result := fmt.Sprintf("%s_%s", prefix, curr)
results = append(results, result)
}
diff --git a/controller/metrics/metrics_test.go b/controller/metrics/metrics_test.go
index <HASH>..<HASH> 100644
--- a/controller/metrics/metrics_test.go
+++ b/controller/metrics/metrics_test.go
@@ -33,6 +33,7 @@ metadata:
labels:
team-name: my-team
team-bu: bu-id
+ argoproj.io/cluster: test-cluster
spec:
destination:
namespace: dummy-namespace
@@ -57,6 +58,7 @@ metadata:
labels:
team-name: my-team
team-bu: bu-id
+ argoproj.io/cluster: test-cluster
spec:
destination:
namespace: dummy-namespace
@@ -87,6 +89,7 @@ metadata:
labels:
team-name: my-team
team-bu: bu-id
+ argoproj.io/cluster: test-cluster
spec:
destination:
namespace: dummy-namespace
@@ -254,14 +257,14 @@ func TestMetricLabels(t *testing.T) {
cases := []testCases{
{
description: "will return the labels metrics successfully",
- metricLabels: []string{"team-name", "team-bu"},
+ metricLabels: []string{"team-name", "team-bu", "argoproj.io/cluster"},
testCombination: testCombination{
applications: []string{fakeApp, fakeApp2, fakeApp3},
responseContains: `
# TYPE argocd_app_labels gauge
-argocd_app_labels{label_team_bu="bu-id",label_team_name="my-team",name="my-app",namespace="argocd",project="important-project"} 1
-argocd_app_labels{label_team_bu="bu-id",label_team_name="my-team",name="my-app-2",namespace="argocd",project="important-project"} 1
-argocd_app_labels{label_team_bu="bu-id",label_team_name="my-team",name="my-app-3",namespace="argocd",project="important-project"} 1
+argocd_app_labels{label_argoproj_io_cluster="test-cluster",label_team_bu="bu-id",label_team_name="my-team",name="my-app",namespace="argocd",project="important-project"} 1
+argocd_app_labels{label_argoproj_io_cluster="test-cluster",label_team_bu="bu-id",label_team_name="my-team",name="my-app-2",namespace="argocd",project="important-project"} 1
+argocd_app_labels{label_argoproj_io_cluster="test-cluster",label_team_bu="bu-id",label_team_name="my-team",name="my-app-3",namespace="argocd",project="important-project"} 1
`,
},
}, | Fix Kubernetes labels normalization for Prometheus (#<I>)
* When adding Kubernetes labels as Prometheus labels, replace all the invalid Prometheus label chars | argoproj_argo-cd | train
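The metrics change replaces the dash-only substitution with a regex that rewrites every character Prometheus forbids in label names (anything outside [a-zA-Z0-9_]). A short Python sketch of the same normalization (prefix and sample labels are illustrative):

```python
import re

INVALID_PROM_LABEL_CHARS = re.compile(r'[^a-zA-Z0-9_]')

def normalize_labels(prefix, app_labels):
    return [f"{prefix}_{INVALID_PROM_LABEL_CHARS.sub('_', label)}"
            for label in app_labels]

print(normalize_labels("label", ["team-name", "argoproj.io/cluster"]))
# ['label_team_name', 'label_argoproj_io_cluster']
```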
cfee95511f780da8addfd98cf1f256526732a2e0 | diff --git a/docs/api/cozy-client.md b/docs/api/cozy-client.md
index <HASH>..<HASH> 100644
--- a/docs/api/cozy-client.md
+++ b/docs/api/cozy-client.md
@@ -333,6 +333,16 @@ we have in the store.</p>
<dd></dd>
<dt><a href="#PermissionItem">PermissionItem</a> : <code>object</code></dt>
<dd></dd>
+<dt><a href="#Permission">Permission</a> : <code>object</code></dt>
+<dd><p>When a cozy to cozy sharing is created Cozy's stack creates a
+shortcut in <code>/Inbox of sharing</code> on the recipient's cozy to have a
+quick access even when the sharing is not accepted yet.</p>
+<p>However, this file is created only if the stack knows the URL of the cozy.
+This is not always the case.</p>
+<p>This method is here to tell us if the shortcut's file is created
+on the recipient's cozy. It can be used to make an UI distinction between the
+both situation.</p>
+</dd>
<dt><a href="#HydratedQueryState">HydratedQueryState</a> ⇒ <code><a href="#HydratedQueryState">HydratedQueryState</a></code></dt>
<dd><p>Returns the query from the store with hydrated documents.</p>
</dd>
@@ -2776,6 +2786,33 @@ Couchdb document like an io.cozy.files
| values | <code>Array.<string></code> | |
| type | <code>string</code> | a couch db database like 'io.cozy.files' |
+<a name="Permission"></a>
+
+## Permission : <code>object</code>
+When a cozy to cozy sharing is created Cozy's stack creates a
+shortcut in `/Inbox of sharing` on the recipient's cozy to have a
+quick access even when the sharing is not accepted yet.
+
+However, this file is created only if the stack knows the URL of the cozy.
+This is not always the case.
+
+This method is here to tell us if the shortcut's file is created
+on the recipient's cozy. It can be used to make an UI distinction between the
+both situation.
+
+**Kind**: global typedef
+
+| Param | Type | Description |
+| --- | --- | --- |
+| permission | [<code>Permission</code>](#Permission) | From getOwnPermissions mainly |
+
+**Properties**
+
+| Name | Type | Description |
+| --- | --- | --- |
+| data | <code>object</code> | Permission document |
+| included | <code>Array</code> | Member information from the sharing |
+
<a name="HydratedQueryState"></a>
## HydratedQueryState ⇒ [<code>HydratedQueryState</code>](#HydratedQueryState)
diff --git a/packages/cozy-client/src/models/permission.js b/packages/cozy-client/src/models/permission.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-client/src/models/permission.js
+++ b/packages/cozy-client/src/models/permission.js
@@ -139,3 +139,32 @@ export async function isDocumentReadOnly(args) {
return undefined
}
}
+
+/**
+ * When a cozy to cozy sharing is created Cozy's stack creates a
+ * shortcut in `/Inbox of sharing` on the recipient's cozy to have a
+ * quick access even when the sharing is not accepted yet.
+ *
+ * However, this file is created only if the stack knows the URL of the cozy.
+ * This is not always the case.
+ *
+ * This method is here to tell us if the shortcut's file is created
+ * on the recipient's cozy. It can be used to make an UI distinction between the
+ * both situation.
+ *
+ * @typedef {object} Permission
+ * @property {object} data Permission document
+ * @property {Array} included Member information from the sharing
+ *
+ * @param {Permission} permission From getOwnPermissions mainly
+ */
+export const isShortcutCreatedOnTheRecipientCozy = permission => {
+ if (!permission.included) return false
+ const sharingMember = permission.included.find(
+ item => item.type === 'io.cozy.sharings.members'
+ )
+ if (sharingMember && sharingMember.attributes.instance) {
+ return true
+ }
+ return false
+}
diff --git a/packages/cozy-client/src/models/permission.spec.js b/packages/cozy-client/src/models/permission.spec.js
index <HASH>..<HASH> 100644
--- a/packages/cozy-client/src/models/permission.spec.js
+++ b/packages/cozy-client/src/models/permission.spec.js
@@ -1,4 +1,9 @@
-import { isReadOnly, isDocumentReadOnly, fetchOwn } from './permission'
+import {
+ isReadOnly,
+ isDocumentReadOnly,
+ fetchOwn,
+ isShortcutCreatedOnTheRecipientCozy
+} from './permission'
function getById(id, doctype) {
const parents = {
@@ -161,3 +166,38 @@ describe('fetchOwn', () => {
])
})
})
+
+describe('isShortcutCreatedOnTheRecipientCozy', () => {
+ it('returns true only when the sharing member has an instance URL', () => {
+ const includedWithInstance = [
+ {
+ type: 'io.cozy.sharings.members',
+ id: '',
+ attributes: {
+ status: 'seen',
+ name: '[email protected]',
+ email: '[email protected]',
+ instance: 'http://q2.cozy.tools:8080'
+ }
+ }
+ ]
+ expect(
+ isShortcutCreatedOnTheRecipientCozy({ included: includedWithInstance })
+ ).toBeTruthy()
+
+ const includedWithoutInstance = [
+ {
+ type: 'io.cozy.sharings.members',
+ id: '',
+ attributes: {
+ status: 'seen',
+ name: '[email protected]',
+ email: '[email protected]'
+ }
+ }
+ ]
+ expect(
+ isShortcutCreatedOnTheRecipientCozy({ included: includedWithoutInstance })
+ ).toBeFalsy()
+ })
+}) | feat: Add isShortcutCreated to permission's model | cozy_cozy-client | train |
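
A minimal usage sketch of the helper introduced in the commit above, mirroring the `./permission` import used in the spec file. The permission document shape (as returned by `fetchOwn`, for instance) and the UI branches are illustrative assumptions, not part of the commit.

```js
import { isShortcutCreatedOnTheRecipientCozy } from './permission'

// Hypothetical permission document for a sharing; `included` carries the
// io.cozy.sharings.members entries, as in the spec fixtures above.
const permission = {
  data: { type: 'io.cozy.permissions', id: 'a1b2c3' },
  included: [
    {
      type: 'io.cozy.sharings.members',
      id: '',
      attributes: {
        status: 'pending',
        name: '[email protected]',
        email: '[email protected]',
        instance: 'http://bob.cozy.tools:8080' // URL known => shortcut was created
      }
    }
  ]
}

if (isShortcutCreatedOnTheRecipientCozy(permission)) {
  // e.g. show "a shortcut is waiting in the recipient's /Inbox of sharing"
} else {
  // e.g. show "the recipient's cozy URL is unknown, no shortcut yet"
}
```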
c08088b640002ebcf0cc34a207bb386ac467b3e1 | diff --git a/compare.py b/compare.py
index <HASH>..<HASH> 100644
--- a/compare.py
+++ b/compare.py
@@ -1368,7 +1368,7 @@ def bootstrap(data_sets, num_samp=1000):
def bootstrap_moment(data1, data2, moment, CI=.95, num_samp=1000):
'''
- A bootstrap two-sample test of kurtosis or kurtosis. Returns the test_statistic
+ A bootstrap two-sample test of a moment. Returns the test_statistic
distribution and the confidence interval as specified by parameter CI.
Parameters
@@ -1377,8 +1377,9 @@ def bootstrap_moment(data1, data2, moment, CI=.95, num_samp=1000):
An array like object containing data
data2 : array-like object
An array-like object containing data
- moment : str
- Either skew or kurtosis
+ moment : list
+ List of strings: 'skew', 'kurtosis', and/or 'variance'. Will calculate the
+ bootstrap CIs for all the moments in the list
CI : float
The desired confidence interval
num_samp : int
@@ -1386,10 +1387,12 @@ def bootstrap_moment(data1, data2, moment, CI=.95, num_samp=1000):
Returns
-------
- : tuple
- A tuple with two elements. The first element is an array containing
- the distribution of the higher moment statistic. The second element is
- a tuple containing the confidence interval (lower_bound, upper_bound).
+ res : dict
+ A dictionary keyed by the strings given in moment. Each key looks up a
+ tuple with two elements. The first element is the observed difference in
+ that moment between data1 and data2. The second element is a tuple
+ containing the bootstrap confidence interval (lower_bound, upper_bound).
Notes
-----
@@ -1398,39 +1401,56 @@ def bootstrap_moment(data1, data2, moment, CI=.95, num_samp=1000):
However, more unit testing and investigation needs to be done.
'''
- # Set the higher order moment
- if moment == 'skew':
- moment_est = skew
- elif moment == 'kurtosis':
- moment_est = kurtosis
data1 = np.array(data1)
data2 = np.array(data2)
-
+ # Bootstrap the data
data1_boot = bootstrap([data1], num_samp=num_samp)[0]
data2_boot = bootstrap([data2], num_samp=num_samp)[0]
+
+ res = {}
+ # Set the higher order moment
+ if 'skew' in moment:
- # data1_samp_kurt = kurtosis([data1])
- # data1_samp_var = variance([data1])
- data1_boot_mom = np.array(moment_est(data1_boot))
- data1_boot_var = np.array(variance(data1_boot))
+ stat_1 = np.array(skew(data1_boot))
+ stat_2 = np.array(skew(data2_boot))
+ stat_dist = skew([data1])[0] - skew([data2])[0]
+ diff = stat_1 - stat_2
- # data2_samp_kurt = kurtosis([data2])
- # data2_samp_var = variance([data2])
- data2_boot_mom = np.array(moment_est(data2_boot))
- data2_boot_var = np.array(variance(data2_boot))
-
- # Test statistic for moment that accounts for variance
- # NOTE: not correcting for bias
- stat_dist = (data1_boot_mom - data2_boot_mom)\
- / (np.sqrt(data1_boot_var + data2_boot_var))
-
- lci = (1 - CI) / 2.
- uci = 1 - lci
- ci = (stats.scoreatpercentile(stat_dist, 100 * lci),\
- stats.scoreatpercentile(stat_dist, 100 * uci))
-
- return stat_dist, ci
+ lci = (1 - CI) / 2.
+ uci = 1 - lci
+ ci = (stats.scoreatpercentile(diff, 100 * lci),\
+ stats.scoreatpercentile(diff, 100 * uci))
+
+ res['skew'] = (stat_dist, ci)
+
+ if 'variance' in moment:
+ stat_1 = np.array(variance(data1_boot))
+ stat_2 = np.array(variance(data2_boot))
+ stat_dist = variance([data1])[0] - variance([data2])[0]
+ diff = stat_1 - stat_2
+
+ lci = (1 - CI) / 2.
+ uci = 1 - lci
+ ci = (stats.scoreatpercentile(diff, 100 * lci),\
+ stats.scoreatpercentile(diff, 100 * uci))
+
+ res['variance'] = (stat_dist, ci)
+
+ if 'kurtosis' in moment:
+ stat_1 = np.array(kurtosis(data1_boot))
+ stat_2 = np.array(kurtosis(data2_boot))
+ stat_dist = kurtosis([data1])[0] - kurtosis([data2])[0]
+ diff = stat_1 - stat_2
+
+ lci = (1 - CI) / 2.
+ uci = 1 - lci
+ ci = (stats.scoreatpercentile(diff, 100 * lci),\
+ stats.scoreatpercentile(diff, 100 * uci))
+
+ res['kurtosis'] = (stat_dist, ci)
+
+ return res
def mean_squared_error(obs, pred):
''' | Adding the ability to bootstrap moments | jkitzes_macroeco | train |
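
A minimal sketch of how the reworked `bootstrap_moment` could be called, assuming `compare.py` is importable and NumPy is installed; the sample data and the significance check are illustrative only, not part of the commit.

```python
import numpy as np
from compare import bootstrap_moment  # assumes compare.py is on the path

np.random.seed(0)
data1 = np.random.lognormal(mean=0.0, sigma=1.0, size=200)
data2 = np.random.lognormal(mean=0.0, sigma=1.5, size=200)

# Request bootstrap confidence intervals for all three supported moments.
res = bootstrap_moment(data1, data2, ['skew', 'variance', 'kurtosis'],
                       CI=0.95, num_samp=500)

for name, (obs_diff, (lower, upper)) in res.items():
    # The samples plausibly differ in this moment when 0 lies outside the CI.
    differs = not (lower <= 0 <= upper)
    print('%s: diff=%.3f, 95%% CI=(%.3f, %.3f), differs=%s'
          % (name, obs_diff, lower, upper, differs))
```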