hash | diff | message | project | split | diff_languages
---|---|---|---|---|---|
24585d5766de31455f538743260ac9a3cca1ab28 | diff --git a/lib/yard/server/commands/frames_command.rb b/lib/yard/server/commands/frames_command.rb
index <HASH>..<HASH> 100644
--- a/lib/yard/server/commands/frames_command.rb
+++ b/lib/yard/server/commands/frames_command.rb
@@ -6,7 +6,9 @@ module YARD
def run
main_url = request.path.gsub(/^(.+)?\/frames\/(#{path})$/, '\1/\2')
- if path && !path.empty?
+ if path =~ %r{^file/}
+ page_title = "File: #{$'}"
+ elsif !path.empty?
page_title = "Object: #{object_path}"
elsif options[:files] && options[:files].size > 0
page_title = "File: #{options[:files].first.sub(/^#{library.source_path}\/?/, '')}" | Fix titles for File urls in frames | lsegal_yard | train | rb |
6d24feba9811c5865d4ed5e634dfe6ebcd2b9d9b | diff --git a/DependencyInjection/DoctrineExtension.php b/DependencyInjection/DoctrineExtension.php
index <HASH>..<HASH> 100755
--- a/DependencyInjection/DoctrineExtension.php
+++ b/DependencyInjection/DoctrineExtension.php
@@ -154,7 +154,7 @@ class DoctrineExtension extends AbstractDoctrineExtension
{
$containerDef = new Definition($container->getParameter('doctrine.dbal.configuration_class'));
$containerDef->setPublic(false);
- if (isset($connection['logging']) && $connection['logging']) {
+ if (isset($connection['container']['logging']) && $connection['container']['logging']) {
$containerDef->addMethodCall('setSQLLogger', array(new Reference('doctrine.dbal.logger')));
}
$container->setDefinition(sprintf('doctrine.dbal.%s_connection.configuration', $connection['name']), $containerDef); | [DoctrineBundle] Fixed loggin in DoctrineExtension not being taken into account | doctrine_DoctrineBundle | train | php |
07f7da5c30608149339086087318afa214b1af63 | diff --git a/tests/rackspace/models/compute_v2/servers_tests.rb b/tests/rackspace/models/compute_v2/servers_tests.rb
index <HASH>..<HASH> 100644
--- a/tests/rackspace/models/compute_v2/servers_tests.rb
+++ b/tests/rackspace/models/compute_v2/servers_tests.rb
@@ -11,4 +11,10 @@ Shindo.tests('Fog::Compute::RackspaceV2 | servers', ['rackspace']) do
collection_tests(service.servers, options, false) do
@instance.wait_for { ready? }
end
+
+ tests("#bootstrap").succeeds do
+ @server = service.servers.bootstrap(options)
+ end
+ @server.destroy
+
end | [rackspace|computev2] aded test for bootstrap | fog_fog | train | rb |
b578d41d981eb504cf7b5d3b703f188e2d17c9ea | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,6 @@ setup(
packages=["geojson"],
package_dir={"geojson": "geojson"},
package_data={"geojson": ["*.rst"]},
- setup_requires=["nose==1.3.0"],
tests_require=["nose==1.3.0", "coverage==3.6"],
install_requires=["setuptools"],
test_suite="nose.collector", | Remove nose from setup_requires. Fixes #<I> | jazzband_python-geojson | train | py |
6eaf85495b6923413f60317abf86b50eee0bfba8 | diff --git a/sphinx-prompt/__init__.py b/sphinx-prompt/__init__.py
index <HASH>..<HASH> 100644
--- a/sphinx-prompt/__init__.py
+++ b/sphinx-prompt/__init__.py
@@ -103,7 +103,7 @@ class PromptDirective(rst.Directive):
).strip('\r\n')
)
statement = []
- line = line[len(prompt):].strip()
+ line = line[len(prompt):].rstrip()
prompt_class = cache.get_prompt_class(prompt)
break | Only strip trailing whitespace from lines to preserve indentation. | sbrunner_sphinx-prompt | train | py |
54130f91cef4e8246bb512486cbecb52aa399c5e | diff --git a/lib/schema/schema/compare/comparison.rb b/lib/schema/schema/compare/comparison.rb
index <HASH>..<HASH> 100644
--- a/lib/schema/schema/compare/comparison.rb
+++ b/lib/schema/schema/compare/comparison.rb
@@ -28,11 +28,11 @@ module Schema
def self.assure_schemas(control, compare)
if not control.is_a?(Schema)
- raise Error, 'Control object is not an implementation of Schema'
+ raise Error, 'Control object is not an implementation of Schema (Control Class: #{control.class.name})'
end
if not compare.is_a?(Schema)
- raise Error, 'Compare object is not an implementation of Schema'
+ raise Error, 'Compare object is not an implementation of Schema (Compare Class: #{compare.class.name})'
end
end | Control and compare classes are printed in the error message when inputs aren't Schema instances | eventide-project_schema | train | rb |
77944d1b313ad532d7f60cd8e7355af04617e4b5 | diff --git a/examples/run_swag.py b/examples/run_swag.py
index <HASH>..<HASH> 100644
--- a/examples/run_swag.py
+++ b/examples/run_swag.py
@@ -509,7 +509,7 @@ def main():
model.eval()
eval_loss, eval_accuracy = 0, 0
nb_eval_steps, nb_eval_examples = 0, 0
- for input_ids, input_mask, segment_ids, label_ids in eval_dataloader:
+ for input_ids, input_mask, segment_ids, label_ids in tqdm(eval_dataloader, desc="Evaluating"):
input_ids = input_ids.to(device)
input_mask = input_mask.to(device)
segment_ids = segment_ids.to(device) | add tqdm to the process of eval
Maybe better. | huggingface_pytorch-pretrained-BERT | train | py |
df45f3bf6ab4287e6b3ce9811cfd809bd96e6c15 | diff --git a/grails-bootstrap/src/main/groovy/org/codehaus/groovy/grails/resolve/GrailsRepoResolver.java b/grails-bootstrap/src/main/groovy/org/codehaus/groovy/grails/resolve/GrailsRepoResolver.java
index <HASH>..<HASH> 100644
--- a/grails-bootstrap/src/main/groovy/org/codehaus/groovy/grails/resolve/GrailsRepoResolver.java
+++ b/grails-bootstrap/src/main/groovy/org/codehaus/groovy/grails/resolve/GrailsRepoResolver.java
@@ -74,7 +74,7 @@ public class GrailsRepoResolver extends URLResolver{
public String transformGrailsRepositoryPattern(ModuleRevisionId mrid, String pattern) {
final String revision = mrid.getRevision();
String versionTag;
- if (revision.equals("latest.integration") || revision.equals("latest")) {
+ if (revision.equals("latest.integration") || revision.equals("latest")|| revision.equals("latest.release")) {
versionTag = "LATEST_RELEASE";
}
else { | fix for GRAILS-<I> "latest.release in plugin does not work in <I> RC2 against main grails repo" | grails_grails-core | train | java |
d098a0b220bc1bd3d9e8bbd432ae842aeae6c0a8 | diff --git a/lib/generators/engine_cart/install_generator.rb b/lib/generators/engine_cart/install_generator.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/engine_cart/install_generator.rb
+++ b/lib/generators/engine_cart/install_generator.rb
@@ -41,7 +41,7 @@ module EngineCart
return if (system('git', 'check-ignore', TEST_APP, '-q') rescue false)
append_file File.expand_path('.gitignore', git_root) do
- "#{EngineCart.destination}\n"
+ "\n#{EngineCart.destination}\n"
end
end | Handling entry into .gitignore
In cases where the last line of the .gitignore is a non-empty line,
this patch ensures that the injected entry isn't concatonated with the
non-empty line. | cbeer_engine_cart | train | rb |
b33f27f84a06766902076e94643fe6743d2ce871 | diff --git a/werkzeug/wsgi.py b/werkzeug/wsgi.py
index <HASH>..<HASH> 100644
--- a/werkzeug/wsgi.py
+++ b/werkzeug/wsgi.py
@@ -691,8 +691,8 @@ def make_line_iter(stream, limit=None, buffer_size=10 * 1024):
new_buf = []
for item in chain(buffer, new_data.splitlines(True)):
new_buf.append(item)
- if item and item[-1:] in b'\r\n':
- yield b''.join(new_buf)
+ if item and item[-1:] in '\r\n':
+ yield ''.join(new_buf)
new_buf = []
buffer = new_buf
if buffer: | Make make_line_iter operate on native strings
Previously it was inconsistently broken, now it is consistently broken
and produces more errors. That's not as bad as it sounds. | pallets_werkzeug | train | py |
3fbba1eee0ca1b2bd2a34085facc4f667fddb300 | diff --git a/networkzero/core.py b/networkzero/core.py
index <HASH>..<HASH> 100644
--- a/networkzero/core.py
+++ b/networkzero/core.py
@@ -267,6 +267,7 @@ def address(address=None):
except socket.gaierror as exc:
raise InvalidAddressError(host_or_ip, exc.errno)
+ _logger.debug("About to return %s:%s", ip, port)
return "%s:%s" % (ip, port)
split_command = shlex.split
\ No newline at end of file
diff --git a/networkzero/sockets.py b/networkzero/sockets.py
index <HASH>..<HASH> 100644
--- a/networkzero/sockets.py
+++ b/networkzero/sockets.py
@@ -144,7 +144,10 @@ class Sockets:
def send_reply(self, address, reply):
socket = self.get_socket(address, zmq.REP)
- return socket.send(_serialise(reply))
+ _logger.debug("Got socket for reply: %s", socket)
+ reply = _serialise(reply)
+ _logger.debug("Reply is: %r", reply)
+ return socket.send(reply)
def send_notification(self, address, topic, data):
socket = self.get_socket(address, zmq.PUB) | Put a bit of extra tracing in | tjguk_networkzero | train | py,py |
0f770ac9387e755050805df4ecac6c78218e033c | diff --git a/test/test_ops.py b/test/test_ops.py
index <HASH>..<HASH> 100644
--- a/test/test_ops.py
+++ b/test/test_ops.py
@@ -46,9 +46,11 @@ class RoIOpTester(ABC):
tol = 1e-3 if (x_dtype is torch.half or rois_dtype is torch.half) else 1e-5
torch.testing.assert_close(gt_y.to(y), y, rtol=tol, atol=tol)
+ @pytest.mark.parametrize("seed", range(10))
@pytest.mark.parametrize("device", cpu_and_gpu())
@pytest.mark.parametrize("contiguous", (True, False))
- def test_backward(self, device, contiguous):
+ def test_backward(self, seed, device, contiguous):
+ torch.random.manual_seed(seed)
pool_size = 2
x = torch.rand(1, 2 * (pool_size ** 2), 5, 5, dtype=self.dtype, device=device, requires_grad=True)
if not contiguous: | Setting seeds for TestRoiPool backward. (#<I>) | pytorch_vision | train | py |
55670558ed11979caedc7f8240ffbb523b9f9233 | diff --git a/openquake/engine/db/models.py b/openquake/engine/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/db/models.py
+++ b/openquake/engine/db/models.py
@@ -97,7 +97,7 @@ LOSS_TYPES = ["structural", "nonstructural", "fatalities", "contents"]
#: relative tolerance to consider two risk outputs (almost) equal
-RISK_RTOL = 0.05
+RISK_RTOL = 0.08
#: absolute tolerance to consider two risk outputs (almost) equal | increased tolerance to 8%
Former-commit-id: c<I>e7d4f<I>bdfc7a6ae2ec8d<I>dc9cf<I>c<I> | gem_oq-engine | train | py |
34b91b1c2dd28d2e72286e8f4caa4c648786f135 | diff --git a/openid/consumer/discover.py b/openid/consumer/discover.py
index <HASH>..<HASH> 100644
--- a/openid/consumer/discover.py
+++ b/openid/consumer/discover.py
@@ -433,7 +433,7 @@ def discoverXRI(iname):
def discoverNoYadis(uri):
http_resp = fetchers.fetch(uri)
- if http_resp.status != 200:
+ if http_resp.status not in (200, 206):
raise DiscoveryFailure(
'HTTP Response status from identity URL host is not 200. '
'Got status %r' % (http_resp.status,), http_resp) | [project @ Add <I> status check to openid.consumer.discover] | openid_python-openid | train | py |
2f0144169bb25bc396fb84480a40e6f4042bf090 | diff --git a/BimServer/src/org/bimserver/geometry/GeometryGenerationReport.java b/BimServer/src/org/bimserver/geometry/GeometryGenerationReport.java
index <HASH>..<HASH> 100644
--- a/BimServer/src/org/bimserver/geometry/GeometryGenerationReport.java
+++ b/BimServer/src/org/bimserver/geometry/GeometryGenerationReport.java
@@ -26,7 +26,7 @@ import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
-import java.util.TreeMap;
+import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.bimserver.emf.Schema;
@@ -59,7 +59,7 @@ public class GeometryGenerationReport {
private int numberOfTriangles;
private int numberOfTrianglesIncludingReuse;
private boolean reuseGeometry;
- private Map<Integer, String> debugFiles = new TreeMap<>();
+ private final Map<Integer, String> debugFiles = new ConcurrentSkipListMap<>();
public synchronized void incrementTriangles(int triangles) {
this.numberOfTriangles += triangles; | Fixed concurrency issue when writing debug files (endless loop...) | opensourceBIM_BIMserver | train | java |
066828c8c21021810cbc3ee0d7560307d2ee42ac | diff --git a/libraries/joomla/application/categories.php b/libraries/joomla/application/categories.php
index <HASH>..<HASH> 100644
--- a/libraries/joomla/application/categories.php
+++ b/libraries/joomla/application/categories.php
@@ -281,7 +281,7 @@ class JCategories
$this->_nodes[$result->id] = new JCategoryNode($result, $this);
// If this is not root and if the current node's parent is in the list or the current node parent is 0
- if ($result->id != 'root' && (isset($this->_nodes[$result->parent_id]) || $result->parent_id == 0)) {
+ if ($result->id != 'root' && (isset($this->_nodes[$result->parent_id]) || $result->parent_id == 1)) {
// Compute relationship between node and its parent - set the parent in the _nodes field
$this->_nodes[$result->id]->setParent($this->_nodes[$result->parent_id]);
}
@@ -648,7 +648,9 @@ class JCategoryNode extends JObject
$this->_parent = & $parent;
if ($this->id != 'root') {
- $this->_path = $parent->getPath();
+ if ($this->parent_id != 1 ) {
+ $this->_path = $parent->getPath();
+ }
$this->_path[] = $this->id.':'.$this->alias;
} | Correct fatal error when a category id of 0 is passed in; this was
exposed as a problem with system test failurs on the CMS whem there
was a failure to get the module params correctly. With this hange fall
back to default works. | joomla_joomla-framework | train | php |
cff9d3e294fdf074c8cdeded7e756e442c77e453 | diff --git a/src/client/index.js b/src/client/index.js
index <HASH>..<HASH> 100644
--- a/src/client/index.js
+++ b/src/client/index.js
@@ -115,7 +115,7 @@ const setOptions = ({
}
// Universal method (client + server)
-const getSession = async ({ req, ctx, triggerEvent = true } = {}) => {
+export const getSession = async ({ req, ctx, triggerEvent = true } = {}) => {
// If passed 'appContext' via getInitialProps() in _app.js then get the req
// object from ctx and use that for the req value to allow getSession() to
// work seemlessly in getInitialProps() on server side pages *and* in _app.js. | fix: export getSession [skip release]
somehow the default export does not work in the dev app | iaincollins_next-auth | train | js |
2b214bc1dc2283a22e54eab5769b837df558c487 | diff --git a/auth/db/auth.php b/auth/db/auth.php
index <HASH>..<HASH> 100644
--- a/auth/db/auth.php
+++ b/auth/db/auth.php
@@ -334,10 +334,12 @@ class auth_plugin_db extends auth_plugin_base {
// simplify down to usernames
$usernames = array();
- foreach ($users as $user) {
- array_push($usernames, $user->username);
+ if (!empty($users)) {
+ foreach ($users as $user) {
+ array_push($usernames, $user->username);
+ }
+ unset($users);
}
- unset($users);
$add_users = array_diff($userlist, $usernames);
unset($usernames); | Merged from MOODLE_<I>_STABLE: MDL-<I> - auth/db - suppress php warning when there are no users using db auth | moodle_moodle | train | php |
de199f5fdf0301ef8780cea395dc44788a38410a | diff --git a/sim_access.py b/sim_access.py
index <HASH>..<HASH> 100644
--- a/sim_access.py
+++ b/sim_access.py
@@ -6,9 +6,9 @@ import difflib
# read the contents of /etc/authorization
with open('/etc/authorization','r') as file:
content = file.read()
-match = re.search('<key>system.privilege.taskport.debug</key>\s*\n\s*<dict>\n\s*<key>allow-root</key>\n\s*(<[^>]+>)',content)
+match = re.search('<key>system.privilege.taskport</key>\s*\n\s*<dict>\n\s*<key>allow-root</key>\n\s*(<[^>]+>)',content)
if match is None:
- raise Exception('Could not find the system.privilege.taskport.debug key in /etc/authorization')
+ raise Exception('Could not find the system.privilege.taskport key in /etc/authorization')
elif re.search('<false/>', match.group(0)) is None:
print '/etc/authorization has already been modified'
exit(0) | removing the debug key - this is not the cause of the issue. Travis have opened a bug to resolve this on their VMs | ios-driver_ios-driver | train | py |
c89ace19b4b6d5c4f6feb8e02613243133434e70 | diff --git a/code/transform/QueuedExternalContentImporter.php b/code/transform/QueuedExternalContentImporter.php
index <HASH>..<HASH> 100644
--- a/code/transform/QueuedExternalContentImporter.php
+++ b/code/transform/QueuedExternalContentImporter.php
@@ -61,8 +61,8 @@ abstract class QueuedExternalContentImporter extends AbstractQueuedJob {
foreach ($children as $child) {
$count++;
if ($count > 20) {
- $this->totalSteps = 20;
- return QueuedJob::LARGE;
+ $this->totalSteps = $count;
+ return QueuedJob::QUEUED;
}
$subChildren = $child->stageChildren();
@@ -70,8 +70,8 @@ abstract class QueuedExternalContentImporter extends AbstractQueuedJob {
foreach ($subChildren as $sub) {
$count++;
if ($count > 20) {
- $this->totalSteps = 20;
- return QueuedJob::LARGE;
+ $this->totalSteps = $count;
+ return QueuedJob::QUEUED;
}
}
} | Changed queued external importer to use the normal queue | nyeholt_silverstripe-external-content | train | php |
7c3e1129a83c3ae8a9be54dadf78ae0b5f55ee99 | diff --git a/src/components/container_factory/container_factory.js b/src/components/container_factory/container_factory.js
index <HASH>..<HASH> 100644
--- a/src/components/container_factory/container_factory.js
+++ b/src/components/container_factory/container_factory.js
@@ -24,7 +24,7 @@ var ContainerFactory = BaseObject.extend({
}.bind(this));
},
findPlaybackPlugin: function(source) {
- return _.find(this.loader.playbackPlugins, function(p) { return p.canPlay(source) }, this);
+ return _.find(this.loader.playbackPlugins, function(p) { return p.canPlay("" + source) }, this);
},
createContainer: function(source) {
var playbackPlugin = this.findPlaybackPlugin(source); | container factory: change source type to string to ensure that canplay works correctly | clappr_clappr | train | js |
9131e5cdc919899894255a8b5c94d88e4289f51c | diff --git a/jsonschema/cli.py b/jsonschema/cli.py
index <HASH>..<HASH> 100644
--- a/jsonschema/cli.py
+++ b/jsonschema/cli.py
@@ -22,10 +22,6 @@ def _json_file(path):
return json.load(file)
-def _read_from_stdin(stdin):
- return json.loads(stdin.read())
-
-
parser = argparse.ArgumentParser(
description="JSON Schema Validation CLI",
)
@@ -87,7 +83,7 @@ def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin):
validator.check_schema(arguments["schema"])
errored = False
- for instance in arguments["instances"] or (_read_from_stdin(stdin),):
+ for instance in arguments["instances"] or [json.load(stdin)]:
for error in validator.iter_errors(instance):
stderr.write(error_format.format(error=error))
errored = True | Might as well just inline it. | Julian_jsonschema | train | py |
d4168f2e28945f4d9a0449bcf0d3c08ea7aefb67 | diff --git a/lib/shellter.rb b/lib/shellter.rb
index <HASH>..<HASH> 100644
--- a/lib/shellter.rb
+++ b/lib/shellter.rb
@@ -1,5 +1,4 @@
require 'escape'
-require 'popen4'
require 'shellter/core_ext'
diff --git a/lib/shellter/version.rb b/lib/shellter/version.rb
index <HASH>..<HASH> 100644
--- a/lib/shellter/version.rb
+++ b/lib/shellter/version.rb
@@ -1,3 +1,3 @@
module Shellter
- VERSION = "0.9.2"
+ VERSION = "0.9.3"
end | got rid of explicit popen4 requirement | ebertech_shellter | train | rb,rb |
dc3c211820e144e5921983a56ca7c80f6f601228 | diff --git a/test/Liip/RMT/Tests/Functional/TestsCheckTest.php b/test/Liip/RMT/Tests/Functional/TestsCheckTest.php
index <HASH>..<HASH> 100644
--- a/test/Liip/RMT/Tests/Functional/TestsCheckTest.php
+++ b/test/Liip/RMT/Tests/Functional/TestsCheckTest.php
@@ -10,10 +10,10 @@ class TestsCheckTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
- $informationCollector = $this->createMock('Liip\RMT\Information\InformationCollector');
+ $informationCollector = $this->getMock('Liip\RMT\Information\InformationCollector');
$informationCollector->method('getValueFor')->with(TestsCheck::SKIP_OPTION)->willReturn(false);
- $output = $this->createMock('Symfony\Component\Console\Output\OutputInterface');
+ $output = $this->getMock('Symfony\Component\Console\Output\OutputInterface');
$output->method('write');
$context = Context::getInstance(); | Make mocks added in test-check timeout tests PHPUnit <I>-compatible | liip_RMT | train | php |
0358658ff0097826251fef0fd67158529ccd8d16 | diff --git a/lib/wally/application.rb b/lib/wally/application.rb
index <HASH>..<HASH> 100644
--- a/lib/wally/application.rb
+++ b/lib/wally/application.rb
@@ -11,7 +11,7 @@ end
if ENV["MONGOHQ_URL"]
Mongoid.configure do |config|
- config.master = Mongo::Connection.from_uri(ENV["MONGOHQ_URL"]).db
+ config.master = Mongo::Connection.from_uri(ENV["MONGOHQ_URL"]).db("wally")
end
else
Mongoid.configure do |config| | Mongohq url still failing | BBC-News_wally | train | rb |
d58e65b4595fc1999fe91d1002cc7b0a1e54bc0c | diff --git a/poetry/console/commands/remove.py b/poetry/console/commands/remove.py
index <HASH>..<HASH> 100644
--- a/poetry/console/commands/remove.py
+++ b/poetry/console/commands/remove.py
@@ -39,7 +39,7 @@ list of installed packages
for key in poetry_content[section]:
if key.lower() == name.lower():
found = True
- requirements[name] = poetry_content[section][name]
+ requirements[key] = poetry_content[section][key]
break
if not found: | Fix remove's case insensitivity (#<I>) | sdispater_poetry | train | py |
1507e84e27badb53cc75161f16051ebd655214c6 | diff --git a/src/main/java/com/github/greengerong/PrerenderSeoService.java b/src/main/java/com/github/greengerong/PrerenderSeoService.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/github/greengerong/PrerenderSeoService.java
+++ b/src/main/java/com/github/greengerong/PrerenderSeoService.java
@@ -262,7 +262,8 @@ public class PrerenderSeoService {
return from(prerenderConfig.getExtensionsToIgnore()).anyMatch(new Predicate<String>() {
@Override
public boolean apply(String item) {
- return url.contains(item.toLowerCase());
+ return (url.indexOf('?') >= 0 ? url.substring(0, url.indexOf('?')) : url)
+ .toLowerCase().endsWith(item);
}
});
} | Improved file extension checking.
Updated logic to look for file extensions at the end of the URL, or just before the first question mark if one is present. This should prevent matching on subdomains or URL parameter values. | greengerong_prerender-java | train | java |
7fc56095bc354b57520914b4e1222e7f98e506f8 | diff --git a/hcl/structure.go b/hcl/structure.go
index <HASH>..<HASH> 100644
--- a/hcl/structure.go
+++ b/hcl/structure.go
@@ -33,9 +33,9 @@ type Blocks []*Block
type Attributes map[string]*Attribute
// Body is a container for attributes and blocks. It serves as the primary
-// unit of heirarchical structure within configuration.
+// unit of hierarchical structure within configuration.
//
-// The content of a body cannot be meaningfully intepreted without a schema,
+// The content of a body cannot be meaningfully interpreted without a schema,
// so Body represents the raw body content and has methods that allow the
// content to be extracted in terms of a given schema.
type Body interface { | hcl: fix minor typos in docs (#<I>) | hashicorp_hcl | train | go |
ee82ed7d5b86ce5dc6aafa1845871b2b31cd59d8 | diff --git a/src/frontend/org/voltdb/AdmissionControlGroup.java b/src/frontend/org/voltdb/AdmissionControlGroup.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/AdmissionControlGroup.java
+++ b/src/frontend/org/voltdb/AdmissionControlGroup.java
@@ -290,7 +290,11 @@ public class AdmissionControlGroup implements org.voltcore.network.QueueMonitor
procInfoMap.put(procedureName, info);
}
info.processInvocation((int)TimeUnit.NANOSECONDS.toMillis(deltaNanos), status);
- m_latencyInfo.recordValue(Math.max(1, Math.min(TimeUnit.NANOSECONDS.toMicros(deltaNanos), m_latencyInfo.getHighestTrackableValue())));
+ // ENG-7209 This is to not log the latency value for a snapshot restore, as this just creates
+ // a large initial value in the graph which is not actually relevant to the user.
+ if (!procedureName.equals("@SnapshotRestore")) {
+ m_latencyInfo.recordValue(Math.max(1, Math.min(TimeUnit.NANOSECONDS.toMicros(deltaNanos), m_latencyInfo.getHighestTrackableValue())));
+ }
if (needToInsert) {
m_connectionStates.put(connectionId, procInfoMap);
} | ENG-<I> removed @SnapshotRestore from latency stats | VoltDB_voltdb | train | java |
026f435167c3610247be778838e1cfab60f78db0 | diff --git a/shared/util.go b/shared/util.go
index <HASH>..<HASH> 100644
--- a/shared/util.go
+++ b/shared/util.go
@@ -385,7 +385,7 @@ func isSharedMount(file *os.File, pathName string) int {
for scanner.Scan() {
line := scanner.Text()
rows := strings.Fields(line)
- if !strings.HasSuffix(pathName, rows[3]) || rows[4] != pathName {
+ if !strings.HasSuffix(pathName, rows[3]) && rows[4] != pathName {
continue
}
if strings.HasPrefix(rows[6], "shared:") { | Fix logic for sharedmount check. Fixes #<I> | lxc_lxd | train | go |
37f7fa2ed564145ed3d647548d38195f34e0d4b4 | diff --git a/ayrton/tests/test_castt.py b/ayrton/tests/test_castt.py
index <HASH>..<HASH> 100644
--- a/ayrton/tests/test_castt.py
+++ b/ayrton/tests/test_castt.py
@@ -48,6 +48,16 @@ class TestBinding (unittest.TestCase):
self.assertTrue ('os' in self.c.seen_names)
+ def testTryExcept (self):
+ t= ast.parse ("""try:
+ foo()
+except Exception as e:
+ pass""")
+
+ t= self.c.modify (t)
+
+ self.assertTrue ('e' in self.c.seen_names)
+
def parse_expression (s):
# Module(body=[Expr(value=...)])
return ast.parse (s).body[0].value | [+] test handling names defined in except: clauses. | StyXman_ayrton | train | py |
b2755fd18b3cd4af434e413d5d6effad08e7ed6a | diff --git a/reana_commons/models.py b/reana_commons/models.py
index <HASH>..<HASH> 100644
--- a/reana_commons/models.py
+++ b/reana_commons/models.py
@@ -90,7 +90,7 @@ class Workflow(Base, Timestamp):
id_ = Column(UUIDType, primary_key=True)
name = Column(String(255))
run_number = Column(Integer)
- workspace_path = Column(String(255))
+ workspace_path = Column(String(2048))
status = Column(Enum(WorkflowStatus), default=WorkflowStatus.created)
owner_id = Column(UUIDType, ForeignKey('user_.id_'))
specification = Column(JSONType)
@@ -196,8 +196,8 @@ class Job(Base, Timestamp):
shared_file_system = Column(Boolean)
docker_img = Column(String(256))
experiment = Column(String(256))
- cmd = Column(String(1024))
- env_vars = Column(String(1024))
+ cmd = Column(String(10000))
+ env_vars = Column(String(10000))
restart_count = Column(Integer)
max_restart_count = Column(Integer)
deleted = Column(Boolean) | models: increase string sizes
* Increases the sizes of string fields that could
grow large. | reanahub_reana-commons | train | py |
6508cb37369dafaefb8e70e102536df139932b92 | diff --git a/map/__init__.py b/map/__init__.py
index <HASH>..<HASH> 100644
--- a/map/__init__.py
+++ b/map/__init__.py
@@ -1 +1 @@
-__all__ = ['MapArgumentParser', 'MapConstants', 'mapper', 'version']
+__all__ = ['map_argument_parser', 'map_constants', 'mapper', 'version'] | Updated due to the renaming of modules. | THLO_map | train | py |
7d25e3b164a830a106d5467dc5ff5914ff4bdc5c | diff --git a/src/PeskyCMF/Db/Traits/ResetsPasswordsViaAccessKey.php b/src/PeskyCMF/Db/Traits/ResetsPasswordsViaAccessKey.php
index <HASH>..<HASH> 100644
--- a/src/PeskyCMF/Db/Traits/ResetsPasswordsViaAccessKey.php
+++ b/src/PeskyCMF/Db/Traits/ResetsPasswordsViaAccessKey.php
@@ -19,6 +19,7 @@ trait ResetsPasswordsViaAccessKey {
'account_id' => $this->_getPkValue(),
'expires_at' => time() + config('auth.passwords.' . \Auth::getDefaultDriver() . 'expire', 60) * 60,
];
+ $this->reload(); //< needed to exclude situation with outdated data
foreach ($this->getAdditionalFieldsForPasswordRecoveryAccessKey() as $fieldName) {
$data[$fieldName] = $this->_getFieldValue($fieldName);
} | ResetsPasswordsViaAccessKey->getPasswordRecoveryAccessKey() - added user data reloading to fix problems with outdated timestamps | swayok_PeskyCMF | train | php |
e9b1cd10bd7887d5ee19bcfe297ca6958858e551 | diff --git a/lib/http/client.rb b/lib/http/client.rb
index <HASH>..<HASH> 100644
--- a/lib/http/client.rb
+++ b/lib/http/client.rb
@@ -3,13 +3,7 @@ module Http
class Client
# I swear I'll document that nebulous options hash
def initialize(uri, options = {})
- if uri.is_a? URI
- @uri = uri
- else
- # Why the FUCK can't Net::HTTP do this?
- @uri = URI(uri.to_s)
- end
-
+ @uri = uri
@options = {:response => :object}.merge(options)
end
@@ -88,8 +82,10 @@ module Http
private
def raw_http_call(method, uri, headers, form_data = nil)
- # Ensure uri and stringify keys :/
- uri = URI(uri.to_s) unless uri.is_a? URI
+ # Why the FUCK can't Net::HTTP do this?
+ uri = URI(uri.to_s) unless uri.is_a? URI
+
+ # Stringify keys :/
headers = Hash[headers.map{|k,v| [k.to_s, v]}]
http = Net::HTTP.new(uri.host, uri.port) | Last responsible moment for coercing URI | httprb_http | train | rb |
a07846b05c6ea9332e1d3892bda0ac4e94ece6f7 | diff --git a/adapters/src/main/java/org/jboss/jca/adapters/jdbc/WrappedConnection.java b/adapters/src/main/java/org/jboss/jca/adapters/jdbc/WrappedConnection.java
index <HASH>..<HASH> 100644
--- a/adapters/src/main/java/org/jboss/jca/adapters/jdbc/WrappedConnection.java
+++ b/adapters/src/main/java/org/jboss/jca/adapters/jdbc/WrappedConnection.java
@@ -2140,7 +2140,7 @@ public abstract class WrappedConnection extends JBossWrapper implements Connecti
private void sqlConnectionNotifyRequestBegin()
{
- Optional<MethodHandle> mh = mc.getEndRequestNotify();
+ Optional<MethodHandle> mh = mc.getBeginRequestNotify();
if (mh == null)
{
mh = lookupNotifyMethod("beginRequest"); | [JBJCA-<I>] JDBC adapter WrappedConnection: fix beginRequest lookup bug | ironjacamar_ironjacamar | train | java |
0a9f4392f03d02c7e67f1e3908768f48245e5a1e | diff --git a/bokeh/protocol.py b/bokeh/protocol.py
index <HASH>..<HASH> 100644
--- a/bokeh/protocol.py
+++ b/bokeh/protocol.py
@@ -108,7 +108,8 @@ class BokehJSONEncoder(json.JSONEncoder):
return self.transform_python_types(obj)
def serialize_json(obj, encoder=BokehJSONEncoder, **kwargs):
- return json.dumps(obj, cls=encoder, **kwargs)
+ rslt = json.dumps(obj, cls=encoder, **kwargs)
+ return rslt
deserialize_json = json.loads | Easier debugging. | bokeh_bokeh | train | py |
10aa2306ae01d27bff8374b99802f30ec572399e | diff --git a/cli.js b/cli.js
index <HASH>..<HASH> 100755
--- a/cli.js
+++ b/cli.js
@@ -23,6 +23,7 @@ var optimist = require('optimist')
var couchjs = require('./couchjs')
var console = require('./console')
+var LineStream = require('./stream')
var INPUT = { 'waiting': false
, 'queue' : []
@@ -54,11 +55,13 @@ function main() {
if(er)
throw er
+ var stdin = new LineStream
+ stdin.on('data', couchjs.stdin)
+
process.stdin.setEncoding('utf8')
- process.stdin.on('data', couchjs.stdin)
+ process.stdin.pipe(stdin)
process.stdin.resume()
-
; [Error, Function].forEach(function(type) {
type.prototype.toSource = type.prototype.toSource || toSource
type.prototype.toString = type.prototype.toString || toSource | Pipe stdin through the LineStream | iriscouch_couchjs | train | js |
cba6d1dcaee769f1cf7e6dd383d49a70a68b3e59 | diff --git a/src/RocknRoot/StrayFw/Database/Postgres/Schema.php b/src/RocknRoot/StrayFw/Database/Postgres/Schema.php
index <HASH>..<HASH> 100644
--- a/src/RocknRoot/StrayFw/Database/Postgres/Schema.php
+++ b/src/RocknRoot/StrayFw/Database/Postgres/Schema.php
@@ -71,6 +71,8 @@ class Schema extends ProviderSchema
*/
private function buildEnum($enumName, array $enumDefinition)
{
+ $mapping = Mapping::get($this->mapping);
+ $definition = $this->getDefinition();
$database = GlobalDatabase::get($mapping['config']['database']);
$enumRealName = null;
@@ -116,6 +118,8 @@ class Schema extends ProviderSchema
*/
private function buildModel($modelName, array $modelDefinition)
{
+ $mapping = Mapping::get($this->mapping);
+ $definition = $this->getDefinition();
$database = GlobalDatabase::get($mapping['config']['database']);
$tableName = null; | fix #<I> SQL data building | RocknRoot_strayFw | train | php |
333b1cbfb9c8df6e9f3e74401fa1ba5882e143dd | diff --git a/osbs/build/spec.py b/osbs/build/spec.py
index <HASH>..<HASH> 100644
--- a/osbs/build/spec.py
+++ b/osbs/build/spec.py
@@ -315,12 +315,13 @@ class BuildSpec(object):
self.name.value = make_name_from_git(self.git_uri.value, self.git_branch.value)
self.group_manifests.value = group_manifests or False
self.prefer_schema1_digest.value = prefer_schema1_digest
+ self.builder_build_json_dir.value = builder_build_json_dir
if not flatpak:
if not base_image:
raise OsbsValidationException("base_image must be provided")
self.trigger_imagestreamtag.value = get_imagestreamtag_from_image(base_image)
- self.builder_build_json_dir.value = builder_build_json_dir
+
if not name_label:
raise OsbsValidationException("name_label must be provided")
self.imagestream_name.value = name_label.replace('/', '-') | spec.py: set builder_build_json_dir value for flatpaks too | projectatomic_osbs-client | train | py |
55a96d5e2314ac75280bea67e777e5f07e4d8b73 | diff --git a/cmd/broker.go b/cmd/broker.go
index <HASH>..<HASH> 100644
--- a/cmd/broker.go
+++ b/cmd/broker.go
@@ -50,6 +50,7 @@ and personalized devices (with their network session keys) with the router.
}
hdlAdapter.Bind(handlers.Collect{})
hdlAdapter.Bind(handlers.PubSub{})
+ hdlAdapter.Bind(handlers.Applications{})
hdlAdapter.Bind(handlers.StatusPage{})
// Instantiate Storage | [issue#<I>] Add Applications handler to broker cli | TheThingsNetwork_ttn | train | go |
201c6678054d74ed6254d5cd3a60adc9125b9098 | diff --git a/api/client.js b/api/client.js
index <HASH>..<HASH> 100644
--- a/api/client.js
+++ b/api/client.js
@@ -57,7 +57,7 @@ module.exports = new (function(){
*/
this.observe = function(itemID, propName, callback) {
if (typeof itemID != 'number' || !propName || !callback) { log("observe requires three arguments", itemId, propName, callback); }
- var propertyChain = propName.split('.')
+ var propertyChain = (typeof propName == 'string' ? string.split('.') : propName
return this._observeChain(itemID, propertyChain, 0, callback, {})
} | Allow for a subscription chain to be passed in as an array, in addition to as a period "." seperated string | marcuswestin_fin | train | js |
7d0d3d3529c61af2bb4cb0dc3cd89ea07e7aa9d8 | diff --git a/src/main/java/hex/glm/GLM2.java b/src/main/java/hex/glm/GLM2.java
index <HASH>..<HASH> 100644
--- a/src/main/java/hex/glm/GLM2.java
+++ b/src/main/java/hex/glm/GLM2.java
@@ -562,7 +562,7 @@ public class GLM2 extends ModelJob {
callback.addToPendingCount(n_folds-1);
double proximal_penalty = 0;
for(int i = 0; i < n_folds; ++i)
- new GLM2(this.description + "xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),proximal_penalty).
+ new GLM2(this.description + "xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),higher_accuracy, proximal_penalty).
run(callback);
} | GLM2 update. added missing change from my previous commit. | h2oai_h2o-2 | train | java |
5dbd234048824bfc8e4db425737b974e28e5687f | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -153,7 +153,8 @@ Keeper.prototype._doPut = function (key, value) {
.then(function (exists) {
if (exists) {
debug('put aborted, value exists', key)
- throw new Error('value for this key already exists in Keeper')
+ return exists
+ // throw new Error('value for this key already exists in Keeper')
}
return self._save(key, value)
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -25,6 +25,22 @@ test('test invalid keys', function (t) {
.done()
})
+test('put same data twice', function (t) {
+ t.plan(1)
+
+ var keeper = new Keeper({
+ storage: testDir
+ })
+
+ put()
+ .then(put)
+ .done(t.pass)
+
+ function put () {
+ return keeper.put('64fe16cc8a0c61c06bc403e02f515ce5614a35f1', new Buffer('1'))
+ }
+})
+
test('put, get', function (t) {
var keeper = new Keeper({
storage: testDir | don't throw err on put for existing key/val | tradle_offline-keeper | train | js,js |
8e32dde1aa4ec507f0ef901337e8281689404cf5 | diff --git a/tasks/lib/uglify.js b/tasks/lib/uglify.js
index <HASH>..<HASH> 100644
--- a/tasks/lib/uglify.js
+++ b/tasks/lib/uglify.js
@@ -152,7 +152,7 @@ exports.init = function(grunt) {
}
if (options.indentLevel !== undefined) {
- outputOptions.indent_level = options.indentLevel
+ outputOptions.indent_level = options.indentLevel;
}
return outputOptions; | Add missing semicolon in `tasks/lib/uglify.js`
JSHint didn't like it not being there. | gruntjs_grunt-contrib-uglify | train | js |
4465efbaa64fad192aeebbd2db8fdc5cd124e692 | diff --git a/eZ/Publish/Core/REST/Server/Input/Parser/Query.php b/eZ/Publish/Core/REST/Server/Input/Parser/Query.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/REST/Server/Input/Parser/Query.php
+++ b/eZ/Publish/Core/REST/Server/Input/Parser/Query.php
@@ -29,10 +29,10 @@ abstract class Query extends CriterionParser
{
$query = $this->buildQuery();
- // Criteria
+ // @deprecated Criteria
// -- FullTextCriterion
if (array_key_exists('Criteria', $data) && is_array($data['Criteria'])) {
- $message = 'The Criteria element is deprecated since ezpublish-kernel 6.6.0, and will be removed in 7.0. Use Filter instead.';
+ $message = 'The Criteria element is deprecated since ezpublish-kernel 6.6, and will be removed in 8.0. Use Filter instead, or Query for criteria that should affect scoring.';
if (array_key_exists('Filter', $data) && is_array($data['Filter'])) {
$message .= ' The Criteria element will be merged into Filter.';
$data['Filter'] = array_merge($data['Filter'], $data['Criteria']); | Add mention of scoring in deprecation message for REST Criteria | ezsystems_ezpublish-kernel | train | php |
ba0ebb2389c062b08873c2f77b5cfae6e0fd6682 | diff --git a/salt/state.py b/salt/state.py
index <HASH>..<HASH> 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -966,7 +966,7 @@ class HighState(object):
errors.append(err)
else:
for sub_sls in state.pop('include'):
- if not list(mods).count(sub_sls):
+ if sub_sls not in mods:
nstate, mods, err = self.render_state(
sub_sls,
env, | Use in operator to check for existence in set. | saltstack_salt | train | py |
3b5fde3b069042af2ae4b0ec3b89407476456d92 | diff --git a/sample/src/main/java/com/github/pedrovgs/sample/MainActivity.java b/sample/src/main/java/com/github/pedrovgs/sample/MainActivity.java
index <HASH>..<HASH> 100644
--- a/sample/src/main/java/com/github/pedrovgs/sample/MainActivity.java
+++ b/sample/src/main/java/com/github/pedrovgs/sample/MainActivity.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2014 Pedro Vicente Gomez Sanchez.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.github.pedrovgs.sample;
import android.os.Bundle; | Add copyright to the first project java file | pedrovgs_Lynx | train | java |
81f136145cfe1f9e2feb567ee4a73e481b5e485d | diff --git a/jumper_logging_agent/agent.py b/jumper_logging_agent/agent.py
index <HASH>..<HASH> 100644
--- a/jumper_logging_agent/agent.py
+++ b/jumper_logging_agent/agent.py
@@ -12,14 +12,14 @@ import logging
import errno
import threading
from importlib import import_module
-import itertools
-import keen
import time
import signal
from future import standard_library
# noinspection PyUnresolvedReferences
from future.builtins import *
+from keen import KeenClient
+
standard_library.install_aliases()
DEFAULT_INPUT_FILENAME = '/var/run/jumper_logging_agent/events'
@@ -67,6 +67,15 @@ class RecurringTimer(threading.Thread):
self.stop_event.set()
+def keen_event_store(project_id, write_key):
+ return KeenClient(
+ project_id=project_id,
+ write_key=write_key,
+ read_key='',
+ base_url='https://eventsapi.jumper.io'
+ )
+
+
class Agent(object):
EVENT_TYPE_PROPERTY = 'type'
@@ -81,7 +90,7 @@ class Agent(object):
self.flush_interval = flush_interval
self.event_count = 0
self.pending_events = []
- self.event_store = event_store or keen
+ self.event_store = event_store or keen_event_store(project_id, write_key)
self.default_event_type = default_event_type
self.on_listening = on_listening | changed keen event store to log to jumper's url | Jumperr-labs_jumper-logging-agent | train | py |
88247750fd4c0180a2c27e292a17c2208ea28307 | diff --git a/karma.conf.js b/karma.conf.js
index <HASH>..<HASH> 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -31,8 +31,6 @@ module.exports = function (config) {
{pattern: 'node_modules/@angular/**/*.js', included: false, watched: false},
{pattern: 'node_modules/@angular/**/*.js.map', included: false, watched: false},
- {pattern: 'systemjs.config.js', included: false, watched: false},
-
'karma-test-shim.js',
{ pattern: 'lib/**/*.js', included: false }, | fix(build): removed unneeded path | Stabzs_Angular2-Toaster | train | js |
ef7de819d9a5e5aa9e47eeaf4e6726410df734d9 | diff --git a/pydoop/hdfs/fs.py b/pydoop/hdfs/fs.py
index <HASH>..<HASH> 100644
--- a/pydoop/hdfs/fs.py
+++ b/pydoop/hdfs/fs.py
@@ -627,6 +627,8 @@ class hdfs(object):
def _walk(self, top):
""
+ if isinstance(top, basestring):
+ top = self.get_path_info(top)
yield top
if top['kind'] == 'directory':
for info in self.list_directory(top['name']):
@@ -650,6 +652,4 @@ class hdfs(object):
"""
if not top:
raise ValueError("Empty path")
- if isinstance(top, basestring):
- top = self.get_path_info(top)
return _walker_wrapper(self._walk(top)) | fs.walk now fails only if iterated.
Back to its previous behavior. | crs4_pydoop | train | py |
6248c81c21a0fe825089311b17f2c302eea614a2 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ from setuptools import setup
setup(
name='xdot',
- version='1.0',
+ version='1.1',
author='Jose Fonseca',
author_email='[email protected]',
url='https://github.com/jrfonseca/xdot.py', | Bump to version <I>. | jrfonseca_xdot.py | train | py |
52530074370235cd0b39c6bb9c15c2c930d8a231 | diff --git a/WrightTools/kit.py b/WrightTools/kit.py
index <HASH>..<HASH> 100644
--- a/WrightTools/kit.py
+++ b/WrightTools/kit.py
@@ -853,7 +853,7 @@ def diff(xi, yi, order=1):
def fft(xi, yi, axis=0):
- """Take the 1D FFT of an N-dimensional array and return "sensible" arrays which are shifted properly.
+ """Take the 1D FFT of an N-dimensional array and return "sensible" properly shifted arrays.
Parameters
---------- | Reword fft docstring to adhere to line length convention | wright-group_WrightTools | train | py |
93dd4f11f686d2455c980281d3a6606b719f925a | diff --git a/host.go b/host.go
index <HASH>..<HASH> 100644
--- a/host.go
+++ b/host.go
@@ -206,9 +206,26 @@ func (h *Host) ConfigureAuth() error {
return nil
}
- ip, err := h.Driver.GetIP()
- if err != nil {
- return err
+ var (
+ ip = ""
+ ipErr error
+ maxRetries = 4
+ )
+
+ for i := 0; i < maxRetries; i++ {
+ ip, ipErr = h.Driver.GetIP()
+ if ip != "" {
+ break
+ }
+ time.Sleep(5 * time.Second)
+ }
+
+ if ipErr != nil {
+ return ipErr
+ }
+
+ if ip == "" {
+ return fmt.Errorf("unable to get machine IP")
}
serverCertPath := filepath.Join(h.storePath, "server.pem") | allow retries for getIp when issuing cert | docker_machine | train | go |
dbbf86a08dfce55e5f69bc101f446e03cef6e2bf | diff --git a/main/core/Manager/ResourceManager.php b/main/core/Manager/ResourceManager.php
index <HASH>..<HASH> 100644
--- a/main/core/Manager/ResourceManager.php
+++ b/main/core/Manager/ResourceManager.php
@@ -1152,12 +1152,12 @@ class ResourceManager
$obj = $event->getItem();
if ($obj !== null) {
- $archive->addFile($obj, iconv(mb_detect_encoding($filename), $this->getEncoding(), $filename));
+ $archive->addFile($obj, iconv($this->ut->detectEncoding($filename), $this->getEncoding(), $filename));
} else {
- $archive->addFromString(iconv(mb_detect_encoding($filename), $this->getEncoding(), $filename), '');
+ $archive->addFromString(iconv($this->ut->detectEncoding($filename), $this->getEncoding(), $filename), '');
}
} else {
- $archive->addEmptyDir(iconv(mb_detect_encoding($filename), $this->getEncoding(), $filename));
+ $archive->addEmptyDir(iconv($this->ut->detectEncoding($filename), $this->getEncoding(), $filename));
}
$this->dispatcher->dispatch('log', 'Log\LogResourceExport', [$node]);
@@ -1571,7 +1571,7 @@ class ResourceManager
private function getEncoding()
{
- return $this->ut->getDefaultEncoding();
+ return 'UTF-8//TRANSLIT';
}
/** | [CoreBundle] Uses utf-8 string with ZiPArchive and #<I> detection. (#<I>) | claroline_Distribution | train | php |
df1d6abd7c54f2a3beab088a8eea67276f2eca28 | diff --git a/src/Psalm/Checker/FunctionLikeChecker.php b/src/Psalm/Checker/FunctionLikeChecker.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Checker/FunctionLikeChecker.php
+++ b/src/Psalm/Checker/FunctionLikeChecker.php
@@ -853,6 +853,16 @@ abstract class FunctionLikeChecker extends SourceChecker implements StatementsSo
if (!$return_type) {
if ($inferred_return_type && !$inferred_return_type->isMixed()) {
+ $inferred_return_type = TypeChecker::simplifyUnionType(
+ ExpressionChecker::fleshOutTypes(
+ $inferred_return_type,
+ [],
+ $this->source->getFQCLN(),
+ ''
+ ),
+ $this->getFileChecker()
+ );
+
FileChecker::addDocblockReturnType(
$this->source->getFileName(),
$this->function->getLine(),
@@ -935,6 +945,8 @@ abstract class FunctionLikeChecker extends SourceChecker implements StatementsSo
$this->getFileChecker()
);
+ var_dump($inferred_return_type);
+
$return_types_different = false;
if (!TypeChecker::isContainedBy($inferred_return_type, $declared_return_type, $this->getFileChecker())) { | Make brand-new return types more accurate | vimeo_psalm | train | php |
44757af716ddcce0dc3ed78b3056e6e7b0241681 | diff --git a/src/support/shotgun_toolkit/rez_app_launch.py b/src/support/shotgun_toolkit/rez_app_launch.py
index <HASH>..<HASH> 100755
--- a/src/support/shotgun_toolkit/rez_app_launch.py
+++ b/src/support/shotgun_toolkit/rez_app_launch.py
@@ -54,8 +54,14 @@ class AppLaunch(tank.Hook):
from rez.resolved_context import ResolvedContext
from rez.config import config
+ # Define variables used to bootstrap tank from overwrite on first reference
+ # PYTHONPATH is used by tk-maya
+ # NUKE_PATH is used by tk-nuke
+ # HIERO_PLUGIN_PATH is used by tk-nuke (nukestudio)
+ # KATANA_RESOURCES is used by tk-katana
+ config.parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH", "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"]
+
rez_packages = extra["rez_packages"]
- config.parent_variables = ["PYTHONPATH"]
context = ResolvedContext(rez_packages)
use_rez = True | add shotgun toolkit support for nuke, houdini and katana | nerdvegas_rez | train | py |
b75b2853af25ce11e9376f02f7e8dbb8221f19a5 | diff --git a/xclim/ensembles/_reduce.py b/xclim/ensembles/_reduce.py
index <HASH>..<HASH> 100644
--- a/xclim/ensembles/_reduce.py
+++ b/xclim/ensembles/_reduce.py
@@ -190,6 +190,7 @@ def kmeans_reduce_ensemble(
--------
>>> import xclim
>>> from xclim.ensembles import create_ensemble, kmeans_reduce_ensemble
+ >>> from xclim.indices import hot_spell_frequency
Start with ensemble datasets for temperature:
@@ -205,7 +206,7 @@ def kmeans_reduce_ensemble(
Then, Hotspell frequency as second indicator:
- >>> hs = xclim.atmos.hot_spell_frequency(tasmax=ensTas.tas, window=2, thresh_tasmax='10 degC')
+ >>> hs = hot_spell_frequency(tasmax=ensTas.tas, window=2, thresh_tasmax='10 degC')
>>> his_hs = hs.sel(time=slice('1990','2019')).mean(dim='time')
>>> fut_hs = hs.sel(time=slice('2020','2050')).mean(dim='time')
>>> dhs = fut_hs - his_hs | Use indices to bypass metadata checks | Ouranosinc_xclim | train | py |
bd337f6c21bf25d66d4f51a9340789cf07eaeed7 | diff --git a/config/karma.conf-ci.js b/config/karma.conf-ci.js
index <HASH>..<HASH> 100644
--- a/config/karma.conf-ci.js
+++ b/config/karma.conf-ci.js
@@ -117,7 +117,7 @@ module.exports = function (config) {
* possible values: 'dots', 'progress'
* available reporters: https://npmjs.org/browse/keyword/karma-reporter
*/
- reporters: ['mocha', 'dots', 'coverage'],
+ reporters: ['mocha', 'coverage'],
// web server port
port: 9876, | Removed dots reporter (#<I>) | blackbaud_skyux2 | train | js |
23f890a3baf28bb0454348da143c835804f5faac | diff --git a/cmd/burrow/commands/deploy.go b/cmd/burrow/commands/deploy.go
index <HASH>..<HASH> 100644
--- a/cmd/burrow/commands/deploy.go
+++ b/cmd/burrow/commands/deploy.go
@@ -39,7 +39,7 @@ func Deploy(output Output) func(cmd *cli.Cmd) {
defaultGasOpt := cmd.StringOpt("g gas", "1111111111",
"default gas to use; can be overridden for any single job")
- jobsOpt := cmd.IntOpt("j jobs", 8,
+ jobsOpt := cmd.IntOpt("j jobs", 2,
"default number of concurrent solidity compilers to run")
addressOpt := cmd.StringOpt("a address", "", | Reduce the number of concurrent solc's we run
This is causing file systems errors on Mac. | hyperledger_burrow | train | go |
9b29ae7c103cc8607bb12a6937c5a993620e148f | diff --git a/configs/tslint-vscode.js b/configs/tslint-vscode.js
index <HASH>..<HASH> 100644
--- a/configs/tslint-vscode.js
+++ b/configs/tslint-vscode.js
@@ -9,7 +9,6 @@ const PICKING_RULE_NAMES = [
'import-groups',
'scoped-modules',
'import-path-base-url',
- 'explicit-return-type',
];
const {rules, rulesDirectory} = TSLint.Configuration.loadConfigurationFromPath( | Remove explicit-return-type from vscode lint config | makeflow_magicspace | train | js |
339eba52400dde8732b03e32624607a4517a556c | diff --git a/specs-go/version.go b/specs-go/version.go
index <HASH>..<HASH> 100644
--- a/specs-go/version.go
+++ b/specs-go/version.go
@@ -11,7 +11,7 @@ const (
VersionPatch = 0
// VersionDev indicates development branch. Releases will be empty string.
- VersionDev = "-rc3-dev"
+ VersionDev = "-rc4"
)
// Version is the specification version that the package types support. | version: release <I>-rc4 | opencontainers_runtime-spec | train | go |
5efd5995df9138f54b68be04432f5e6cd4af711f | diff --git a/qgispluginreleaser/entry_point.py b/qgispluginreleaser/entry_point.py
index <HASH>..<HASH> 100644
--- a/qgispluginreleaser/entry_point.py
+++ b/qgispluginreleaser/entry_point.py
@@ -5,11 +5,12 @@ import os
import shutil
import subprocess
import time
-
+import codecs
def prerequisites_ok():
if os.path.exists('metadata.txt'):
- if 'qgisMinimumVersion' in open('metadata.txt').read():
+ if 'qgisMinimumVersion' in codecs.open(
+ 'metadata.txt', 'r', 'utf-8').read():
return True
@@ -53,10 +54,10 @@ def fix_version(context):
"""
if not prerequisites_ok():
return
- lines = open('metadata.txt', 'rU').readlines()
+ lines = codecs.open('metadata.txt', 'rU', 'utf-8').readlines()
for index, line in enumerate(lines):
if line.startswith('version'):
new_line = 'version=%s\n' % context['new_version']
lines[index] = new_line
time.sleep(1)
- open('metadata.txt', 'w').writelines(lines)
+ codecs.open('metadata.txt', 'w', 'utf-8').writelines(lines) | use codec in conjunction with "utf8" to read and write files | nens_qgispluginreleaser | train | py |
2b1cf43ccb82a5117794356869817d5f15e69171 | diff --git a/app/code/community/Aoe/Scheduler/Model/Observer.php b/app/code/community/Aoe/Scheduler/Model/Observer.php
index <HASH>..<HASH> 100644
--- a/app/code/community/Aoe/Scheduler/Model/Observer.php
+++ b/app/code/community/Aoe/Scheduler/Model/Observer.php
@@ -259,13 +259,14 @@ class Aoe_Scheduler_Model_Observer extends Mage_Cron_Model_Observer {
$tmp[$schedule->getJobCode()][$schedule->getScheduledAt()] = array('key' => $key, 'schedule' => $schedule);
}
- foreach ($tmp as $schedules) {
+ foreach ($tmp as $jobCode => $schedules) {
ksort($schedules);
array_pop($schedules); // we remove the newest one
foreach ($schedules as $data) { /* @var $data array */
$this->_pendingSchedules->removeItemByKey($data['key']);
$schedule = $data['schedule']; /* @var $schedule Aoe_Scheduler_Model_Schedule */
$schedule
+ ->setMessages('Mulitple tasks with the same job code were piling up. Skipping execution of duplicates.')
->setStatus(Mage_Cron_Model_Schedule::STATUS_MISSED)
->save();
} | Added message to skipped tasks because of piles | AOEpeople_Aoe_Scheduler | train | php |
2f3aa22aeadc825af58438f1313cbe00abfd1bdb | diff --git a/fermipy/diffuse/gt_merge_srcmaps.py b/fermipy/diffuse/gt_merge_srcmaps.py
index <HASH>..<HASH> 100755
--- a/fermipy/diffuse/gt_merge_srcmaps.py
+++ b/fermipy/diffuse/gt_merge_srcmaps.py
@@ -102,7 +102,8 @@ class GtMergeSourceMaps(object):
for source_name in source_names:
try:
source = source_factory.releaseSource(source_name)
- like.addSource(source)
+ # EAC, add the source directly to the model
+ like.logLike.addSource(source)
srcs_to_merge.append(source_name)
except KeyError:
missing_sources.append(source_name) | Speed up gt_merge_srcmaps.py by adding source directly to c++ likelihood object | fermiPy_fermipy | train | py |
9c08d6cfb0c349aa9441201441a5a4692797ed99 | diff --git a/jgrassgears/src/main/java/eu/hydrologis/jgrass/jgrassgears/i18n/MessageHandler.java b/jgrassgears/src/main/java/eu/hydrologis/jgrass/jgrassgears/i18n/MessageHandler.java
index <HASH>..<HASH> 100644
--- a/jgrassgears/src/main/java/eu/hydrologis/jgrass/jgrassgears/i18n/MessageHandler.java
+++ b/jgrassgears/src/main/java/eu/hydrologis/jgrass/jgrassgears/i18n/MessageHandler.java
@@ -37,7 +37,7 @@ public class MessageHandler {
private MessageHandler() {
}
- public static MessageHandler getInstance() {
+ public synchronized static MessageHandler getInstance() {
if (messageHandler == null) {
messageHandler = new MessageHandler();
messageHandler.initResourceBundle(); | fixes for: Bug: Incorrect lazy initialization and update of static field | TheHortonMachine_hortonmachine | train | java |
4e336970cc6b2bc19f39b139769535d60a645b25 | diff --git a/environs/jujutest/livetests.go b/environs/jujutest/livetests.go
index <HASH>..<HASH> 100644
--- a/environs/jujutest/livetests.go
+++ b/environs/jujutest/livetests.go
@@ -224,7 +224,10 @@ func (t *LiveTests) checkUpgradeMachineAgent(c *C, st *state.State, m *state.Mac
}
tools, err := m.AgentTools()
c.Assert(err, IsNil)
- c.Assert(tools, DeepEquals, upgradeTools)
+ // N.B. We can't test that the URL is the same because there's
+ // no guarantee that it is, even though it might be referring to
+ // the same thing.
+ c.Assert(tools.Binary, DeepEquals, upgradeTools.Binary)
c.Logf("upgrade successful!")
} | environs/jujutest: fix tools comparison | juju_juju | train | go |
fa1c569cd5047b7993bd2593d4fb943e9a3f4b1b | diff --git a/test/integration/contract-aci.js b/test/integration/contract-aci.js
index <HASH>..<HASH> 100644
--- a/test/integration/contract-aci.js
+++ b/test/integration/contract-aci.js
@@ -395,6 +395,17 @@ describe('Contract instance', function () {
}
}])
})
+
+ it('calls a contract that emits events with no defined events', async () => {
+ const contract = await sdk.getContractInstance({
+ source:
+ 'contract FooContract =\n' +
+ ' entrypoint emitEvents(f: bool) = ()',
+ contractAddress: remoteContract.deployInfo.address
+ })
+ const result = await contract.methods.emitEvents(false, { omitUnknown: true })
+ expect(result.decodedEvents).to.be.eql([])
+ })
})
describe('Arguments Validation and Casting', function () { | fix(contract events): don't throw error if events emitted by remote | aeternity_aepp-sdk-js | train | js |
48db77d3ca6be544e6710143a045d046de1d8628 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,3 @@
-# coding=utf-8
"""
GitHub-Flask
------------
@@ -29,7 +28,7 @@ setup(
version=get_version(),
url='http://github.com/cenkalti/github-flask',
license='MIT',
- author=u'Cenk Altı',
+ author='Cenk Alti',
author_email='[email protected]',
description='Adds support for authorizing users with GitHub to Flask.',
long_description=__doc__, | workaround for python3 install bug | cenkalti_github-flask | train | py |
7e4094af9ab551f3fe708e578a1aa669422d8f6a | diff --git a/lib/frameit/offsets.rb b/lib/frameit/offsets.rb
index <HASH>..<HASH> 100644
--- a/lib/frameit/offsets.rb
+++ b/lib/frameit/offsets.rb
@@ -9,13 +9,13 @@ module Frameit
case screenshot.screen_size
when size::IOS_55
return {
- 'offset' => '+42+147',
- 'width' => 539
+ 'offset' => '+41+146',
+ 'width' => 541
}
when size::IOS_47
return {
- 'offset' => '+41+154',
- 'width' => 530
+ 'offset' => '+40+153',
+ 'width' => 532
}
when size::IOS_40
return {
@@ -64,4 +64,4 @@ module Frameit
end
end
end
-end
\ No newline at end of file
+end | Fix little offset on iPhone 6 and iPhone 6 Plus | fastlane_fastlane | train | rb |
11bcb944e9be13084d8d2ab70f48c019e1bc0eb3 | diff --git a/src/core/vr-scene.js b/src/core/vr-scene.js
index <HASH>..<HASH> 100644
--- a/src/core/vr-scene.js
+++ b/src/core/vr-scene.js
@@ -127,7 +127,7 @@ var VRScene = module.exports = registerElement(
},
elementLoaded: {
- value: function (node) {
+ value: function () {
this.pendingElements--;
// If we still need to wait for more elements.
if (this.pendingElements > 0) { return; }
@@ -252,6 +252,8 @@ var VRScene = module.exports = registerElement(
// We create a default camera
defaultCamera = document.createElement('vr-object');
defaultCamera.setAttribute('camera', {fov: 45});
+ defaultCamera.setAttribute('position', {x: 0, y: 0, z: 20});
+ this.pendingElements++;
defaultCamera.addEventListener('loaded', this.elementLoaded.bind(this));
this.appendChild(defaultCamera);
} | It accounts for the default camera in the pending elements. This ensures that rendering triggers only when the camera is ready | aframevr_aframe | train | js |
0604ee160351cb0901d7bc27d5a379d345fb2e35 | diff --git a/commerce-service/src/main/java/com/liferay/commerce/model/impl/CommerceOrderImpl.java b/commerce-service/src/main/java/com/liferay/commerce/model/impl/CommerceOrderImpl.java
index <HASH>..<HASH> 100644
--- a/commerce-service/src/main/java/com/liferay/commerce/model/impl/CommerceOrderImpl.java
+++ b/commerce-service/src/main/java/com/liferay/commerce/model/impl/CommerceOrderImpl.java
@@ -269,6 +269,17 @@ public class CommerceOrderImpl extends CommerceOrderBaseImpl {
}
@Override
+ public boolean isSubscription() {
+ if (getOrderStatus() ==
+ CommerceOrderConstants.ORDER_STATUS_SUBSCRIPTION) {
+
+ return true;
+ }
+
+ return false;
+ }
+
+ @Override
public void setShippingDiscounts(
CommerceDiscountValue commerceDiscountValue) { | COMMERCE-<I> Added new method to commerce order implementation | liferay_com-liferay-commerce | train | java |
692c5427d0541f5d6daab0f42508974a85097638 | diff --git a/src/frontend/org/voltdb/planner/StatementPartitioning.java b/src/frontend/org/voltdb/planner/StatementPartitioning.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/planner/StatementPartitioning.java
+++ b/src/frontend/org/voltdb/planner/StatementPartitioning.java
@@ -535,7 +535,7 @@ public class StatementPartitioning implements Cloneable{
m_countOfIndependentlyPartitionedTables = m_countOfPartitionedTables;
}
-
+
public void resetAnalysisState() {
m_countOfIndependentlyPartitionedTables = -1;
m_countOfPartitionedTables = -1;
diff --git a/src/frontend/org/voltdb/plannodes/DeletePlanNode.java b/src/frontend/org/voltdb/plannodes/DeletePlanNode.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/plannodes/DeletePlanNode.java
+++ b/src/frontend/org/voltdb/plannodes/DeletePlanNode.java
@@ -67,7 +67,7 @@ public class DeletePlanNode extends AbstractOperationPlanNode {
}
return "DELETE " + m_targetTableName;
}
-
+
@Override
public boolean isOrderDeterministic() {
assert(m_children != null); | Fix trailing whitespace left from earlier changes | VoltDB_voltdb | train | java,java |
70108a9c0c7c8c7e891359d9c89b932f9b6f90f5 | diff --git a/src/mako/file/FileSystem.php b/src/mako/file/FileSystem.php
index <HASH>..<HASH> 100644
--- a/src/mako/file/FileSystem.php
+++ b/src/mako/file/FileSystem.php
@@ -131,25 +131,30 @@ class FileSystem
}
/**
- * Returns TRUE if a directory is empty and FALSE if not.
+ * Returns TRUE if a file or directory is empty and FALSE if not.
*
* @access public
* @param string $path Path to directory
* @return boolean
*/
- public function isDirectoryEmpty($path)
+ public function isEmpty($path)
{
- $files = scandir($path);
-
- foreach($files as $file)
+ if(is_dir($path))
{
- if($file !== '.' && $file !== '..')
+ $files = scandir($path);
+
+ foreach($files as $file)
{
- return false;
+ if($file !== '.' && $file !== '..')
+ {
+ return false;
+ }
}
+
+ return true;
}
- return true;
+ return filesize($path) === 0;
}
/** | Renamed isDirectoryEmpty to isEmpty
Will now work on both files and directories | mako-framework_framework | train | php |
26e17f5806212e714918b117abdd50957f504be6 | diff --git a/modules/relative-urls.php b/modules/relative-urls.php
index <HASH>..<HASH> 100755
--- a/modules/relative-urls.php
+++ b/modules/relative-urls.php
@@ -92,7 +92,7 @@ if ( ! class_exists(__NAMESPACE__ . '\\RelativeUrls') ) {
public static function content_return_absolute_url_filter( $content ) {
// This might be issue in really big sites so save results to transient using hash
- $letter_count = count($content);
+ $letter_count = strlen($content);
$hash = crc32($content);
$transient_key = 'seravo_feed_' . $letter_count . '_' . $hash; | Use strlen() instead of count() when manipulating strings (Closes: #<I>)
The function count() should only be used with arrays, objects and alike. | Seravo_seravo-plugin | train | php |
07a773ef07a3c0fb7296fb1f2cbee74309bc3de6 | diff --git a/falafel/mappers/chkconfig.py b/falafel/mappers/chkconfig.py
index <HASH>..<HASH> 100644
--- a/falafel/mappers/chkconfig.py
+++ b/falafel/mappers/chkconfig.py
@@ -4,6 +4,7 @@ chkconfig - command
"""
from collections import namedtuple
from .. import Mapper, mapper
+import re
@mapper('chkconfig')
@@ -54,16 +55,22 @@ class ChkConfig(Mapper):
Args:
content (context.content): Mapper context content
"""
- valid_states = {':on', ':off'}
+
+ on_state = re.compile(r':\s*on(?:\s+|$)')
+ off_state = re.compile(r':\s*off(?:\s+|$)')
+
+ valid_states = [on_state, off_state]
for line in content:
- if any(state in line for state in valid_states):
- service = line.split()[0].strip()
- enabled = ':on' in line # Store boolean value
+ if any(state.search(line) for state in valid_states):
+ service = line.split()[0].strip(' \t:')
+ enabled = on_state.search(line) is not None
self.services[service] = enabled
self.parsed_lines[service] = line
states = []
for level in line.split()[1:]:
+ if len(level.split(':')) < 2:
+ continue
num, state = level.split(':')
states.append(self.LevelState(num.strip(), state.strip()))
self.level_states[service] = states | Add support for xinetd-based services | RedHatInsights_insights-core | train | py |
ee43428625b42ba3982cc4e4e0babe40c30d4de8 | diff --git a/tinytag/tinytag.py b/tinytag/tinytag.py
index <HASH>..<HASH> 100644
--- a/tinytag/tinytag.py
+++ b/tinytag/tinytag.py
@@ -449,6 +449,7 @@ class ID3(TinyTag):
'TPE2': 'albumartist', 'TCOM': 'composer',
'WXXX': 'extra.url',
'TXXX': 'extra.text',
+ 'TKEY': 'extra.initial_key',
}
IMAGE_FRAME_IDS = {'APIC', 'PIC'}
PARSABLE_FRAME_IDS = set(FRAME_ID_TO_FIELD.keys()).union(IMAGE_FRAME_IDS) | added support for TKEY id3 meta data (Initial Key) as part of the `extra` field #<I> | devsnd_tinytag | train | py |
72a9c526e5790f63d913447b80c3503e0a2d7bbd | diff --git a/pub/index.php b/pub/index.php
index <HASH>..<HASH> 100644
--- a/pub/index.php
+++ b/pub/index.php
@@ -9,5 +9,4 @@ require_once '../vendor/autoload.php';
$request = HttpRequest::fromGlobalState(file_get_contents('php://input'));
$website = new SampleWebFront($request);
-$website->registerFactory(new SampleFactory());
$website->run();
diff --git a/src/SampleWebFront.php b/src/SampleWebFront.php
index <HASH>..<HASH> 100644
--- a/src/SampleWebFront.php
+++ b/src/SampleWebFront.php
@@ -17,6 +17,7 @@ class SampleWebFront extends WebFront
protected function registerFactories(MasterFactory $masterFactory)
{
$masterFactory->register(new CommonFactory());
+ $masterFactory->register(new SampleFactory());
$masterFactory->register(new FrontendFactory($this->getRequest()));
} | Issue #<I>: Register SampleFactory in SampleWebFront | lizards-and-pumpkins_catalog | train | php,php |
708afd692f7c12a0a1564b688e0c83dd22709b09 | diff --git a/mtglib/__init__.py b/mtglib/__init__.py
index <HASH>..<HASH> 100644
--- a/mtglib/__init__.py
+++ b/mtglib/__init__.py
@@ -1,2 +1,2 @@
-__version__ = '1.3.3'
+__version__ = '1.4.0'
__author__ = 'Cameron Higby-Naquin' | Increment minor version for release. | chigby_mtg | train | py |
eefe464659f93f944d51e70a167d87b9051ac4e5 | diff --git a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationStackView.java b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationStackView.java
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationStackView.java
+++ b/NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationStackView.java
@@ -163,6 +163,16 @@ public class NavigationStackView extends ViewGroup {
}
currentActivity.overridePendingTransition(enter, exit);
}
+ if (crumb == currentCrumb) {
+ Intent intent = new Intent(getContext(), SceneActivity.getActivity(crumb));
+ intent.putExtra(SceneActivity.CRUMB, crumb);
+ sceneItems.get(crumb).intent = intent;
+ int enter = this.getAnimationResourceId(this.enterAnim, this.activityOpenEnterAnimationId);
+ int exit = this.getAnimationResourceId(this.exitAnim, this.activityOpenExitAnimationId);
+ currentActivity.finish();
+ currentActivity.startActivity(intent);
+ currentActivity.overridePendingTransition(enter, exit);
+ }
oldCrumb = sceneItems.size() - 1;
} | Finished and started if crumbs equal
It only comes in here when replacing the current view and changing State | grahammendick_navigation | train | java |
2369d2220de6a9bbd8dc3529b9936130c0f682eb | diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -74,9 +74,9 @@ Server.prototype.server = function () {
if (logging) app.use(morgan(logging));
app.use(compression());
app.use(errorhandler());
- app.get('/', this.handleRender.bind(this));
app.get('/build/*', this.handleBuild.bind(this));
app.use('/build', serveStatic(this.root()));
+ app.get('*', this.handleRender.bind(this));
return app;
};
diff --git a/test/server.js b/test/server.js
index <HASH>..<HASH> 100644
--- a/test/server.js
+++ b/test/server.js
@@ -27,6 +27,13 @@ describe('Web Server', function () {
.end(done);
});
+ it('should render the root even for alternate paths', function (done) {
+ request(app)
+ .get('/hello/world')
+ .expect(200, read(fixture('simple/out.html'), 'utf8'))
+ .end(done);
+ });
+
it('should render the expected css', function (done) {
request(app)
.get('/build/index.css') | allowing any path to serve the root (for spa-like apps) | dominicbarnes_duo-serve | train | js,js |
9510df78456c341652c2abc2ac7158c405d84b89 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -35,7 +35,7 @@ module.exports = function(opts) {
width: res.info.width,
height: res.info.height,
data: [
- 'url(data:image/svg+xml;utf8,',
+ 'url(data:image/svg+xml;charset=utf8,',
encodeURIComponent(res.data),
')'
].join(''), | Data charset format fix for IE
Adding charset= before utf8 makes the SVG data URIs work in Internet Explorer | sunify_gulp-baseimg | train | js |
c54e3ab2bf7cde2433b39e28be85b7800d2883ed | diff --git a/demos/chat/chat.js b/demos/chat/chat.js
index <HASH>..<HASH> 100644
--- a/demos/chat/chat.js
+++ b/demos/chat/chat.js
@@ -118,8 +118,9 @@ irc.onerror = function(command) {
// 433 ERR_NICKNAMEINUSE
nickname += '_'
irc.nick(nickname)
- self.onUsernameTaken();
+ irc.join(CHANNEL)
}
+}
irc.onresponse = function(command) { | fixed parsing error in chat demo; actually join channel when nickname was taken | gameclosure_js.io | train | js |
402e336fe37ec36d1c900c67c18d632adf734ef5 | diff --git a/go/teams/delete_test.go b/go/teams/delete_test.go
index <HASH>..<HASH> 100644
--- a/go/teams/delete_test.go
+++ b/go/teams/delete_test.go
@@ -23,7 +23,7 @@ func TestDeleteRoot(t *testing.T) {
_, err := GetTeamByNameForTest(context.Background(), tc.G, teamname, false, false)
require.Error(t, err, "no error getting deleted team")
- require.IsType(t, TeamDoesNotExistError{}, err)
+ require.True(t, IsTeamReadError(err))
}
func TestDeleteSubteamAdmin(t *testing.T) { | Fix teams test (#<I>) | keybase_client | train | go |
7af5e4af7b72716875ddf8c2ccaadb42bc71312c | diff --git a/salt/output/highstate.py b/salt/output/highstate.py
index <HASH>..<HASH> 100644
--- a/salt/output/highstate.py
+++ b/salt/output/highstate.py
@@ -146,8 +146,8 @@ def output(data):
# Append result counts to end of output
colorfmt = '{0}{1}{2[ENDC]}'
rlabel = {True: 'Succeeded', False: 'Failed', None: 'Not Run'}
- count_max_len = max([len(str(x)) for x in rcounts.values()])
- label_max_len = max([len(x) for x in rlabel.values()])
+ count_max_len = max([len(str(x)) for x in rcounts.values()] or [0])
+ label_max_len = max([len(x) for x in rlabel.values()] or [0])
line_max_len = label_max_len + count_max_len + 2 # +2 for ': '
hstrs.append(
colorfmt.format( | Don't fail if there's nothing to feed to `max()`. | saltstack_salt | train | py |
38eec08bf2fa50061edef4df9af8be65e496538b | diff --git a/containers.go b/containers.go
index <HASH>..<HASH> 100644
--- a/containers.go
+++ b/containers.go
@@ -15,7 +15,14 @@ func getContainers(config string) Containers {
if len(config) > 0 {
return unmarshal([]byte(config))
}
- return readCranefile("Cranefile")
+ if _, err := os.Stat("crane.json"); err == nil {
+ return readCranefile("crane.json")
+ }
+ if _, err := os.Stat("Cranefile"); err == nil {
+ printNotice("Using a Cranefile is deprecated. Please use crane.json instead.\n")
+ return readCranefile("Cranefile")
+ }
+ panic("No crane.json found!")
}
func readCranefile(filename string) Containers { | Use crane.json going forward and deprecate Cranefile usage
Closes #4. | michaelsauter_crane | train | go |
f3ab34a5a769bac4561a96f77181a2cca803de37 | diff --git a/lib/bibformat_migration_kit.py b/lib/bibformat_migration_kit.py
index <HASH>..<HASH> 100644
--- a/lib/bibformat_migration_kit.py
+++ b/lib/bibformat_migration_kit.py
@@ -130,9 +130,9 @@ def migrate_behaviours():
# The conditions on which we will iterate will maybe need to be split
# in many conditions, as the new format does not support conditions with
# multiple arguments
+ add_default_case = True
for cond in behaviour_conditions:
previous_tag = ""
- add_default_case = True
evaluation_order = cond[0]
e_conditions = extract_cond(cond[1]) | Fixed add_default_case variable initialization location, thanks to
Ferran Jorba. | inveniosoftware_invenio-formatter | train | py |
5ecbcddee08dada40e56a4edc503e804c397793d | diff --git a/src/main/java/com/googlecode/rocoto/simpleconfig/DefaultPropertiesReader.java b/src/main/java/com/googlecode/rocoto/simpleconfig/DefaultPropertiesReader.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/googlecode/rocoto/simpleconfig/DefaultPropertiesReader.java
+++ b/src/main/java/com/googlecode/rocoto/simpleconfig/DefaultPropertiesReader.java
@@ -30,6 +30,7 @@ import java.util.Properties;
*
* @author Simone Tripodi
* @version $Id$
+ * @since 3.2
*/
final class DefaultPropertiesReader implements PropertiesReader { | added missing @since javadoc tag | 99soft_rocoto | train | java |
9ed2740c6df4e9e0e1a651401844242f7cd7bcaf | diff --git a/apis/connection.js b/apis/connection.js
index <HASH>..<HASH> 100644
--- a/apis/connection.js
+++ b/apis/connection.js
@@ -15,6 +15,9 @@
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
+var MIN_MODBUSRTU_FRAMESZ = 5;
+var MIN_MODBUSASCII_FRAMESZ = 11;
+
/**
* Adds connection shorthand API to a Modbus objext
*
@@ -65,6 +68,8 @@ var addConnctionAPI = function(Modbus) {
}
// disable auto open, as we handle the open
options.autoOpen = false;
+ // set vmin to smallest modbus packet size
+ options.platformOptions = { vmin: MIN_MODBUSRTU_FRAMESZ, vtime: 0 };
// create the SerialPort
var SerialPort = require("serialport");
@@ -196,6 +201,8 @@ var addConnctionAPI = function(Modbus) {
next = options;
options = {};
}
+ // set vmin to smallest modbus packet size
+ options.platformOptions = { vmin: MIN_MODBUSASCII_FRAMESZ, vtime: 0 };
// create the ASCII SerialPort
var SerialPortAscii = require("../ports/asciiport"); | added vmin support for ascii and RTU | yaacov_node-modbus-serial | train | js |
2cc2ec51c49b610505cb5d1b6fe14520495a5589 | diff --git a/lib/listen/change.rb b/lib/listen/change.rb
index <HASH>..<HASH> 100644
--- a/lib/listen/change.rb
+++ b/lib/listen/change.rb
@@ -17,14 +17,23 @@ module Listen
unless cookie
# TODO: remove silencing here (it's done later)
- return if _silencer.silenced?(path, options[:type])
+ if _silencer.silenced?(path, options[:type])
+ _log :debug, "(silenced): #{path.inspect}"
+ return
+ end
end
+ _log :debug, "got change: #{[path, options].inspect}"
+
if change
_notify_listener(change, path, cookie ? { cookie: cookie } : {})
else
send("_#{options[:type].downcase}_change", path, options)
end
+ rescue
+ _log :error, '................CHANGE CRASHED.................'
+ STDERR.puts ".. #{$!.inspect}:#{[email protected]("\n")}"
+ raise
end
private
@@ -47,5 +56,9 @@ module Listen
def _silencer
listener.registry[:silencer]
end
+
+ def _log(type, message)
+ Celluloid.logger.send(type, message)
+ end
end
end | add extra debugging (Change) | guard_listen | train | rb |
487696bec0102297a7599510cf4214abaa96a404 | diff --git a/lib/camel_patrol/middleware.rb b/lib/camel_patrol/middleware.rb
index <HASH>..<HASH> 100644
--- a/lib/camel_patrol/middleware.rb
+++ b/lib/camel_patrol/middleware.rb
@@ -17,14 +17,17 @@ module CamelPatrol
def underscore_params(env)
if ::Rails::VERSION::MAJOR < 5
env["action_dispatch.request.request_parameters"].deep_transform_keys!(&:underscore)
- else
- request_body = JSON.parse(env["rack.input"].read)
- request_body.deep_transform_keys!(&:underscore)
- req = StringIO.new(request_body.to_json)
+ return
+ end
- env["rack.input"] = req
- env["CONTENT_LENGTH"] = req.length
+ if !(request_body = safe_json_parse(env["rack.input"].read))
+ return
end
+
+ request_body.deep_transform_keys!(&:underscore)
+ req = StringIO.new(request_body.to_json)
+ env["rack.input"] = req
+ env["CONTENT_LENGTH"] = req.length
end
def camelize_response(response) | Abort translation on parse errors
For requests, we should abort translating key format if a parsing error
occurs due to invalid json. Let the request continue untouched, similar
to the scenario when content-type header is missing
It shouldn't be this middleware's responcibility to deal with invalid
json. The app server can handle these scenarios on a case by case basis
<URL> | coverhound_camel_patrol | train | rb |
ac0d6e32515e492ea4e318d59173f170a7de5c3f | diff --git a/Factory/MailMotorFactory.php b/Factory/MailMotorFactory.php
index <HASH>..<HASH> 100644
--- a/Factory/MailMotorFactory.php
+++ b/Factory/MailMotorFactory.php
@@ -15,12 +15,12 @@ class MailMotorFactory
/** @var Container */
protected $container;
- /** @var string */
+ /** @var string|null */
protected $mailEngine;
public function __construct(
Container $container,
- string $mailEngine
+ ?string $mailEngine
) {
$this->container = $container;
$this->setMailEngine($mailEngine);
@@ -31,7 +31,7 @@ class MailMotorFactory
return $this->container->get('mailmotor.' . $this->mailEngine . '.subscriber.gateway');
}
- protected function setMailEngine(string $mailEngine): void
+ protected function setMailEngine(?string $mailEngine): void
{
if ($mailEngine == null) {
$mailEngine = 'not_implemented'; | Fix error when the mail engine is not set | mailmotor_mailmotor-bundle | train | php |
a8f6984f42ea530db3a72f71376a6a43452cfb58 | diff --git a/lib/Doctrine/DBAL/Statement.php b/lib/Doctrine/DBAL/Statement.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/DBAL/Statement.php
+++ b/lib/Doctrine/DBAL/Statement.php
@@ -129,7 +129,7 @@ class Statement implements \IteratorAggregate, DriverStatement
*/
public function execute($params = null)
{
- if(is_array($params)) {
+ if (is_array($params)) {
$this->params = $params;
} | fixed missing space between if and open parenthesis | doctrine_dbal | train | php |
686ca7c994b59f02232a5ebda638ffe2a062c44b | diff --git a/src/extract_slides.js b/src/extract_slides.js
index <HASH>..<HASH> 100644
--- a/src/extract_slides.js
+++ b/src/extract_slides.js
@@ -266,13 +266,13 @@ inlineTokenRules['paragraph_close'] = function(token, env) {
inlineTokenRules['fence'] = function(token, env) {
- startStyle({fontFamily: 'Courier New, monospace'}, env);
- if(token.info) {
- const htmlTokens = low.highlight(token.info, token.content);
+ startStyle({fontFamily: 'Courier New'}, env);
+ const language = token.info ? token.info.trim() : undefined;
+ if(language) {
+ const htmlTokens = low.highlight(language, token.content);
for(let token of htmlTokens.value) {
processHtmlToken(token, env);
}
-
} else {
// For code blocks, replace line feeds with vertical tabs to keep
// the block as a single paragraph. This avoid the extra vertical | Fix font family for code blocks, trim language to avoid invalid language errors when trailing whitespace present | gsuitedevs_md2googleslides | train | js |
d113111886efeffa6e58de41a590944f09b17896 | diff --git a/salt/modules/boto_cloudtrail.py b/salt/modules/boto_cloudtrail.py
index <HASH>..<HASH> 100644
--- a/salt/modules/boto_cloudtrail.py
+++ b/salt/modules/boto_cloudtrail.py
@@ -488,7 +488,8 @@ def list_tags(Name,
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
rid = _get_trail_arn(Name,
- region=None, key=None, keyid=None, profile=None)
+ region=region, key=key, keyid=keyid,
+ profile=profile)
ret = conn.list_tags(ResourceIdList=[rid])
tlist = ret.get('ResourceTagList', []).pop().get('TagsList')
tagdict = {} | Fix cut-and-paste error or whatever it was | saltstack_salt | train | py |
d84aefbd4324e29130539fa882b5ef55f5add148 | diff --git a/pkg/maps/ctmap/ctmap.go b/pkg/maps/ctmap/ctmap.go
index <HASH>..<HASH> 100644
--- a/pkg/maps/ctmap/ctmap.go
+++ b/pkg/maps/ctmap/ctmap.go
@@ -393,7 +393,7 @@ func DeleteIfUpgradeNeeded(e CtEndpoint) {
continue
}
if oldMap.CheckAndUpgrade(&newMap.Map.MapInfo) {
- scopedLog.Info("CT Map upgraded, expect brief disruption of ongoing connections")
+ scopedLog.Warning("CT Map upgraded, expect brief disruption of ongoing connections")
}
oldMap.Close()
} | ctmap: Warn if CT map was upgraded as it results in disruption
Related: #<I> | cilium_cilium | train | go |
ade9e90a349dc03af43d3c84593cf2d43aebab81 | diff --git a/test/db/mysql/simple_test.rb b/test/db/mysql/simple_test.rb
index <HASH>..<HASH> 100644
--- a/test/db/mysql/simple_test.rb
+++ b/test/db/mysql/simple_test.rb
@@ -136,15 +136,15 @@ class MySQLSimpleTest < Test::Unit::TestCase
t.integer :value
end
connection.create_table :bs do |t|
- t.references :b, index: true
+ t.references :a, :index => true, :foreign_key => false
end
- assert_nothing_raised do
- connection.add_foreign_key :bs, :as
- end
+ #assert_nothing_raised do
+ connection.add_foreign_key :bs, :as
+ #end
- connection.drop_table :as rescue nil
- connection.drop_table :bs rescue nil
+ connection.drop_table :bs
+ connection.drop_table :as
end if ar_version("4.2")
def test_find_in_other_schema_with_include | adjust foreign key test introduced in #<I> (was failing with MRI under AR <I>) | jruby_activerecord-jdbc-adapter | train | rb |
59aa4ee8a0f56325f3df3fd95ce397ed2ee12f4d | diff --git a/src/littleparsers.js b/src/littleparsers.js
index <HASH>..<HASH> 100644
--- a/src/littleparsers.js
+++ b/src/littleparsers.js
@@ -173,8 +173,9 @@
LittleParsers.prototype.variable = function () {
var _this = this;
return this.cacheDo("variable", function () {
- var v = _this.regex(/^[a-zA-Z_$][a-zA-Z0-9_$]*/);
+ var v = _this.regex(/^[a-zA-Z_$@][a-zA-Z0-9_$]*/);
if (v === 'self') return 'this';
+ if (v[0] === '@') return 'this.'+v.substring(1); //@foo -> this.foo
return v;
});
}; | "@foo" compiles into "this.foo" | ympbyc_LittleSmallscript | train | js |
2cb09324dc8d6a62590c6999accfff2efb059e55 | diff --git a/lib/MwbExporter/Formatter/Doctrine2/Annotation/Model/Table.php b/lib/MwbExporter/Formatter/Doctrine2/Annotation/Model/Table.php
index <HASH>..<HASH> 100644
--- a/lib/MwbExporter/Formatter/Doctrine2/Annotation/Model/Table.php
+++ b/lib/MwbExporter/Formatter/Doctrine2/Annotation/Model/Table.php
@@ -205,7 +205,7 @@ class Table extends BaseTable
public function writeTable(WriterInterface $writer)
{
- if (!$this->isExternal() && !$this->isManyToMany()) {
+ if (!$this->isExternal()) {
$namespace = $this->getEntityNamespace();
if ($repositoryNamespace = $this->getDocument()->getConfig()->get(Formatter::CFG_REPOSITORY_NAMESPACE)) {
$repositoryNamespace .= '\\';
@@ -251,15 +251,9 @@ class Table extends BaseTable
;
return self::WRITE_OK;
- } else {
- switch (true) {
- case $this->isManyToMany():
- return self::WRITE_M2M;
-
- case $this->isExternal():
- return self::WRITE_EXTERNAL;
- }
}
+
+ return self::WRITE_EXTERNAL;
}
public function writeUsedClasses(WriterInterface $writer) | Doctrine Annotation: always generate M2M table entity. | mysql-workbench-schema-exporter_doctrine2-exporter | train | php |
f6f22b50c25ca0d3b8b31f3cb1daa02f9ec3f3a6 | diff --git a/etrago/appl.py b/etrago/appl.py
index <HASH>..<HASH> 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -154,6 +154,7 @@ def etrago(args):
minimize_loading : bool
False,
+ ...
k_mean_clustering : bool
False, | changed RTD see #<I> | openego_eTraGo | train | py |