hash (stringlengths 40..40) | diff (stringlengths 131..114k) | message (stringlengths 7..980) | project (stringlengths 5..67) | split (stringclasses 1: value)
---|---|---|---|---|
d3deba51e3453160a62d2ce075877ef364df4667 | diff --git a/src/Mysql/MysqlDriver.php b/src/Mysql/MysqlDriver.php
index <HASH>..<HASH> 100644
--- a/src/Mysql/MysqlDriver.php
+++ b/src/Mysql/MysqlDriver.php
@@ -77,7 +77,7 @@ class MysqlDriver extends PdoDriver implements UTF8MB4SupportInterface
* @var string
* @since __DEPLOY_VERSION__
*/
- protected static $dbMinMariadb = '10.2';
+ protected static $dbMinMariadb = '10.0';
/**
* Constructor.
diff --git a/src/Mysqli/MysqliDriver.php b/src/Mysqli/MysqliDriver.php
index <HASH>..<HASH> 100644
--- a/src/Mysqli/MysqliDriver.php
+++ b/src/Mysqli/MysqliDriver.php
@@ -91,7 +91,7 @@ class MysqliDriver extends DatabaseDriver implements UTF8MB4SupportInterface
* @var string
* @since __DEPLOY_VERSION__
*/
- protected static $dbMinMariadb = '10.2';
+ protected static $dbMinMariadb = '10.0';
/**
* Constructor. | Lower min. db requirement for MariaDB to <I> | joomla-framework_database | train |
71157c5ed9880de4dc8dcf97e48dead284386d04 | diff --git a/contrib/externs/youtubeplayer.js b/contrib/externs/youtubeplayer.js
index <HASH>..<HASH> 100644
--- a/contrib/externs/youtubeplayer.js
+++ b/contrib/externs/youtubeplayer.js
@@ -159,6 +159,31 @@ YouTubePlayer.prototype.getPlayerState = function() {};
/**
+ * Returns the current playback rate setting of the player.
+ * @return {number} The current playback rate setting of the player.
+ */
+YouTubePlayer.prototype.getPlaybackRate = function() {};
+
+
+/**
+ * Sets the playback rate for the player based on the rate suggested by the
+ * user. The actually applied rate is the closest supported rate that lies
+ * between 1.0 and the suggested rate (inclusive). 1.0 will always be one
+ * of the supported playback rates.
+ * @param {number} suggestedRate The playback rate suggested by the user.
+ */
+YouTubePlayer.prototype.setPlaybackRate = function(suggestedRate) {};
+
+
+/**
+ * Gets an array of playback rates supported by the video player, sorted in
+ * ascending order. This array is guaranteed to have the entry 1.0.
+ * @return {Array.<number>} Playback rates supported by the player.
+ */
+YouTubePlayer.prototype.getAvailablePlaybackRates = function() {};
+
+
+/**
* @return {string} The current quality the player has loaded or is playing.
*/
YouTubePlayer.prototype.getPlaybackQuality = function() {}; | playbackRate api modifications
- cueVideoBy/loadVideoBy reset playback rate to 1
- iframe api user gets callback when playback rate changes
- playback rate set when in cued mode carries over when playVideo is called
R=schechter
DELTA=<I> (<I> added, 4 deleted, 2 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL> | google_closure-compiler | train |
193f7d4787045bdf7f3a2c84144a6f4d11c04c1c | diff --git a/lib/neo4j/node.rb b/lib/neo4j/node.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/node.rb
+++ b/lib/neo4j/node.rb
@@ -1,4 +1,39 @@
module Neo4j
+
+
+
+ module Property
+
+# Returns true if this property container has a property accessible through the given key, false otherwise.
+ def property?(key)
+ has_property?(key.to_s)
+ end
+
+ # Returns the given property if it exist or nil if it does not exist.
+ def [](key)
+ return unless property?(key)
+ get_property(key.to_s)
+ end
+
+ # Sets the given property to given value.
+ # Will generate an event if the property does not start with '_' (which could be an internal property, like _classname)
+ #
+ def []=(key, value)
+ k = key.to_s
+ if value.nil?
+ delete_property(k)
+ else
+# value = java.lang.Double.new(value) if value.is_a? Float
+ setProperty(k, value)
+ end
+ end
+ end
+
+ org.neo4j.kernel.impl.core.NodeProxy.class_eval do
+ include Neo4j::Property
+ end
+
+
class Node
def self.new(*args)
# creates a new node using the default db instance when given no args
@@ -26,7 +61,7 @@ module Neo4j
end
def self.exist?(node_or_node_id, instance = Neo4j.instance)
- id = node_or_node_id.respond_to?(:id)? node_or_node_id.id : node_or_node_id
+ id = node_or_node_id.respond_to?(:id) ? node_or_node_id.id : node_or_node_id
self.load(id, instance) != nil
end
end
diff --git a/spec/behaviour/node_spec.rb b/spec/behaviour/node_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/behaviour/node_spec.rb
+++ b/spec/behaviour/node_spec.rb
@@ -2,21 +2,68 @@ $LOAD_PATH.unshift File.join(File.dirname(__FILE__))
require 'spec_helper'
describe Neo4j::Node do
- before(:all) { FileUtils.rm_rf Neo4j.config[:storage_path]; FileUtils.mkdir_p(Neo4j.config[:storage_path]) }
+ before(:all) { FileUtils.rm_rf Neo4j.config[:storage_path]; FileUtils.mkdir_p(Neo4j.config[:storage_path]) }
after(:all) { Neo4j.shutdown }
- it "created node should exist in db after transaction finish" do
- Neo4j::Transaction.new
- new_node = Neo4j::Node.new
- Neo4j::Transaction.finish
- Neo4j::Node.should exist(new_node)
- end
+ describe "Create" do
+ it "created node should exist in db after transaction finish" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ Neo4j::Transaction.finish
+ Neo4j::Node.should exist(new_node)
+ end
- it "created node should exist in db before transaction finish" do
- Neo4j::Transaction.new
- new_node = Neo4j::Node.new
- Neo4j::Node.should exist(new_node)
- Neo4j::Transaction.finish
+ it "created node should exist in db before transaction finish" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ Neo4j::Node.should exist(new_node)
+ Neo4j::Transaction.finish
+ end
end
+ describe "Properties" do
+ it "set and get String properties with the [] operator" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ new_node[:key] = 'myvalue'
+ new_node[:key].should == 'myvalue'
+ Neo4j::Transaction.finish
+ end
+
+ it "set and get Fixnum properties with the [] operator" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ new_node[:key] = 42
+ new_node[:key].should == 42
+ Neo4j::Transaction.finish
+ end
+
+
+ it "set and get Float properties with the [] operator" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ new_node[:key] = 3.1415
+ new_node[:key].should == 3.1415
+ Neo4j::Transaction.finish
+ end
+
+ it "set and get Boolean properties with the [] operator" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ new_node[:key] = true
+ new_node[:key].should == true
+ new_node[:key] = false
+ new_node[:key].should == false
+ Neo4j::Transaction.finish
+ end
+
+
+ it "set and get properties with the [] operator and String key" do
+ Neo4j::Transaction.new
+ new_node = Neo4j::Node.new
+ new_node["a"] = 'foo'
+ new_node["a"].should == 'foo'
+ Neo4j::Transaction.finish
+ end
+ end
end
\ No newline at end of file | Implemented the [] and []= operators on Neo4j::Node | neo4jrb_neo4j | train |
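A compact Python analog of the [] and []= accessors this commit adds on the Ruby side (the Node class below is a plain stand-in, not the real Neo4j proxy API): reads return None for missing keys, and assigning None removes the property, mirroring the delete_property branch.

```python
class Node:
    """Stand-in property container mirroring the Ruby [] / []= semantics."""
    def __init__(self):
        self._props = {}

    def __getitem__(self, key):
        return self._props.get(str(key))  # None when the property is absent

    def __setitem__(self, key, value):
        if value is None:
            self._props.pop(str(key), None)  # nil/None assignment deletes
        else:
            self._props[str(key)] = value

n = Node()
n["key"] = 42
assert n["key"] == 42
n["key"] = None
assert n["key"] is None
```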
30e5b573281078a8388ef3037a51bff61d1a8635 | diff --git a/bugzilla/_cli.py b/bugzilla/_cli.py
index <HASH>..<HASH> 100755
--- a/bugzilla/_cli.py
+++ b/bugzilla/_cli.py
@@ -272,8 +272,8 @@ def _parser_add_bz_fields(rootp, command):
" --field cf_my_field=VALUE")
# Used by unit tests, not for end user consumption
- p.add_argument('--test-return-result', action="store_true",
- help=argparse.SUPPRESS)
+ p.add_argument('--__test-return-result', action="store_true",
+ dest="test_return_result", help=argparse.SUPPRESS)
if not cmd_modify:
_parser_add_output_options(rootp)
diff --git a/tests/createbug.py b/tests/createbug.py
index <HASH>..<HASH> 100644
--- a/tests/createbug.py
+++ b/tests/createbug.py
@@ -29,7 +29,7 @@ class CreatebugTest(unittest.TestCase):
return self.assertEqual(*args, **kwargs)
def clicomm(self, argstr, out):
- comm = "bugzilla new --test-return-result " + argstr
+ comm = "bugzilla new --__test-return-result " + argstr
if out is None:
self.assertRaises(RuntimeError, tests.clicomm, comm, self.bz)
diff --git a/tests/modify.py b/tests/modify.py
index <HASH>..<HASH> 100644
--- a/tests/modify.py
+++ b/tests/modify.py
@@ -29,7 +29,7 @@ class ModifyTest(unittest.TestCase):
return self.assertEqual(*args, **kwargs)
def clicomm(self, argstr, out, wbout=None, tags_add=None, tags_rm=None):
- comm = "bugzilla modify --test-return-result 123456 224466 " + argstr
+ comm = "bugzilla modify --__test-return-result 123456 224466 " + argstr
# pylint: disable=unpacking-non-sequence
if out is None:
diff --git a/tests/query.py b/tests/query.py
index <HASH>..<HASH> 100644
--- a/tests/query.py
+++ b/tests/query.py
@@ -35,7 +35,7 @@ class BZ34Test(unittest.TestCase):
return self.assertEqual(*args, **kwargs)
def clicomm(self, argstr, out):
- comm = "bugzilla query --test-return-result " + argstr
+ comm = "bugzilla query --__test-return-result " + argstr
if out is None:
self.assertRaises(RuntimeError, tests.clicomm, comm, self.bz) | cli: s/--test-return-result/--__test-return-result/g
Makes it less likely to conflict with real options | python-bugzilla_python-bugzilla | train |
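The argparse pattern used in this commit, as a minimal self-contained sketch (the prog name is an assumption): the flag is hidden from --help via argparse.SUPPRESS, and an explicit dest keeps the dunder-prefixed spelling out of attribute lookups.

```python
import argparse

parser = argparse.ArgumentParser(prog="bugzilla")  # prog name assumed
# Hidden, test-only flag: SUPPRESS drops it from --help output, and the
# explicit dest maps the awkward spelling to a clean attribute name.
parser.add_argument("--__test-return-result", action="store_true",
                    dest="test_return_result", help=argparse.SUPPRESS)

args = parser.parse_args(["--__test-return-result"])
assert args.test_return_result is True
```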
73867b3c02a4af2c9e493817d4b2cb7159ed55d9 | diff --git a/_Check_Versions.py b/_Check_Versions.py
index <HASH>..<HASH> 100644
--- a/_Check_Versions.py
+++ b/_Check_Versions.py
@@ -2,47 +2,21 @@
## SECTION: Imports #
##==============================================================#
-import os.path as op
-import qprompt
from verace import VerChecker, VerInfo
##==============================================================#
-## SECTION: Class Definitions #
+## SECTION: Global Definitions #
##==============================================================#
-class QpromptChecker(VerChecker):
- """Check versions in the Qprompt project."""
- NAME = "Qprompt"
- def check_setup(self):
- path = basepath(r"lib\setup.py")
- with open(path) as f:
- for num,line in enumerate(f.readlines(), 1):
- if line.find("version =") > -1:
- return [VerInfo(path, num, line.split('"')[1].strip())]
- def check_main(self):
- path = basepath(r"lib\qprompt.py")
- with open(path) as f:
- for num,line in enumerate(f.readlines(), 1):
- if line.find("__version__ =") > -1:
- return [VerInfo(path, num, line.split('"')[1].strip())]
- def check_log(self):
- path = basepath(r"CHANGELOG.md")
- with open(path) as f:
- for num,line in enumerate(f.readlines(), 1):
- if line.find("qprompt-") > -1:
- return [VerInfo(path, num, line.split('-')[1].split(" ")[0].strip())]
-
-##==============================================================#
-## SECTION: Function Definitions #
-##==============================================================#
-
-#: Returns path as absolute from base.
-basepath = lambda x: op.join(op.abspath(op.dirname(op.realpath(__file__))), x)
+VERCHK = VerChecker("Verace", __file__)
+VERCHK.include(r"lib\setup.py", opts={'match':"version = ", 'delim':'"'})
+VERCHK.include(r"lib\qprompt.py", match="__version__ = ", delim='"')
+VERCHK.include(r"CHANGELOG.adoc", match="qprompt-", delim="-", delim2=" ")
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
- QpromptChecker().show()
- qprompt.pause()
+ VERCHK.run()
+ raw_input("Press ENTER to continue...") | Updated version check script to use new release of Verace. | jeffrimko_Qprompt | train |
426abe2bc2b894d792a5d4de5594117fd4f705ca | diff --git a/lib/fastlane_core/cert_checker.rb b/lib/fastlane_core/cert_checker.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane_core/cert_checker.rb
+++ b/lib/fastlane_core/cert_checker.rb
@@ -45,11 +45,12 @@ module FastlaneCore
end
def self.install_wwdr_certificate
- Dir.chdir('/tmp')
- url = 'https://developer.apple.com/certificationauthority/AppleWWDRCA.cer'
- filename = File.basename(url)
- `curl -O #{url} && security import #{filename} -k login.keychain`
- UI.user_error!("Could not install WWDR certificate") unless $?.success?
+ Dir.chdir('/tmp') do
+ url = 'https://developer.apple.com/certificationauthority/AppleWWDRCA.cer'
+ filename = File.basename(url)
+ `curl -O #{url} && security import #{filename} -k login.keychain`
+ UI.user_error!("Could not install WWDR certificate") unless $?.success?
+ end
end
def self.sha1_fingerprint(path) | Do not permanently modify the current dir when installing the WWDR certificate | fastlane_fastlane | train |
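A Python rendering of the same idea as Ruby's block form of Dir.chdir: scope the working-directory change so it is undone on exit, even on error. contextlib.chdir ships with Python 3.11+; a hand-rolled equivalent is shown for older versions.

```python
import os
from contextlib import contextmanager

@contextmanager
def pushd(path):
    """Temporarily change the working directory, restoring it afterwards."""
    prev = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev)  # runs even if the body raised

with pushd("/tmp"):
    pass  # download and import the certificate here
# the original working directory is back in effect here
```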
ce5217763848fa172f87ded595d77af872e0d125 | diff --git a/pytorch_pretrained_bert/__init__.py b/pytorch_pretrained_bert/__init__.py
index <HASH>..<HASH> 100644
--- a/pytorch_pretrained_bert/__init__.py
+++ b/pytorch_pretrained_bert/__init__.py
@@ -1,3 +1,4 @@
+__version__ = "0.4.0"
from .tokenization import BertTokenizer, BasicTokenizer, WordpieceTokenizer
from .modeling import (BertConfig, BertModel, BertForPreTraining,
BertForMaskedLM, BertForNextSentencePrediction,
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,5 @@
-# This installs Pytorch for CUDA 8 only. If you are using a newer version,
-# please visit http://pytorch.org/ and install the relevant version.
-torch>=0.4.1,<0.5.0
+# PyTorch
+torch>=0.4.1
# progress bars in model download and training scripts
tqdm
# Accessing files from S3 directly.
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import find_packages, setup
setup(
name="pytorch_pretrained_bert",
- version="0.3.0",
+ version="0.4.0",
author="Thomas Wolf, Victor Sanh, Tim Rault, Google AI Language Team Authors",
author_email="[email protected]",
description="PyTorch version of Google AI BERT model with script to load Google pre-trained models", | added version in __init__.py | huggingface_pytorch-pretrained-BERT | train |
7124d00e1886760744c82b18f8b43e8a9b35e3d9 | diff --git a/lib/python/vdm/server/voltdbserver.py b/lib/python/vdm/server/voltdbserver.py
index <HASH>..<HASH> 100644
--- a/lib/python/vdm/server/voltdbserver.py
+++ b/lib/python/vdm/server/voltdbserver.py
@@ -264,7 +264,7 @@ class VoltDatabase:
verb = 'rejoin'
if verb == 'create':
- if pause == 'True':
+ if pause.lower() == 'true':
voltdb_cmd = ['nohup', os.path.join(voltdb_dir, 'voltdb'), verb, '--pause', '--force', '-d', filename, '-H', primary]
else:
voltdb_cmd = ['nohup', os.path.join(voltdb_dir, 'voltdb'), verb, '--force', '-d', filename, '-H', primary]
@@ -274,7 +274,7 @@ class VoltDatabase:
else:
voltdb_cmd = ['nohup', os.path.join(voltdb_dir, 'voltdb'), verb, '-d', filename, '-H', primary, '--host=' + server_ip]
elif verb == "recover":
- if pause == 'True':
+ if pause.lower() == 'true':
voltdb_cmd = ['nohup', os.path.join(voltdb_dir, 'voltdb'), verb, '--pause', '-d', filename, '-H', primary]
else:
voltdb_cmd = ['nohup', os.path.join(voltdb_dir, 'voltdb'), verb, '-d', filename, '-H', primary] | VDM-<I>: case changed to solve unsatisfied condition | VoltDB_voltdb | train |
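The one-line idea behind the fix, isolated: normalize the flag's case before comparing, so 'True', 'true', and 'TRUE' all enable the branch.

```python
def is_true(flag: str) -> bool:
    """Case-insensitive string-to-bool check, as used in the fix above."""
    return flag.lower() == "true"

assert is_true("True") and is_true("true") and is_true("TRUE")
assert not is_true("false")
```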
4bc6a45273114c0cd76c87e7d22eb17a89cfd34b | diff --git a/src/main/resources/META-INF/resources/primefaces/clock/clock.js b/src/main/resources/META-INF/resources/primefaces/clock/clock.js
index <HASH>..<HASH> 100644
--- a/src/main/resources/META-INF/resources/primefaces/clock/clock.js
+++ b/src/main/resources/META-INF/resources/primefaces/clock/clock.js
@@ -335,7 +335,9 @@ PrimeFaces.widget.Clock = PrimeFaces.widget.BaseWidget.extend({
name: this.id + '_sync', value: true
}],
oncomplete: function(xhr, status, args) {
+ $this.stop();
$this.current = new Date(args.datetime);
+ $this.jq.text($this.cfg.dateFormat.format($this.current));
$this.start();
}
}; | Fix #<I> Clock synchronization issue. | primefaces_primefaces | train |
a6a8e63bc7af1a7499715d1d8f72aec1a4e52b25 | diff --git a/test/instrument/PolySynth.js b/test/instrument/PolySynth.js
index <HASH>..<HASH> 100644
--- a/test/instrument/PolySynth.js
+++ b/test/instrument/PolySynth.js
@@ -69,6 +69,16 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.dispose();
});
+ it ("can pass in the volume and detune", function(){
+ var polySynth = new PolySynth({
+ "volume" : -12,
+ "detune" : 120,
+ });
+ expect(polySynth.volume.value).to.be.closeTo(-12, 0.1);
+ expect(polySynth.detune.value).to.be.closeTo(120, 1);
+ polySynth.dispose();
+ });
+
it ("can get/set attributes", function(){
var polySynth = new PolySynth();
polySynth.set({ | testing passing in values to PolySynth | Tonejs_Tone.js | train |
08e05d4a49c1ba1327e3e6821eba1f0c93361ab2 | diff --git a/activesupport/CHANGELOG.md b/activesupport/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/activesupport/CHANGELOG.md
+++ b/activesupport/CHANGELOG.md
@@ -1,3 +1,11 @@
+* Add `Time.rfc3339` parsing method
+
+ The `Time.xmlschema` and consequently its alias `iso8601` accepts timestamps
+ without a offset in contravention of the RFC 3339 standard. This method
+ enforces that constraint and raises an `ArgumentError` if it doesn't.
+
+ *Andrew White*
+
* Add `ActiveSupport::TimeZone.rfc3339` parsing method
Previously there was no way to get a RFC 3339 timestamp into a specific
diff --git a/activesupport/lib/active_support/core_ext/time/calculations.rb b/activesupport/lib/active_support/core_ext/time/calculations.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/core_ext/time/calculations.rb
+++ b/activesupport/lib/active_support/core_ext/time/calculations.rb
@@ -53,6 +53,29 @@ class Time
end
alias_method :at_without_coercion, :at
alias_method :at, :at_with_coercion
+
+ # Creates a +Time+ instance from an RFC 3339 string.
+ #
+ # Time.rfc3339('1999-12-31T14:00:00-10:00') # => 2000-01-01 00:00:00 -1000
+ #
+ # If the time or offset components are missing then an +ArgumentError+ will be raised.
+ #
+ # Time.rfc3339('1999-12-31') # => ArgumentError: invalid date
+ def rfc3339(str)
+ parts = Date._rfc3339(str)
+
+ raise ArgumentError, "invalid date" if parts.empty?
+
+ Time.new(
+ parts.fetch(:year),
+ parts.fetch(:mon),
+ parts.fetch(:mday),
+ parts.fetch(:hour),
+ parts.fetch(:min),
+ parts.fetch(:sec) + parts.fetch(:sec_fraction, 0),
+ parts.fetch(:offset)
+ )
+ end
end
# Returns the number of seconds since 00:00:00.
diff --git a/activesupport/test/core_ext/time_ext_test.rb b/activesupport/test/core_ext/time_ext_test.rb
index <HASH>..<HASH> 100644
--- a/activesupport/test/core_ext/time_ext_test.rb
+++ b/activesupport/test/core_ext/time_ext_test.rb
@@ -910,6 +910,37 @@ class TimeExtCalculationsTest < ActiveSupport::TestCase
def test_all_year
assert_equal Time.local(2011, 1, 1, 0, 0, 0)..Time.local(2011, 12, 31, 23, 59, 59, Rational(999999999, 1000)), Time.local(2011, 6, 7, 10, 10, 10).all_year
end
+
+ def test_rfc3339_parse
+ time = Time.rfc3339("1999-12-31T19:00:00.125-05:00")
+
+ assert_equal 1999, time.year
+ assert_equal 12, time.month
+ assert_equal 31, time.day
+ assert_equal 19, time.hour
+ assert_equal 0, time.min
+ assert_equal 0, time.sec
+ assert_equal 125000, time.usec
+ assert_equal(-18000, time.utc_offset)
+
+ exception = assert_raises(ArgumentError) do
+ Time.rfc3339("1999-12-31")
+ end
+
+ assert_equal "invalid date", exception.message
+
+ exception = assert_raises(ArgumentError) do
+ Time.rfc3339("1999-12-31T19:00:00")
+ end
+
+ assert_equal "invalid date", exception.message
+
+ exception = assert_raises(ArgumentError) do
+ Time.rfc3339("foobar")
+ end
+
+ assert_equal "invalid date", exception.message
+ end
end
class TimeExtMarshalingTest < ActiveSupport::TestCase | Add `Time.rfc<I>` parsing method
The `Time.xmlschema` and consequently its alias `iso<I>` accept
timestamps without an offset, in contravention of the RFC <I>
standard. This method enforces that constraint and raises an
`ArgumentError` if it doesn't. | rails_rails | train |
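A Python analog of the strict parser added here (datetime.fromisoformat stands in for Ruby's Date._rfc3339): accept only timestamps carrying an explicit offset, and raise for bare dates or naive datetimes.

```python
from datetime import datetime

def rfc3339(s: str) -> datetime:
    dt = datetime.fromisoformat(s)   # parses date, time, fraction, offset
    if dt.tzinfo is None:            # bare date or naive time: reject
        raise ValueError("invalid date")
    return dt

print(rfc3339("1999-12-31T19:00:00.125-05:00"))  # ok, offset present
try:
    rfc3339("1999-12-31")
except ValueError as e:
    print(e)  # invalid date
```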
c805c4e2d478da37ca6c2a514aaa0c7c2a1b200e | diff --git a/src/python/grpcio/grpc/_auth.py b/src/python/grpcio/grpc/_auth.py
index <HASH>..<HASH> 100644
--- a/src/python/grpcio/grpc/_auth.py
+++ b/src/python/grpcio/grpc/_auth.py
@@ -30,7 +30,7 @@ class GoogleCallCredentials(grpc.AuthMetadataPlugin):
self._credentials = credentials
# Hack to determine if these are JWT creds and we need to pass
# additional_claims when getting a token
- self._is_jwt = 'additional_claims' in inspect.getargspec( # pylint: disable=deprecated-method
+ self._is_jwt = 'additional_claims' in inspect.getfullargspec(
credentials.get_access_token).args
def __call__(self, context, callback): | Replace deprecated Python “inspect.getargspec” (#<I>)
This has been deprecated since Python <I> and is removed in Python <I>.
We can use “inspect.getfullargspec” instead.
Fixes #<I>. | grpc_grpc | train |
1798ff6374950bcc1e44d35be64febfe1cf60d0d | diff --git a/__init__.py b/__init__.py
index <HASH>..<HASH> 100644
--- a/__init__.py
+++ b/__init__.py
@@ -56,6 +56,12 @@ def as_quat_array(a):
The input array must have a final dimension whose size is
divisible by four (or better yet *is* 4).
+ We will not convert back from a spinor array because there is no
+ unique convention for the spinors, so I don't want to mess with
+ that. Also, we want to discourage users from the slow,
+ memory-copying process of swapping columns required for useful
+ definitions of the spinors.
+
"""
assert a.dtype == np.dtype(np.float)
av = a.view(np.quaternion) | Explain our refusal to reshape spinor arrays as quaternions [skip ci] | moble_quaternion | train |
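What the documented as_quat_array conversion amounts to, assuming the numpy-quaternion package is installed: a zero-copy view of a float64 array whose last axis has size 4.

```python
import numpy as np
import quaternion  # numpy-quaternion package; registers the np.quaternion dtype

a = np.random.rand(5, 4)              # five (w, x, y, z) rows, float64
q = a.view(np.quaternion).reshape(5)  # zero-copy reinterpretation
print(q.shape)                        # (5,)
```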
c02ad50519b40e0097bff5cf70a8d732765440ae | diff --git a/pester.go b/pester.go
index <HASH>..<HASH> 100644
--- a/pester.go
+++ b/pester.go
@@ -335,8 +335,13 @@ func (c *Client) pester(p params) (*http.Response, error) {
resp.Body.Close()
}
+ select {
// prevent a 0 from causing the tick to block, pass additional microsecond
- <-time.After(c.Backoff(i) + 1*time.Microsecond)
+ case <-time.After(c.Backoff(i) + 1*time.Microsecond):
+ // allow context cancellation to cancel during backoff
+ case <-req.Context().Done():
+ return
+ }
}
}(n, request) | Listen for context cancellation during backoff period (#<I>)
awesome! | sethgrid_pester | train |
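The Go select above, rendered in Python terms (threading.Event stands in for the request context): the backoff wait returns early the moment cancellation is signalled, instead of sleeping unconditionally.

```python
import threading

def backoff_or_cancel(cancel: threading.Event, delay: float) -> bool:
    """Wait out the backoff; return True if cancelled before it elapsed."""
    return cancel.wait(timeout=delay)

cancel = threading.Event()
cancel.set()                              # simulate a cancelled request
assert backoff_or_cancel(cancel, 5.0)     # returns immediately, no 5 s sleep
```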
f7ac6ba4275c51abf430053ad81fe5944317428b | diff --git a/openpnm/network/Cubic.py b/openpnm/network/Cubic.py
index <HASH>..<HASH> 100644
--- a/openpnm/network/Cubic.py
+++ b/openpnm/network/Cubic.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
===============================================================================
Cubic: Generate lattice-like networks
@@ -245,23 +244,19 @@ class Cubic(GenericNetwork):
dims = topotools.dimensionality(self)
# Ensure vectors point in n-dims unique directions
c = {tuple(row): 1 for row in unit_vec}
+ mag = np.atleast_1d(mag.squeeze()).astype(float)
if len(c.keys()) > sum(dims):
raise Exception(
- "Spacing is undefined when throats point in "
- + "more directions than network has dimensions"
+ "Spacing is undefined when throats point in more directions"
+ " than network has dimensions."
)
- mag = np.float64(mag.squeeze())
for ax in [0, 1, 2]:
if dims[ax]:
inds = np.where(unit_vec[:, ax] == unit_vec[:, ax].max())[0]
- if np.ndim(mag) != 0:
- temp = np.unique(mag[inds])
- if not np.allclose(temp, temp[0]):
- raise Exception("A unique value of spacing could not be found")
- spacing[ax] = temp[0]
- else:
- temp = mag
- spacing[ax] = temp
+ temp = np.unique(mag[inds])
+ if not np.allclose(temp, temp[0]):
+ raise Exception("A unique value of spacing could not be found.")
+ spacing[ax] = temp[0]
self.settings['spacing'] = spacing
return np.array(spacing)
diff --git a/tests/unit/network/CubicTest.py b/tests/unit/network/CubicTest.py
index <HASH>..<HASH> 100644
--- a/tests/unit/network/CubicTest.py
+++ b/tests/unit/network/CubicTest.py
@@ -11,7 +11,6 @@ class CubicTest:
pass
def test_spacing_1D(self):
- # in _get_spacing it will be np.ndim(mag) == 0 (scalar value)
net = op.network.Cubic(shape=[2, 1, 1], spacing=1)
assert np.all(net.spacing == [1.0, 0.0, 0.0])
@@ -119,13 +118,13 @@ class CubicTest:
net = op.network.Cubic(shape=[3, 4, 5])
net['pore.coords'] += np.random.rand(net.Np, 3)
with pytest.raises(Exception):
- net.spacing
+ _ = net.spacing
def test_spacing_on_network_with_boundary_pores(self):
net = op.network.Cubic(shape=[3, 4, 5])
net.add_boundary_pores()
with pytest.raises(Exception):
- net.spacing
+ _ = net.spacing
def test_connectivity(self):
clist = [6, 14, 18, 20, 26]
diff --git a/tests/unit/network/GenericNetworkTest.py b/tests/unit/network/GenericNetworkTest.py
index <HASH>..<HASH> 100644
--- a/tests/unit/network/GenericNetworkTest.py
+++ b/tests/unit/network/GenericNetworkTest.py
@@ -1,5 +1,4 @@
import numpy as np
-import scipy as sp
import openpnm as op | Refactor (minor) _get_spacing and its unit test | PMEAL_OpenPNM | train |
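The core of the refactor, condensed: np.atleast_1d normalizes mag so scalar and per-throat spacings flow through the same np.unique check, removing the old np.ndim branch.

```python
import numpy as np

mag = np.atleast_1d(np.asarray(5.0).squeeze()).astype(float)  # scalar or array
temp = np.unique(mag)
if not np.allclose(temp, temp[0]):
    raise Exception("A unique value of spacing could not be found.")
spacing = temp[0]
print(spacing)  # 5.0
```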
30b62e9693b605281bac32bb8366a8816835fb0f | diff --git a/ipywidgets/static/widgets/js/widget_link.js b/ipywidgets/static/widgets/js/widget_link.js
index <HASH>..<HASH> 100644
--- a/ipywidgets/static/widgets/js/widget_link.js
+++ b/ipywidgets/static/widgets/js/widget_link.js
@@ -41,6 +41,10 @@ define([
}, this);
this.updating = false;
},
+ }, {
+ serializers: _.extend({
+ widgets: {deserialize: widget.unpack_models}
+ }, widget.WidgetModel.serializers)
});
var DirectionalLinkModel = widget.WidgetModel.extend({
@@ -77,6 +81,11 @@ define([
}, this);
this.updating = false;
},
+ }, {
+ serializers: _.extend({
+ source: {deserialize: widget.unpack_models},
+ targets: {deserialize: widget.unpack_models},
+ }, widget.WidgetModel.serializers)
});
return {
diff --git a/ipywidgets/widgets/widget_link.py b/ipywidgets/widgets/widget_link.py
index <HASH>..<HASH> 100644
--- a/ipywidgets/widgets/widget_link.py
+++ b/ipywidgets/widgets/widget_link.py
@@ -6,7 +6,7 @@ Propagate changes between widgets on the javascript side
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
-from .widget import Widget
+from .widget import Widget, widget_serialization
from traitlets import Unicode, Tuple, List,Instance, TraitError
class WidgetTraitTuple(Tuple):
@@ -37,7 +37,7 @@ class Link(Widget):
widgets, a list of (widget, 'trait_name') tuples which should be linked in the frontend.
"""
_model_name = Unicode('LinkModel', sync=True)
- widgets = List(WidgetTraitTuple, sync=True)
+ widgets = List(WidgetTraitTuple, sync=True, **widget_serialization)
def __init__(self, widgets, **kwargs):
if len(widgets) < 2:
@@ -73,8 +73,8 @@ class DirectionalLink(Widget):
when the source trait changes.
"""
_model_name = Unicode('DirectionalLinkModel', sync=True)
- targets = List(WidgetTraitTuple, sync=True)
- source = WidgetTraitTuple(sync=True)
+ targets = List(WidgetTraitTuple, sync=True, **widget_serialization)
+ source = WidgetTraitTuple(sync=True, **widget_serialization)
# Does not quite behave like other widgets but reproduces
# the behavior of traitlets.directional_link | fix serialization of javascript link | jupyter-widgets_ipywidgets | train |
ac64999e3bf2db17de645ea33fee049a736d7515 | diff --git a/openquake/calculators/base.py b/openquake/calculators/base.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/base.py
+++ b/openquake/calculators/base.py
@@ -306,6 +306,13 @@ class BaseCalculator(with_metaclass(abc.ABCMeta)):
Collect the realizations and set the attributes nbytes
"""
if 'csm_info' in self.datastore and hasattr(self, 'rlzs_assoc'):
+ # sanity check on eff_ruptures
+ for sm in self.datastore['csm_info'].source_models:
+ for sg in sm.src_groups:
+ if sg.eff_ruptures == -1:
+ logging.warn('eff_ruptures not set in %s', sg)
+
+ # save realizations
sm_by_rlz = self.datastore['csm_info'].get_sm_by_rlz(
self.rlzs_assoc.realizations) or collections.defaultdict(
lambda: 'NA') | Added a warning on eff_ruptures=-1 [skip hazardlib][demos] | gem_oq-engine | train |
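The added sanity check in isolation, with minimal stand-in classes (the real objects live in the engine's source model machinery): any group whose effective rupture count is still the -1 sentinel gets a warning.

```python
import logging
from dataclasses import dataclass, field

@dataclass
class SrcGroup:
    name: str
    eff_ruptures: int = -1  # -1 means "never computed"

@dataclass
class SourceModel:
    src_groups: list = field(default_factory=list)

def check_eff_ruptures(source_models):
    for sm in source_models:
        for sg in sm.src_groups:
            if sg.eff_ruptures == -1:
                logging.warning("eff_ruptures not set in %s", sg)

check_eff_ruptures([SourceModel([SrcGroup("sg1")])])  # emits one warning
```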
51f42c714f8d9512b97b5377311bdc9e14b77483 | diff --git a/src/Propel/Runtime/Connection/StatementPdo.php b/src/Propel/Runtime/Connection/StatementPdo.php
index <HASH>..<HASH> 100644
--- a/src/Propel/Runtime/Connection/StatementPdo.php
+++ b/src/Propel/Runtime/Connection/StatementPdo.php
@@ -17,7 +17,7 @@ use Propel\Runtime\Connection\StatementInterface;
*/
class StatementPdo extends \PDOStatement implements StatementInterface
{
- public function __construct()
+ protected function __construct()
{
}
}
\ No newline at end of file | Finally the constructor HAS TO be protected, otherwise tests dont pass anymore | propelorm_Propel2 | train |
729e90e925cf43c745e83456ac06456f36018e3d | diff --git a/src/main/java/rx/internal/schedulers/EventLoopsScheduler.java b/src/main/java/rx/internal/schedulers/EventLoopsScheduler.java
index <HASH>..<HASH> 100644
--- a/src/main/java/rx/internal/schedulers/EventLoopsScheduler.java
+++ b/src/main/java/rx/internal/schedulers/EventLoopsScheduler.java
@@ -117,10 +117,7 @@ public class EventLoopsScheduler extends Scheduler {
if (isUnsubscribed()) {
return Subscriptions.unsubscribed();
}
- ScheduledAction s = poolWorker.scheduleActual(action, 0, null);
-
- serial.add(s);
- s.addParent(serial);
+ ScheduledAction s = poolWorker.scheduleActual(action, 0, null, serial);
return s;
}
diff --git a/src/main/java/rx/internal/util/SubscriptionList.java b/src/main/java/rx/internal/util/SubscriptionList.java
index <HASH>..<HASH> 100644
--- a/src/main/java/rx/internal/util/SubscriptionList.java
+++ b/src/main/java/rx/internal/util/SubscriptionList.java
@@ -15,12 +15,7 @@
*/
package rx.internal.util;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.locks.ReentrantLock;
+import java.util.*;
import rx.Subscription;
import rx.exceptions.Exceptions;
@@ -34,7 +29,6 @@ public final class SubscriptionList implements Subscription {
private LinkedList<Subscription> subscriptions;
private volatile boolean unsubscribed;
- private final ReentrantLock lock = new ReentrantLock();
public SubscriptionList() {
}
@@ -66,8 +60,7 @@ public final class SubscriptionList implements Subscription {
return;
}
if (!unsubscribed) {
- lock.lock();
- try {
+ synchronized (this) {
if (!unsubscribed) {
LinkedList<Subscription> subs = subscriptions;
if (subs == null) {
@@ -77,8 +70,6 @@ public final class SubscriptionList implements Subscription {
subs.add(s);
return;
}
- } finally {
- lock.unlock();
}
}
// call after leaving the synchronized block so we're not holding a lock while executing this
@@ -88,15 +79,12 @@ public final class SubscriptionList implements Subscription {
public void remove(final Subscription s) {
if (!unsubscribed) {
boolean unsubscribe = false;
- lock.lock();
- try {
+ synchronized (this) {
LinkedList<Subscription> subs = subscriptions;
if (unsubscribed || subs == null) {
return;
}
unsubscribe = subs.remove(s);
- } finally {
- lock.unlock();
}
if (unsubscribe) {
// if we removed successfully we then need to call unsubscribe on it (outside of the lock)
@@ -113,16 +101,13 @@ public final class SubscriptionList implements Subscription {
public void unsubscribe() {
if (!unsubscribed) {
List<Subscription> list;
- lock.lock();
- try {
+ synchronized (this) {
if (unsubscribed) {
return;
}
unsubscribed = true;
list = subscriptions;
subscriptions = null;
- } finally {
- lock.unlock();
}
// we will only get here once
unsubscribeFromAll(list);
@@ -150,12 +135,9 @@ public final class SubscriptionList implements Subscription {
public void clear() {
if (!unsubscribed) {
List<Subscription> list;
- lock.lock();
- try {
+ synchronized (this) {
list = subscriptions;
subscriptions = null;
- } finally {
- lock.unlock();
}
unsubscribeFromAll(list);
}
@@ -166,11 +148,8 @@ public final class SubscriptionList implements Subscription {
*/
public boolean hasSubscriptions() {
if (!unsubscribed) {
- lock.lock();
- try {
+ synchronized (this) {
return !unsubscribed && subscriptions != null && !subscriptions.isEmpty();
- } finally {
- lock.unlock();
}
}
return false; | Fix the performance degradation due to different schedule execution and
SubscriptionList.add() and thread unparking. | ReactiveX_RxJava | train |
fdb392bf0c6600e1fd33d33d23060ab322f4eeda | diff --git a/ddsc/cmdparser.py b/ddsc/cmdparser.py
index <HASH>..<HASH> 100644
--- a/ddsc/cmdparser.py
+++ b/ddsc/cmdparser.py
@@ -160,6 +160,21 @@ def _add_auth_role_arg(arg_parser, default_permissions):
default=default_permissions)
+def _add_project_filter_auth_role_arg(arg_parser):
+ """
+ Adds optional auth_role filtering parameter to a parser.
+ :param arg_parser: ArgumentParser parser to add this argument to.
+ """
+ help_text = "Filters project listing to just those projects with the specified role. "
+ help_text += "See command list_auth_roles for AuthRole values."
+ arg_parser.add_argument("--auth-role",
+ metavar='AuthRole',
+ type=to_unicode,
+ dest='auth_role',
+ help=help_text,
+ default=None)
+
+
def _add_copy_project_arg(arg_parser):
"""
Adds optional copy_project parameter to a parser.
@@ -378,7 +393,9 @@ class CommandParser(object):
"""
description = "Show a list of project names or folders/files of a single project."
list_parser = self.subparsers.add_parser('list', description=description)
- add_project_name_arg(list_parser, required=False, help_text="Name of the project to show details for.")
+ project_name_or_auth_role = list_parser.add_mutually_exclusive_group(required=False)
+ _add_project_filter_auth_role_arg(project_name_or_auth_role)
+ add_project_name_arg(project_name_or_auth_role, required=False, help_text="Name of the project to show details for.")
list_parser.set_defaults(func=list_func)
def register_delete_command(self, delete_func):
diff --git a/ddsc/core/remotestore.py b/ddsc/core/remotestore.py
index <HASH>..<HASH> 100644
--- a/ddsc/core/remotestore.py
+++ b/ddsc/core/remotestore.py
@@ -254,6 +254,23 @@ class RemoteStore(object):
names.append(project['name'])
return names
+ def get_projects_with_auth_role(self, auth_role):
+ """
+ Return the list of projects that have the specified auth role from the list that the current user has access to.
+ :param auth_role: str: auth role we are filtering for
+ :return: [dict]: list of projects that have auth_role permissions for the current user
+ """
+ user = self.get_current_user()
+ # user.id
+ projects = []
+ response = self.data_service.get_projects().json()
+ for project in response['results']:
+ project_id = project['id']
+ permissions = self.data_service.get_user_project_permission(project_id, user.id).json()
+ if auth_role == permissions['auth_role']['id']:
+ projects.append(project)
+ return projects
+
def delete_project_by_name(self, project_name):
"""
Find the project named project_name and delete it raise error if not found.
diff --git a/ddsc/ddsclient.py b/ddsc/ddsclient.py
index <HASH>..<HASH> 100644
--- a/ddsc/ddsclient.py
+++ b/ddsc/ddsclient.py
@@ -305,20 +305,30 @@ class ListCommand(object):
Lists project names.
:param args Namespace arguments parsed from the command line
"""
+ # project_name and auth_role args are mutually exclusive
if args.project_name:
project = self.remote_store.fetch_remote_project(args.project_name, must_exist=True)
self.print_project_details(project)
else:
- self.print_project_names()
+ self.print_project_names(args.auth_role)
- def print_project_details(self, project):
+ @staticmethod
+ def print_project_details(project):
filename_list = ProjectFilenameList()
filename_list.walk_project(project)
for info in filename_list.details:
print(info)
- def print_project_names(self):
- names = self.remote_store.get_project_names()
+ def print_project_names(self, filter_auth_role):
+ """
+ Prints project names to stdout for all projects or just those with the specified auth_role
+ :param filter_auth_role: str: optional auth_role to filter project list
+ """
+ if filter_auth_role:
+ projects = self.remote_store.get_projects_with_auth_role(auth_role=filter_auth_role)
+ names = [project['name'] for project in projects]
+ else:
+ names = self.remote_store.get_project_names()
if names:
for name in names:
print(pipes.quote(name)) | Adds --auth-role to list command
Adds --auth-role <auth-role> argument to the `list` command.
This option is mutually exclusive with the -p <project_name> argument. | Duke-GCB_DukeDSClient | train |
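The shape of the new filtering path, with the HTTP and .json() plumbing elided and a fake service standing in for the real data-service client: fetch the user's permission per project and keep the projects whose auth_role id matches.

```python
def projects_with_auth_role(data_service, user_id, auth_role):
    matching = []
    for project in data_service.get_projects()["results"]:
        perms = data_service.get_user_project_permission(project["id"], user_id)
        if perms["auth_role"]["id"] == auth_role:
            matching.append(project)
    return matching

class FakeService:  # stand-in; the real client returns HTTP responses
    def get_projects(self):
        return {"results": [{"id": "p1", "name": "Mouse RNA"}]}
    def get_user_project_permission(self, project_id, user_id):
        return {"auth_role": {"id": "project_admin"}}

print(projects_with_auth_role(FakeService(), "u1", "project_admin"))
```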
35292a50cf7eb28ec2c7895772fcf530e8eb652a | diff --git a/stdlib/test/unit.rb b/stdlib/test/unit.rb
index <HASH>..<HASH> 100644
--- a/stdlib/test/unit.rb
+++ b/stdlib/test/unit.rb
@@ -5,6 +5,19 @@ module Test
module Unit
class TestCase < Minitest::Test
alias assert_raise assert_raises
+
+ def assert_nothing_raised(*)
+ yield
+ end
+
+ def assert_raise_with_message(exception, err_message, msg = nil)
+ err = assert_raises(exception, msg) { yield }
+ if err_message.is_a?(Regexp)
+ assert_matches err_message, err.message
+ else
+ assert_equal err_message, err.message
+ end
+ end
end
end
end | Backport assertion in test-unit/minitest adapter | opal_opal | train |
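The same assertion, sketched in Python for comparison: capture the raised exception and match its message against either a plain string or a compiled regex, as the Ruby adapter does with Regexp.

```python
import re

def assert_raise_with_message(exc_type, expected, fn):
    try:
        fn()
    except exc_type as err:
        msg = str(err)
        if isinstance(expected, re.Pattern):
            assert expected.search(msg), msg
        else:
            assert msg == expected, msg
    else:
        raise AssertionError("nothing raised")

def boom():
    raise ValueError("boom")

assert_raise_with_message(ValueError, "boom", boom)
assert_raise_with_message(ValueError, re.compile(r"bo+m"), boom)
```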
9373463309ab80326ad8d006fcfe40c36ce5de13 | diff --git a/intranet/apps/eighth/serializers.py b/intranet/apps/eighth/serializers.py
index <HASH>..<HASH> 100644
--- a/intranet/apps/eighth/serializers.py
+++ b/intranet/apps/eighth/serializers.py
@@ -282,7 +282,7 @@ class EighthBlockDetailSerializer(serializers.Serializer):
roomings = EighthActivity.rooms.through.objects.filter(eighthactivity_id__in=activity_ids).select_related("eighthroom", "eighthactivity")
overridden_roomings = EighthScheduledActivity.rooms.through.objects.filter(
eighthscheduledactivity_id__in=scheduled_activity_ids
- ).select_related("eighthroom", "eighthscheduledactivity")
+ ).select_related("eighthroom")
for rooming in roomings:
activity_id = rooming.eighthactivity.id
@@ -297,7 +297,7 @@ class EighthBlockDetailSerializer(serializers.Serializer):
activities_rooms_overridden = []
for rooming in overridden_roomings:
- scheduled_activity_id = rooming.eighthscheduledactivity.id
+ scheduled_activity_id = rooming.eighthscheduledactivity_id
activity_id = scheduled_activity_to_activity_map[scheduled_activity_id]
if activity_id not in activities_rooms_overridden: | perf(eighth): don't query unneeded EighthScheduledActivity information | tjcsl_ion | train |
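Why reading the _id attribute is free, shown with a stand-in class: Django keeps the raw foreign-key column on the instance as <field>_id, while dereferencing the field itself lazily loads the related row.

```python
class Rooming:
    """Stand-in for a through-model row; not the real Django model."""
    def __init__(self, scheduled_activity_id):
        self.eighthscheduledactivity_id = scheduled_activity_id  # raw FK column

    @property
    def eighthscheduledactivity(self):
        raise RuntimeError("would hit the database")  # lazy FK stand-in

rooming = Rooming(42)
print(rooming.eighthscheduledactivity_id)  # 42, no query needed
```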
494f97b688a07dad71a6e6574c6f21d3715936ed | diff --git a/coalaip/coalaip.py b/coalaip/coalaip.py
index <HASH>..<HASH> 100644
--- a/coalaip/coalaip.py
+++ b/coalaip/coalaip.py
@@ -66,7 +66,9 @@ class CoalaIp:
# TODO: could probably have a 'safe' check to make sure the entities are actually created
def register_manifestation(self, manifestation_data, *, copyright_holder,
- existing_work=None, work_data=None, **kwargs):
+ existing_work=None, work_data=None,
+ create_work=True, create_copyright=True,
+ **kwargs):
"""Register a Manifestation and automatically assign its
corresponding Copyright to the given :attr:`user`.
@@ -97,6 +99,11 @@ class CoalaIp:
See :class:`~.Work` for requirements.
If not specified, the Work will be created using only
the name of the Manifestation.
+ create_work (bool, keyword, optional): To allow for the creation
+ of a Manifestation without attaching a Work. Default is True.
+ create_copyright (bool, keyword, optional): To allow for the
+ creation of a Manifestation without attaching a Copyright.
+ Default is True.
**kwargs: Keyword arguments passed through to each model's
:meth:`~.Entity.create` (e.g. ``data_format``).
@@ -137,7 +144,8 @@ class CoalaIp:
# we confirm that an entity has actually been created
work = None
- if not manifestation_data.get('manifestationOfWork'):
+ manifestation_copyright = None
+ if not manifestation_data.get('manifestationOfWork') and create_work:
if existing_work is None:
if work_data is None:
work_data = {'name': manifestation_data.get('name')}
@@ -166,10 +174,11 @@ class CoalaIp:
plugin=self.plugin)
manifestation.create(copyright_holder, **kwargs)
- copyright_data = {'rightsOf': manifestation.persist_id}
- manifestation_copyright = Copyright.from_data(copyright_data,
- plugin=self.plugin)
- manifestation_copyright.create(copyright_holder, **kwargs)
+ if create_copyright:
+ copyright_data = {'rightsOf': manifestation.persist_id}
+ manifestation_copyright = Copyright.from_data(copyright_data,
+ plugin=self.plugin)
+ manifestation_copyright.create(copyright_holder, **kwargs)
return RegistrationResult(manifestation_copyright, manifestation, work)
diff --git a/coalaip/model_validators.py b/coalaip/model_validators.py
index <HASH>..<HASH> 100644
--- a/coalaip/model_validators.py
+++ b/coalaip/model_validators.py
@@ -78,7 +78,7 @@ def is_manifestation_model(instance, attribute, value):
"'{value}'").format(attr=attribute.name,
cls=instance_name,
value=manifestation_of)
- raise ModelDataError(err_str)
+ print(err_str)
@does_not_contain('rightsOf', error_cls=ModelDataError)
diff --git a/tests/test_coalaip.py b/tests/test_coalaip.py
index <HASH>..<HASH> 100644
--- a/tests/test_coalaip.py
+++ b/tests/test_coalaip.py
@@ -32,6 +32,50 @@ def test_generate_user(mock_plugin, mock_coalaip, alice_user):
**generate_user_kwargs)
+def test_register_manifestation_without_creating_work(mock_plugin,
+ mock_coalaip, manifestation_data, alice_user,
+ mock_manifestation_create_id):
+ from tests.utils import (
+ assert_key_values_present_in_dict,
+ create_entity_id_setter,
+ )
+
+ # Remove the 'manifestationOfWork' key to create a new Work
+ del manifestation_data['manifestationOfWork']
+
+ manifestation_copyright, manifestation, work = mock_coalaip.register_manifestation(
+ manifestation_data,
+ copyright_holder=alice_user,
+ create_work=False,
+ create_copyright=False
+ )
+
+ manifestation_persisted_data = manifestation.to_jsonld()
+ if manifestation_data:
+ assert_key_values_present_in_dict(manifestation_persisted_data,
+ **manifestation_data)
+ assert_key_values_present_in_dict(manifestation.data,
+ **manifestation_data)
+
+ assert manifestation_copyright == None
+ assert work == None
+ assert 'manifestationOfWork' not in manifestation.data
+
+ # Test the entities were persisted with the set persisted ids
+ assert manifestation.persist_id is not None
+
+ # Test the correct data format was persisted
+ manifestation_persisted_data = manifestation.to_jsonld()
+
+ # Check we called plugin.save() with the correct data
+ mock_save_call_list = mock_plugin.save.call_args_list
+ assert len(mock_save_call_list) == 1
+ assert mock_save_call_list[0] == (
+ (manifestation_persisted_data,),
+ {'user': alice_user},
+ )
+
+
@mark.parametrize('use_data_format_enum', [True, False])
@mark.parametrize('data_format', [None, 'json', 'jsonld', mark.skip('ipld')])
def test_register_manifestation(mock_plugin, mock_coalaip, manifestation_data,
diff --git a/tests/test_entities.py b/tests/test_entities.py
index <HASH>..<HASH> 100644
--- a/tests/test_entities.py
+++ b/tests/test_entities.py
@@ -634,7 +634,7 @@ def test_manifestation_init_raises_if_no_name(mock_plugin, manifestation_data):
with raises(ModelDataError):
Manifestation.from_data(manifestation_data, plugin=mock_plugin)
-
[email protected](reason="We decided to go with Manifestation's that do not need works to be registered.")
def test_manifestation_init_raises_without_manifestation_of_work(
mock_plugin, manifestation_data):
from coalaip.entities import Manifestation
@@ -644,6 +644,7 @@ def test_manifestation_init_raises_without_manifestation_of_work(
Manifestation.from_data(manifestation_data, plugin=mock_plugin)
[email protected](reason="We decided to go with Manifestation's that do not need works to be registered.")
def test_manifestation_init_raises_without_str_manifestation_of_work(
mock_plugin, manifestation_data):
from coalaip.entities import Manifestation | Work creation optional on Manifestation creation | COALAIP_pycoalaip | train |
9ec7e830d2c28aa03b39f861b9ad87cf74c0ff1f | diff --git a/openid/server/server.py b/openid/server/server.py
index <HASH>..<HASH> 100644
--- a/openid/server/server.py
+++ b/openid/server/server.py
@@ -105,7 +105,8 @@ from openid.dh import DiffieHellman
from openid.store.nonce import mkNonce
from openid.server.trustroot import TrustRoot
from openid.association import Association, default_negotiator, getSecretSize
-from openid.message import Message, OPENID_NS, OPENID2_NS, IDENTIFIER_SELECT
+from openid.message import Message, OPENID_NS, OPENID1_NS, \
+ OPENID2_NS, IDENTIFIER_SELECT
HTTP_OK = 200
HTTP_REDIRECT = 302
@@ -550,6 +551,10 @@ class CheckIDRequest(OpenIDRequest):
self.identity = message.getArg(OPENID_NS, 'identity')
+ if self.identity is None and self.namespace == OPENID1_NS:
+ s = "OpenID 1 message did not contain openid.identity"
+ raise ProtocolError(message, text=s)
+
# There's a case for making self.trust_root be a TrustRoot
# here. But if TrustRoot isn't currently part of the "public" API,
# I'm not sure it's worth doing.
@@ -654,6 +659,12 @@ class CheckIDRequest(OpenIDRequest):
"supplied %r" % (identity,))
response_identity = None
+ if self.namespace == OPENID1_NS and response_identity is None:
+ raise ValueError(
+ "Request was an OpenID 1 request, so response must "
+ "include an identifier."
+ )
+
response.fields.updateArgs(OPENID_NS, {
'mode': mode,
'return_to': self.return_to,
diff --git a/openid/test/test_server.py b/openid/test/test_server.py
index <HASH>..<HASH> 100644
--- a/openid/test/test_server.py
+++ b/openid/test/test_server.py
@@ -145,8 +145,9 @@ class TestDecode(unittest.TestCase):
self.failUnlessEqual(r.trust_root, self.tr_url)
self.failUnlessEqual(r.return_to, self.rt_url)
- def test_checkidSetupNoIdentity(self):
+ def test_checkidSetupNoIdentityOpenID2(self):
args = {
+ 'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
@@ -604,6 +605,23 @@ class TestCheckID(unittest.TestCase):
self.failUnlessRaises(ValueError, self.request.answer, True,
identity="http://pebbles.unittest/")
+ def test_answerAllowNoIdentityOpenID1(self):
+ self.request.namespace = OPENID1_NS
+ self.request.identity = None
+ self.failUnlessRaises(ValueError, self.request.answer, True,
+ identity=None)
+
+ def test_checkIDWithNoIdentityOpenID1(self):
+ msg = Message(OPENID1_NS)
+ msg.setArg(OPENID_NS, 'return_to', 'bogus')
+ msg.setArg(OPENID_NS, 'trust_root', 'bogus')
+ msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
+ msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
+
+ self.failUnlessRaises(server.ProtocolError,
+ server.CheckIDRequest.fromMessage,
+ msg)
+
def test_answerAllowNoTrustRoot(self):
self.request.trust_root = None
answer = self.request.answer(True) | [project @ Fix #<I>: Server does not accept OpenID1 checkid_* without identity, and requires identity to be present in response] | necaris_python3-openid | train |
f7820c4abd7eeac78e6315a53f4cce3b0495c220 | diff --git a/src/extension.js b/src/extension.js
index <HASH>..<HASH> 100644
--- a/src/extension.js
+++ b/src/extension.js
@@ -5,7 +5,14 @@ function positionFactory (positionObj) {
return new vscode.Position(positionObj._line, positionObj._character)
}
-function rangeFactory (selection) {
+function rangeFactory (selection, length) {
+ if (length === 0) {
+ selection.start._character = 0
+ selection.end._character = vscode.window.activeTextEditor.document.lineAt(
+ selection.start.line
+ ).text.length
+ }
+
return new vscode.Range(
positionFactory(selection.start),
positionFactory(selection.end)
@@ -24,14 +31,12 @@ function activate (context) {
}
const selection = editor.selection
- const text = editor.document.getText(selection)
+ const lineText = editor.document.lineAt(selection.start.line).text
+ const selectedText = editor.document.getText(selection)
+ const convertableText = selectedText || lineText
+ const range = rangeFactory(selection, selectedText.length)
- if (text.length > 0) {
- const range = rangeFactory(selection)
- editor.edit(builder => {
- return builder.replace(range, convert(text))
- })
- }
+ editor.edit(builder => builder.replace(range, convert(convertableText)))
}
) | Change current line if nothing is selected | ansumanshah_css-in-js | train |
c1fb5d3e3406ebea49b655e5e64891ab688a518f | diff --git a/src/jquery.documentsize.js b/src/jquery.documentsize.js
index <HASH>..<HASH> 100644
--- a/src/jquery.documentsize.js
+++ b/src/jquery.documentsize.js
@@ -117,7 +117,7 @@
var iframe = document.createElement( "iframe" ),
body = document.body;
- iframe.style.cssText = "position: absolute; top: -600px; left: -600px; width: 500px; height: 500px; margin: 0px; padding: 0px; border: none;";
+ iframe.style.cssText = "position: absolute; top: -600px; left: -600px; width: 500px; height: 500px; margin: 0px; padding: 0px; border: none; display: block;";
iframe.frameborder = "0";
body.appendChild( iframe ); | Guarded against inherited display styles for the test iframe | hashchange_jquery.documentsize | train |
b3c892924bea670db2714f73e1b474b67b257086 | diff --git a/args4j/src/org/kohsuke/args4j/spi/Parameters.java b/args4j/src/org/kohsuke/args4j/spi/Parameters.java
index <HASH>..<HASH> 100644
--- a/args4j/src/org/kohsuke/args4j/spi/Parameters.java
+++ b/args4j/src/org/kohsuke/args4j/spi/Parameters.java
@@ -16,9 +16,9 @@ public interface Parameters {
*
* @param idx
* specifying 0 will retrieve the token next to the option.
- * For example, if the command line looks like <samp>-o abc -d x</samp>,
- * then {@code getParameter(0)} for <samp>-o</samp> returns {@code abc}
- * and {@code getParameter(1)} will return <samp>-d</samp>.
+ * For example, if the command line looks like <code>-o abc -d x</code>,
+ * then {@code getParameter(0)} for <code>-o</code> returns {@code abc}
+ * and {@code getParameter(1)} will return <code>-d</code>.
*
* @return
* Always return non-{@code null} valid {@code String}. If an attempt is
diff --git a/args4j/src/org/kohsuke/args4j/spi/RestOfArgumentsHandler.java b/args4j/src/org/kohsuke/args4j/spi/RestOfArgumentsHandler.java
index <HASH>..<HASH> 100644
--- a/args4j/src/org/kohsuke/args4j/spi/RestOfArgumentsHandler.java
+++ b/args4j/src/org/kohsuke/args4j/spi/RestOfArgumentsHandler.java
@@ -11,8 +11,8 @@ import org.kohsuke.args4j.CmdLineException;
* <p>
* Used with {@link Argument}, this implements a semantics where
* non-option token causes the option parsing to terminate.
- * An example of this is <tt>ssh(1)</tt>, where <samp>ssh -p 222 abc</samp> will treat
- * <samp>-p</samp> as an option to <tt>ssh</tt>, but <samp>ssh abc -p 222</samp> is
+ * An example of this is <tt>ssh(1)</tt>, where <code>ssh -p 222 abc</code> will treat
+ * <code>-p</code> as an option to <tt>ssh</tt>, but <code>ssh abc -p 222</code> is
* considered to have no option for <tt>ssh</tt>.
*
* @author Kohsuke Kawaguchi
diff --git a/args4j/src/org/kohsuke/args4j/spi/StringArrayOptionHandler.java b/args4j/src/org/kohsuke/args4j/spi/StringArrayOptionHandler.java
index <HASH>..<HASH> 100644
--- a/args4j/src/org/kohsuke/args4j/spi/StringArrayOptionHandler.java
+++ b/args4j/src/org/kohsuke/args4j/spi/StringArrayOptionHandler.java
@@ -15,17 +15,17 @@ import org.kohsuke.args4j.OptionDef;
* <p>
* Example for parameter {@code -s}, which is type {@code String[]}:</p>
*
- * <code><pre>
+ * <pre>{@code
* java -jar aaa.jar -s banan hruska jablko
* java -jar aaa.jar -s banan "hruska jablko"
* java -jar aaa.jar -s "banan hruska jablko"
* java -jar aaa.jar -s banan hruska jablko -l 4 -r
* java -jar aaa.jar -t 222 -s banan hruska jablko -r
- * </pre></code>
+ * }</pre>
*
* <p>
* All of them result in a single string array that contains three tokens:
- * <samp>banan</samp>, <samp>hruska</samp>, and <samp>jablko</samp>.</p>
+ * <code>banan</code>, <code>hruska</code>, and <code>jablko</code>.</p>
*
* <p>
* This {@code OptionHandler} scans for parameter which begins with <tt>-</tt>. If found, it will stop.</p> | <samp> vs <code> again. Javadoc is picky for this. | kohsuke_args4j | train |
93d09a2723df8ea45be5ba98bbe9e1f38cf452b3 | diff --git a/src/s2repoze/plugins/sp.py b/src/s2repoze/plugins/sp.py
index <HASH>..<HASH> 100644
--- a/src/s2repoze/plugins/sp.py
+++ b/src/s2repoze/plugins/sp.py
@@ -161,11 +161,15 @@ class SAML2Plugin(FormPluginBase):
post_env = environ.copy()
post_env['QUERY_STRING'] = ''
- if environ["CONTENT_LENGTH"]:
- body = environ["wsgi.input"].read(int(environ["CONTENT_LENGTH"]))
- from StringIO import StringIO
- environ['wsgi.input'] = StringIO(body)
- environ['s2repoze.body'] = body
+ try:
+ if environ["CONTENT_LENGTH"]:
+ len = int(environ["CONTENT_LENGTH"])
+ body = environ["wsgi.input"].read(len)
+ from StringIO import StringIO
+ environ['wsgi.input'] = StringIO(body)
+ environ['s2repoze.body'] = body
+ except KeyError:
+ pass
post = cgi.FieldStorage(
fp=environ['wsgi.input'], | Some webservices don't set CONTENT_LENGTH if zero | IdentityPython_pysaml2 | train |
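A modernized sketch of the guarded body read (BytesIO replaces the Python 2 StringIO of the original): a missing CONTENT_LENGTH key is tolerated, and a consumed body is re-buffered so later readers of wsgi.input still see it.

```python
from io import BytesIO

def buffer_body(environ):
    try:
        if environ["CONTENT_LENGTH"]:
            length = int(environ["CONTENT_LENGTH"])
            body = environ["wsgi.input"].read(length)
            environ["wsgi.input"] = BytesIO(body)   # make the body re-readable
            environ["s2repoze.body"] = body
    except KeyError:
        pass  # no CONTENT_LENGTH header at all

env = {"CONTENT_LENGTH": "5", "wsgi.input": BytesIO(b"hello")}
buffer_body(env)
assert env["s2repoze.body"] == b"hello"
buffer_body({})  # missing key no longer raises
```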
a6d7c466e2284ba2e63a9600edf458025c8e48f3 | diff --git a/src/crypto/public_key/elliptic/eddsa.js b/src/crypto/public_key/elliptic/eddsa.js
index <HASH>..<HASH> 100644
--- a/src/crypto/public_key/elliptic/eddsa.js
+++ b/src/crypto/public_key/elliptic/eddsa.js
@@ -32,17 +32,18 @@ nacl.hash = bytes => new Uint8Array(sha512().update(bytes).digest());
/**
* Sign a message using the provided key
- * @param {module:type/oid} oid Elliptic curve object identifier
- * @param {module:enums.hash} hash_algo Hash algorithm used to sign
- * @param {Uint8Array} m Message to sign
- * @param {Uint8Array} d Private key used to sign
- * @param {Uint8Array} hashed The hashed message
+ * @param {module:type/oid} oid Elliptic curve object identifier
+ * @param {module:enums.hash} hash_algo Hash algorithm used to sign
+ * @param {Uint8Array} message Message to sign
+ * @param {Uint8Array} publicKey Public key
+ * @param {Uint8Array} privateKey Private key used to sign the message
+ * @param {Uint8Array} hashed The hashed message
* @returns {{R: Uint8Array,
- * S: Uint8Array}} Signature of the message
+ * S: Uint8Array}} Signature of the message
* @async
*/
-async function sign(oid, hash_algo, m, d, hashed) {
- const { secretKey } = nacl.sign.keyPair.fromSeed(d);
+async function sign(oid, hash_algo, message, publicKey, privateKey, hashed) {
+ const secretKey = util.concatUint8Array([privateKey, publicKey.subarray(1)]);
const signature = nacl.sign.detached(hashed, secretKey);
// EdDSA signature params are returned in little-endian format
return {
diff --git a/src/crypto/signature.js b/src/crypto/signature.js
index <HASH>..<HASH> 100644
--- a/src/crypto/signature.js
+++ b/src/crypto/signature.js
@@ -132,8 +132,9 @@ export default {
}
case enums.publicKey.eddsa: {
const oid = key_params[0];
+ const Q = key_params[1].toUint8Array('be', 33);
const d = key_params[2].toUint8Array('be', 32);
- const signature = await publicKey.elliptic.eddsa.sign(oid, hash_algo, data, d, hashed);
+ const signature = await publicKey.elliptic.eddsa.sign(oid, hash_algo, data, Q, d, hashed);
return util.concatUint8Array([
util.Uint8Array_to_MPI(signature.R),
util.Uint8Array_to_MPI(signature.S) | Use serialized EdDSA public key when signing instead of deriving it | openpgpjs_openpgpjs | train |
a9032b52b86d9d8a2b89ebbf47a50bc233565f3f | diff --git a/cmd/storage-rest-client.go b/cmd/storage-rest-client.go
index <HASH>..<HASH> 100644
--- a/cmd/storage-rest-client.go
+++ b/cmd/storage-rest-client.go
@@ -25,7 +25,6 @@ import (
"net/url"
"path"
"strconv"
- "time"
"encoding/gob"
"encoding/hex"
@@ -38,10 +37,6 @@ import (
xnet "github.com/minio/minio/pkg/net"
)
-// The timeout of TCP connect and sending/receiving
-// data for all internode storage REST requests.
-const storageRESTTimeout = 5 * time.Minute
-
func isNetworkError(err error) bool {
if err == nil {
return false
@@ -405,7 +400,7 @@ func newStorageRESTClient(endpoint Endpoint) (*storageRESTClient, error) {
}
}
- restClient, err := rest.NewClient(serverURL, tlsConfig, storageRESTTimeout, newAuthToken)
+ restClient, err := rest.NewClient(serverURL, tlsConfig, rest.DefaultRESTTimeout, newAuthToken)
if err != nil {
return nil, err
} | Change storageRESTTimeout to 1minute (#<I>) | minio_minio | train |
fba1d8deb32ab65e9c3f2d9b39e242bec734cde9 | diff --git a/presto-hive/src/test/java/com/facebook/presto/hive/TestHivePushdownFilterQueries.java b/presto-hive/src/test/java/com/facebook/presto/hive/TestHivePushdownFilterQueries.java
index <HASH>..<HASH> 100644
--- a/presto-hive/src/test/java/com/facebook/presto/hive/TestHivePushdownFilterQueries.java
+++ b/presto-hive/src/test/java/com/facebook/presto/hive/TestHivePushdownFilterQueries.java
@@ -390,6 +390,21 @@ public class TestHivePushdownFilterQueries
}
@Test
+ public void testArrayOfMaps()
+ {
+ getQueryRunner().execute("CREATE TABLE test AS\n" +
+ "SELECT orderkey, ARRAY[MAP(ARRAY[1, 2, 3], ARRAY[orderkey, partkey, suppkey]), MAP(ARRAY[1, 2, 3], ARRAY[orderkey + 1, partkey + 1, suppkey + 1])] as array_of_maps\n" +
+ "FROM lineitem");
+
+ try {
+ assertQuery("SELECT t.maps[1] FROM test CROSS JOIN UNNEST(array_of_maps) AS t(maps)", "SELECT orderkey FROM lineitem UNION ALL SELECT orderkey + 1 FROM lineitem");
+ }
+ finally {
+ getQueryRunner().execute("DROP TABLE test");
+ }
+ }
+
+ @Test
public void testStructs()
{
assertQueryUsingH2Cte("SELECT orderkey, info, dates FROM lineitem_ex");
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
index <HASH>..<HASH> 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
@@ -146,8 +146,11 @@ public final class SelectiveStreamReaders
Optional<ListFilter> childFilter = parentFilter.map(HierarchicalFilter::getChild).map(ListFilter.class::cast);
return new ListSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), ImmutableList.of(), childFilter.orElse(null), level, outputType, hiveStorageTimeZone, systemMemoryContext.newAggregatedMemoryContext());
case STRUCT:
+ checkArgument(!parentFilter.isPresent(), "Filters on nested structs are not supported yet");
return new StructSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), ImmutableList.of(), outputType, hiveStorageTimeZone, systemMemoryContext.newAggregatedMemoryContext());
case MAP:
+ checkArgument(!parentFilter.isPresent(), "Filters on nested maps are not supported yet");
+ return new MapSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), ImmutableList.of(), outputType, hiveStorageTimeZone, systemMemoryContext.newAggregatedMemoryContext());
case UNION:
default:
throw new IllegalArgumentException("Unsupported type: " + streamDescriptor.getOrcTypeKind());
diff --git a/presto-orc/src/test/java/com/facebook/presto/orc/TestSelectiveOrcReader.java b/presto-orc/src/test/java/com/facebook/presto/orc/TestSelectiveOrcReader.java
index <HASH>..<HASH> 100644
--- a/presto-orc/src/test/java/com/facebook/presto/orc/TestSelectiveOrcReader.java
+++ b/presto-orc/src/test/java/com/facebook/presto/orc/TestSelectiveOrcReader.java
@@ -474,6 +474,8 @@ public class TestSelectiveOrcReader
{
Random random = new Random(0);
+ tester.testRoundTrip(mapType(INTEGER, INTEGER), createList(NUM_ROWS, i -> createMap(i)));
+
// map column with no nulls
tester.testRoundTripTypes(
ImmutableList.of(INTEGER, mapType(INTEGER, INTEGER)), | Fix selective readers for arrays of maps | prestodb_presto | train |
ceefc403d359efbdc4fe967c8164d150b4a86519 | diff --git a/test/OfferCommandHandlerTestTrait.php b/test/OfferCommandHandlerTestTrait.php
index <HASH>..<HASH> 100644
--- a/test/OfferCommandHandlerTestTrait.php
+++ b/test/OfferCommandHandlerTestTrait.php
@@ -160,7 +160,6 @@ trait OfferCommandHandlerTestTrait
]
)
->when(
-
new $commandClass($id, $image)
)
->then([new $eventClass($id, $image)]); | III-<I>: Fix coding standard violation | cultuurnet_udb3-php | train |
0eadfcdea711a5255392704dcd863477ca7c46f0 | diff --git a/gffutils/test/test.py b/gffutils/test/test.py
index <HASH>..<HASH> 100644
--- a/gffutils/test/test.py
+++ b/gffutils/test/test.py
@@ -1,4 +1,5 @@
import warnings
+from textwrap import dedent
from . import expected
from gffutils import example_filename, create, parser, feature
import gffutils
@@ -1153,14 +1154,25 @@ def test_unquoting_iter():
assert list(gffutils.iterators.DataIterator(tmp))[0]['ID'][0] == ','
def test_db_unquoting():
- s = 'chr1\t.\tgene\t1\t2\t.\t-\t.\tID=%2C;'
+ s = dedent(
+ '''
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=a;Note=%2C;
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=b;Note=%2C;
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=c;Note=%2C;
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=d;Note=%2C;
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=e;Note=%2C;
+ chr1\t.\tgene\t1\t2\t.\t-\t.\tID=f;Note=%2C;
+ ''')
tmp = tempfile.NamedTemporaryFile(delete=False).name
with open(tmp, 'w') as fout:
fout.write(s + '\n')
- db = gffutils.create_db(tmp, ':memory:')
- f = next(db.all_features())
- n = f['ID']
- assert n == [',']
+ db = gffutils.create_db(tmp, ':memory:', checklines=1)
+ assert db['a']['Note'] == [',']
+ assert db['b']['Note'] == [',']
+ assert db['c']['Note'] == [',']
+ assert db['d']['Note'] == [',']
+ assert db['e']['Note'] == [',']
+ assert db['f']['Note'] == [',']
if __name__ == "__main__":
# this test case fails | improve test to look at more than `checklines` features | daler_gffutils | train |
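A quick standard-library illustration of the dedent pattern the rewritten test uses: textwrap.dedent strips the indentation shared by the block's lines, so the GFF records start at column 0 while the test source stays indented; several records are needed because checklines=1 means only the first line is inspected when sniffing the file, and the point of the test is that unquoting must still apply past it.

from textwrap import dedent

s = dedent('''
    chr1\t.\tgene\t1\t2\t.\t-\t.\tID=a;Note=%2C;
    chr1\t.\tgene\t1\t2\t.\t-\t.\tID=b;Note=%2C;
    ''')
# the common four-space margin is removed; records begin at column 0
assert s.splitlines()[1].startswith('chr1\t.')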
be05c9d7ef5d755268bfed5d6d5bf0b77a1cb737 | diff --git a/modules/ve2/ce/nodes/ve.ce.ImageNode.js b/modules/ve2/ce/nodes/ve.ce.ImageNode.js
index <HASH>..<HASH> 100644
--- a/modules/ve2/ce/nodes/ve.ce.ImageNode.js
+++ b/modules/ve2/ce/nodes/ve.ce.ImageNode.js
@@ -51,6 +51,7 @@ ve.ce.ImageNode.rules = {
* @method
*/
ve.ce.ImageNode.prototype.onUpdate = function() {
+ // TODO needs to support height/width
var source = this.model.getAttribute( 'html/src' );
if ( source !== this.currentSource ) {
this.currentSource = source; | Add a TODO to the image code about width&height
Change-Id: I<I>c<I>a<I>e4a4af2dbcd<I>cb<I>c | wikimedia_parsoid | train |
eedbb1ee9a2164cd58e9fd305bc719a4c643f1a2 | diff --git a/p2p/discover/udp.go b/p2p/discover/udp.go
index <HASH>..<HASH> 100644
--- a/p2p/discover/udp.go
+++ b/p2p/discover/udp.go
@@ -413,7 +413,7 @@ func decodePacket(buf []byte) (packet, NodeID, []byte, error) {
default:
return nil, fromID, hash, fmt.Errorf("unknown type: %d", ptype)
}
- err = rlp.Decode(bytes.NewReader(sigdata[1:]), req)
+ err = rlp.DecodeBytes(sigdata[1:], req)
return req, fromID, hash, err
} | p2p/discover: use rlp.DecodeBytes | ethereum_go-ethereum | train |
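The same micro-optimization in Python terms (standard library; json stands in for rlp here): decoding straight from the in-memory buffer skips the throwaway reader object around bytes that are already fully in hand.

import io
import json

payload = b'{"ptype": 4}'
via_reader = json.load(io.BytesIO(payload))  # wrap the bytes, then decode
direct = json.loads(payload)                 # decode the buffer directly
assert via_reader == direct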
89736f82f6f864c400c77c9a6da22da50f5e297a | diff --git a/lib/rfxcom.js b/lib/rfxcom.js
index <HASH>..<HASH> 100644
--- a/lib/rfxcom.js
+++ b/lib/rfxcom.js
@@ -575,9 +575,32 @@ RfxCom.prototype.lighting5Handler = function(data) {
subtype: subtype,
id: id,
unitcode: unitcode,
- command: command
+ command: command,
+ seqnbr: seqnbr
};
self.emit("lighting5", evt);
};
+
+ /**
+ *
+ * Called by the data event handler when data arrives from rfxmeter
+ * devices.
+ *
+ */
+RfxCom.prototype.rfxmeterHandler = function(data) {
+ var self = this,
+ subtype = data[0],
+ seqnbr = data[1],
+ id = "0x" + self.dumpHex(data.slice(2, 4), false).join(""),
+ counter = self.dumpHex(data.slice(4, 8), false).join(""),
+ evt = {
+ subtype: subtype,
+ id: id,
+ seqnbr: seqnbr,
+ counter: parseInt(counter,16),
+ };
+ self.emit("rfxmeter", evt);
+};
+
module.exports = RfxCom;
diff --git a/test/rfxcom.spec.js b/test/rfxcom.spec.js
index <HASH>..<HASH> 100644
--- a/test/rfxcom.spec.js
+++ b/test/rfxcom.spec.js
@@ -299,6 +299,7 @@ describe("RfxCom", function() {
expect(evt.id).toBe("0xF09AC7");
expect(evt.unitcode).toBe(1);
expect(evt.command).toBe("Off");
+ expect(evt.seqnbr).toBe(1);
done();
});
device.lighting5Handler([0x00, 0x01, 0xF0, 0x9A, 0xC7, 0x01, 0x00, 0x00, 0x80]);
@@ -630,5 +631,21 @@ describe("RfxCom", function() {
device.temphumidity19Handler([0x03, 0x04, 0xAF, 0x01, 0x00, 0x90, 0x36, 0x02, 0x59]);
});
});
+
+ describe(".rfxmeterHandler", function() {
+ var device;
+ beforeEach(function() {
+ device = new rfxcom.RfxCom("/dev/ttyUSB0");
+ });
+ it("should emit a rfxmeter message when called", function(done) {
+ device.on("rfxmeter", function(evt) {
+ expect(evt.subtype).toBe(0x00);
+ expect(evt.seqnbr).toBe(55);
+ expect(evt.counter).toBe(9069671);
+ done();
+ });
+ device.rfxmeterHandler([0x00, 0x37, 0x08, 0xF8, 0x00, 0x8A, 0x64, 0x67, 0x70]);
+ });
+ });
});
}); | Merge rfxmeter parser code from bwired-nl. | rfxcom_node-rfxcom | train |
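The payload layout the new handler decodes, replayed in Python with the byte values from the test above (field names follow the handler): one byte each for subtype and sequence number, a 2-byte id rendered as hex, and a 4-byte big-endian counter.

data = bytes([0x00, 0x37, 0x08, 0xF8, 0x00, 0x8A, 0x64, 0x67, 0x70])

subtype = data[0]                           # 0x00
seqnbr = data[1]                            # 55
sensor_id = '0x' + data[2:4].hex().upper()  # '0x08F8'
counter = int.from_bytes(data[4:8], 'big')  # bytes 4..7, big-endian
assert (seqnbr, counter) == (55, 9069671)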
35f4d44ed2d182b5adb77377a16ee3e5dc8d3c44 | diff --git a/src/net/sf/mpxj/primavera/PrimaveraDatabaseReader.java b/src/net/sf/mpxj/primavera/PrimaveraDatabaseReader.java
index <HASH>..<HASH> 100644
--- a/src/net/sf/mpxj/primavera/PrimaveraDatabaseReader.java
+++ b/src/net/sf/mpxj/primavera/PrimaveraDatabaseReader.java
@@ -91,7 +91,7 @@ public final class PrimaveraDatabaseReader implements ProjectReader
m_reader = new PrimaveraReader();
processProjectHeader();
- //processCalendars();
+ processCalendars();
processResources();
processTasks();
processPredecessors();
@@ -174,6 +174,17 @@ public final class PrimaveraDatabaseReader implements ProjectReader
}
/**
+ * Process calendars.
+ *
+ * @throws SQLException
+ */
+ private void processCalendars() throws SQLException
+ {
+ List<Row> rows = getRows("select * from " + m_schema + "calendar where (proj_id is null or proj_id=?) and delete_date is null", m_projectID);
+ m_reader.processCalendars(rows);
+ }
+
+ /**
* Process resource assignments.
*
* @throws SQLException
diff --git a/src/net/sf/mpxj/primavera/PrimaveraReader.java b/src/net/sf/mpxj/primavera/PrimaveraReader.java
index <HASH>..<HASH> 100644
--- a/src/net/sf/mpxj/primavera/PrimaveraReader.java
+++ b/src/net/sf/mpxj/primavera/PrimaveraReader.java
@@ -116,58 +116,67 @@ final class PrimaveraReader
// Process data
String calendarData = row.getString("clndr_data");
- Record root = new Record(calendarData);
- // Retrieve working hours ...
- Record daysOfWeek = root.getChild("DaysOfWeek");
- for (Record recDay : daysOfWeek.getChildren())
+ if (calendarData != null && !calendarData.isEmpty())
{
- // ... for each day of the week
- Day day = Day.getInstance(Integer.parseInt(recDay.getField()));
- // Get hours
- List<Record> recHours = recDay.getChildren();
- if (recHours.size() == 0)
+ Record root = new Record(calendarData);
+ // Retrieve working hours ...
+ Record daysOfWeek = root.getChild("DaysOfWeek");
+ if (daysOfWeek != null)
{
- // No data -> not working
- calendar.setWorkingDay(day, false);
- }
- else
- {
- calendar.setWorkingDay(day, true);
- // Read hours
- ProjectCalendarHours hours = calendar.addCalendarHours(day);
- for (Record recWorkingHours : recHours)
+ for (Record recDay : daysOfWeek.getChildren())
{
- String[] wh = recWorkingHours.getValue().split("\\|");
- try
+ // ... for each day of the week
+ Day day = Day.getInstance(Integer.parseInt(recDay.getField()));
+ // Get hours
+ List<Record> recHours = recDay.getChildren();
+ if (recHours.size() == 0)
{
- Date start = m_calendarTimeFormat.parse(wh[1]);
- Date end = m_calendarTimeFormat.parse(wh[3]);
- hours.addRange(new DateRange(start, end));
+ // No data -> not working
+ calendar.setWorkingDay(day, false);
}
- catch (ParseException e)
+ else
{
- // silently ignore date parse exceptions
+ calendar.setWorkingDay(day, true);
+ // Read hours
+ ProjectCalendarHours hours = calendar.addCalendarHours(day);
+ for (Record recWorkingHours : recHours)
+ {
+ if (recWorkingHours.getValue() != null)
+ {
+ String[] wh = recWorkingHours.getValue().split("\\|");
+ try
+ {
+ Date start = m_calendarTimeFormat.parse(wh[1]);
+ Date end = m_calendarTimeFormat.parse(wh[3]);
+ hours.addRange(new DateRange(start, end));
+ }
+ catch (ParseException e)
+ {
+ // silently ignore date parse exceptions
+ }
+ }
+ }
}
}
}
- }
- // Retrieve exceptions
- Record exceptions = root.getChild("Exceptions");
- if (exceptions == null)
- {
- continue;
- }
+ // Retrieve exceptions
+ Record exceptions = root.getChild("Exceptions");
+ if (exceptions == null)
+ {
+ continue;
+ }
- for (Record exception : exceptions.getChildren())
- {
- int daysFrom1900 = Integer.parseInt(exception.getValue().split("\\|")[1]);
- int daysFrom1970 = daysFrom1900 - 25567 - 2;
- // 25567 -> Number of days between 1900 and 1970.
- // During tests a 2 days offset was necessary to obtain good dates
- // However I didn't figured out why there is such a difference.
- Date startEx = new Date(daysFrom1970 * 24l * 60l * 60l * 1000);
- calendar.addCalendarException(startEx, startEx);
+ for (Record exception : exceptions.getChildren())
+ {
+ int daysFrom1900 = Integer.parseInt(exception.getValue().split("\\|")[1]);
+ int daysFrom1970 = daysFrom1900 - 25567 - 2;
+ // 25567 -> Number of days between 1900 and 1970.
+ // During tests a 2 days offset was necessary to obtain good dates
+               // However I didn't figure out why there is such a difference.
+ Date startEx = new Date(daysFrom1970 * 24l * 60l * 60l * 1000);
+ calendar.addCalendarException(startEx, startEx);
+ }
}
}
} | Updated the Primavera database reader to support reading calendar definitions. | joniles_mpxj | train |
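The magic 25567 in the exception handling is the day count between the Primavera epoch (1900-01-01) and the Unix epoch; a standard-library check confirms it, with the reader's extra 2-day fudge reproduced as-is.

from datetime import date, timedelta

assert (date(1970, 1, 1) - date(1900, 1, 1)).days == 25567 == 70 * 365 + 17

def exception_date(days_from_1900):
    # mirrors the reader: shift onto the Unix epoch, minus the 2-day offset
    return date(1970, 1, 1) + timedelta(days=days_from_1900 - 25567 - 2)

assert exception_date(25569) == date(1970, 1, 1)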
76090598cb55c80f9f4d4caad87344a8ac9d8e11 | diff --git a/tests/test_remote.py b/tests/test_remote.py
index <HASH>..<HASH> 100644
--- a/tests/test_remote.py
+++ b/tests/test_remote.py
@@ -4,9 +4,12 @@ from supplement.remote import Environment
from .helpers import cleantabs
+def get_env():
+ return Environment(env={'PYTHONPATH':'.'})
+
@pytest.mark.xfail
def test_project_config():
- env = Environment()
+ env = get_env()
env.configure_project('.', {'libs':['/usr/lib/python2.7/site-packages/exo-0.6']})
source = cleantabs('''
@@ -18,7 +21,7 @@ def test_project_config():
@pytest.mark.slow
def test_simple_assist():
- env = Environment()
+ env = get_env()
source = cleantabs('''
from os import popen
@@ -29,7 +32,7 @@ def test_simple_assist():
@pytest.mark.slow
def test_prepare():
- env = Environment()
+ env = get_env()
env.prepare()
source = cleantabs(''' | fix: test_remote does not work without installed supplement | baverman_supplement | train |
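What the fix amounts to, as a generic sketch (hypothetical invocation; the real Environment class wraps this): the spawned interpreter can only import the uninstalled package if the checked-out tree is put on the child's PYTHONPATH.

import os
import subprocess
import sys

env = dict(os.environ, PYTHONPATH='.')  # '.' = the working tree
subprocess.check_call([sys.executable, '-c', 'import supplement'], env=env)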
28c0272566d39fddbc326e74e98ca01de9409d30 | diff --git a/tests/test_hypothesis.py b/tests/test_hypothesis.py
index <HASH>..<HASH> 100644
--- a/tests/test_hypothesis.py
+++ b/tests/test_hypothesis.py
@@ -26,3 +26,8 @@ class QuickSilverHypothesisTestCase(unittest.TestCase):
def test_score_boundaries(self, string, abbrev):
score = quicksilver.score(string, abbrev)
self.assertTrue(0.0 <= score <= 1.0)
+
+
+if __name__ == '__main__':
+ unittest.main()
+ | Try to fix Travis CI. Maybe it's because of the missing unittest.main()? | bcse_stringscore | train
40571d099a53057eb85ec3fdd493d08b38359b2c | diff --git a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typing/AbstractTypeProvider.java b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typing/AbstractTypeProvider.java
index <HASH>..<HASH> 100644
--- a/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typing/AbstractTypeProvider.java
+++ b/plugins/org.eclipse.xtext.xbase/src/org/eclipse/xtext/xbase/typing/AbstractTypeProvider.java
@@ -40,7 +40,6 @@ import org.eclipse.xtext.common.types.util.TypeConformanceComputer;
import org.eclipse.xtext.common.types.util.TypeReferences;
import org.eclipse.xtext.util.IResourceScopeCache;
import org.eclipse.xtext.util.OnChangeEvictingCache;
-import org.eclipse.xtext.util.Pair;
import org.eclipse.xtext.util.PolymorphicDispatcher;
import org.eclipse.xtext.util.Triple;
import org.eclipse.xtext.util.Tuples;
@@ -102,14 +101,12 @@ public abstract class AbstractTypeProvider implements ITypeProvider {
protected static final class ImmutableLinkedItem {
protected final EObject object;
- protected final EObject additional;
protected final ImmutableLinkedItem prev;
protected final int hashCode;
protected final int size;
- public ImmutableLinkedItem(EObject object, EObject additional, ImmutableLinkedItem immutableStack) {
+ public ImmutableLinkedItem(EObject object, ImmutableLinkedItem immutableStack) {
this.object = object;
- this.additional = additional;
prev = immutableStack;
size = immutableStack == null ? 1 : immutableStack.size + 1;
if (prev != null) {
@@ -128,7 +125,7 @@ public abstract class AbstractTypeProvider implements ITypeProvider {
if (obj.hashCode() != hashCode() || obj.getClass() != ImmutableLinkedItem.class)
return false;
ImmutableLinkedItem other = (ImmutableLinkedItem) obj;
- return other.object == object && other.additional == additional && other.size == size && (other.prev == prev || prev != null && prev.equals(other.prev));
+ return other.object == object && other.size == size && (other.prev == prev || prev != null && prev.equals(other.prev));
}
@Override
@@ -292,18 +289,7 @@ public abstract class AbstractTypeProvider implements ITypeProvider {
@Override
protected AbstractTypeProvider.ComputationData<XExpression> createComputationData() {
- return new ComputationData<XExpression>() {
- @Override
- protected EObject getAdditional(XExpression expression) {
- if (expression instanceof XAbstractFeatureCall) {
- return getFeature((XAbstractFeatureCall) expression, false);
- }
- if (expression instanceof XConstructorCall) {
- return getConstructor((XConstructorCall) expression, false);
- }
- return super.getAdditional(expression);
- }
- };
+ return new ComputationData<XExpression>();
}
@Override
@@ -551,29 +537,24 @@ public abstract class AbstractTypeProvider implements ITypeProvider {
}
protected static class ComputationData<T extends EObject> {
- protected final Set<Pair<T, EObject>> computations = Sets.newHashSet();
+ protected final Set<T> computations = Sets.newHashSet();
protected ImmutableLinkedItem queryState = null;
protected Resource resource;
protected boolean resourceLeftOrCyclic;
protected boolean add(T t) {
- EObject additionalKey = getAdditional(t);
- boolean result = computations.add(Tuples.create(t, additionalKey));
+ boolean result = computations.add(t);
if (result) {
if (queryState == null) {
resource = t.eResource();
}
- queryState = new ImmutableLinkedItem(t, additionalKey, queryState);
+ queryState = new ImmutableLinkedItem(t, queryState);
}
return result;
}
- protected EObject getAdditional(T t) {
- return null;
- }
-
protected void remove(T t) {
- computations.remove(Tuples.create(t, queryState.additional));
+ computations.remove(t);
queryState = queryState.prev;
if (queryState == null)
resource = null; | [xbase][typing] WIP: Rework caching strategy of type provider: A minor simplification | eclipse_xtext-extras | train |
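A rough Python analogue of the simplified ImmutableLinkedItem (illustrative only): a persistent cons cell that fixes its size at construction and compares structurally, which is what lets it serve as a cache key for in-flight computations.

from dataclasses import dataclass, field
from typing import Any, Optional

@dataclass(frozen=True)
class Item:
    obj: Any
    prev: Optional["Item"] = None
    size: int = field(init=False)

    def __post_init__(self):
        object.__setattr__(self, "size", 1 + (self.prev.size if self.prev else 0))

stack = Item("outer")
stack = Item("inner", stack)  # push without mutating the previous stack
assert stack.size == 2 and stack.prev == Item("outer")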
343b3aa2e13497577e6d138c79a006e2188886fb | diff --git a/lib/ice_cube/builders/hash_builder.rb b/lib/ice_cube/builders/hash_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/ice_cube/builders/hash_builder.rb
+++ b/lib/ice_cube/builders/hash_builder.rb
@@ -1,6 +1,5 @@
module IceCube
- # TODO why this class?
class HashBuilder
def initialize(rule = nil)
diff --git a/lib/ice_cube/builders/string_builder.rb b/lib/ice_cube/builders/string_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/ice_cube/builders/string_builder.rb
+++ b/lib/ice_cube/builders/string_builder.rb
@@ -4,7 +4,6 @@ module IceCube
attr_writer :base
- # TODO reimplement with linkedlist if desired more efficient
def initialize
@types = {}
end | Remove some TODOs that are done | seejohnrun_ice_cube | train |
67a1fce1f8cf223e71e28d047fa9c6618ffe411e | diff --git a/sonar-batch/src/main/java/org/sonar/batch/scan/DefaultProjectBootstrapper.java b/sonar-batch/src/main/java/org/sonar/batch/scan/DefaultProjectBootstrapper.java
index <HASH>..<HASH> 100644
--- a/sonar-batch/src/main/java/org/sonar/batch/scan/DefaultProjectBootstrapper.java
+++ b/sonar-batch/src/main/java/org/sonar/batch/scan/DefaultProjectBootstrapper.java
@@ -91,7 +91,8 @@ class DefaultProjectBootstrapper implements ProjectBootstrapper {
* Array of all mandatory properties required for a project without child.
*/
private static final String[] MANDATORY_PROPERTIES_FOR_SIMPLE_PROJECT = {
- PROPERTY_PROJECT_BASEDIR, CoreProperties.PROJECT_KEY_PROPERTY, CoreProperties.PROJECT_NAME_PROPERTY, CoreProperties.PROJECT_VERSION_PROPERTY
+ PROPERTY_PROJECT_BASEDIR, CoreProperties.PROJECT_KEY_PROPERTY, CoreProperties.PROJECT_NAME_PROPERTY,
+ CoreProperties.PROJECT_VERSION_PROPERTY, PROPERTY_SOURCES
};
/**
diff --git a/sonar-batch/src/test/java/org/sonar/batch/scan/DefaultProjectBootstrapperTest.java b/sonar-batch/src/test/java/org/sonar/batch/scan/DefaultProjectBootstrapperTest.java
index <HASH>..<HASH> 100644
--- a/sonar-batch/src/test/java/org/sonar/batch/scan/DefaultProjectBootstrapperTest.java
+++ b/sonar-batch/src/test/java/org/sonar/batch/scan/DefaultProjectBootstrapperTest.java
@@ -85,7 +85,9 @@ public class DefaultProjectBootstrapperTest {
}
@Test
- public void shouldNotFailIfMissingSourceDirectory() throws IOException {
+ public void fail_if_sources_not_set() throws IOException {
+ thrown.expect(IllegalStateException.class);
+ thrown.expectMessage("You must define the following mandatory properties for 'com.foo.project': sonar.sources");
loadProjectDefinition("simple-project-with-missing-source-dir");
}
@@ -469,7 +471,7 @@ public class DefaultProjectBootstrapperTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("You must define the following mandatory properties for 'Unknown': foo2, foo3");
- DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[] {"foo1", "foo2", "foo3"});
+ DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[]{"foo1", "foo2", "foo3"});
}
@Test
@@ -481,7 +483,7 @@ public class DefaultProjectBootstrapperTest {
thrown.expect(IllegalStateException.class);
thrown.expectMessage("You must define the following mandatory properties for 'my-project': foo2, foo3");
- DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[] {"foo1", "foo2", "foo3"});
+ DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[]{"foo1", "foo2", "foo3"});
}
@Test
@@ -490,7 +492,7 @@ public class DefaultProjectBootstrapperTest {
props.setProperty("foo1", "bla");
props.setProperty("foo4", "bla");
- DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[] {"foo1"});
+ DefaultProjectBootstrapper.checkMandatoryProperties(props, new String[]{"foo1"});
// No exception should be thrown
} | SONAR-<I> revert. sonar.sources is back. It must be mandatory. | SonarSource_sonarqube | train |
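The restored rule, reduced to a Python sketch (property names taken from the tests above): collect every missing mandatory key and report them together instead of failing one at a time.

def check_mandatory_properties(props, required):
    missing = [key for key in required if key not in props]
    if missing:
        name = props.get('sonar.projectKey', 'Unknown')
        raise ValueError("You must define the following mandatory properties "
                         "for '%s': %s" % (name, ', '.join(missing)))

check_mandatory_properties(
    {'sonar.projectKey': 'com.foo.project', 'sonar.sources': 'src'},
    ['sonar.projectKey', 'sonar.sources'])  # passes; drop sonar.sources to fail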
8fe9a51f95942d35f93a56afd047d5e74921e303 | diff --git a/Html.php b/Html.php
index <HASH>..<HASH> 100644
--- a/Html.php
+++ b/Html.php
@@ -61,13 +61,15 @@ class Html {
* Do not pass one string containing multiple classes as they will be
* incorrectly concatenated with dashes, i.e. "one two" will become "one-two".
*
- * @param string $class
- * The class name to clean.
+ * @param mixed $class
+ * The class name to clean. It can be a string or anything that can be cast
+ * to string.
*
* @return string
* The cleaned class name.
*/
public static function getClass($class) {
+ $class = (string) $class;
if (!isset(static::$classes[$class])) {
static::$classes[$class] = static::cleanCssIdentifier(Unicode::strtolower($class));
} | Issue #<I> by benjifisher, leslieg, alexpott: clean_class Twig filter does not work with Views rewriting | drupal_core-utility | train |
18741a2fd61769c20228381f44eb1e4e8a933cbc | diff --git a/tcp_check/test_tcp_check.py b/tcp_check/test_tcp_check.py
index <HASH>..<HASH> 100644
--- a/tcp_check/test_tcp_check.py
+++ b/tcp_check/test_tcp_check.py
@@ -8,7 +8,7 @@ import time
# project
from tests.checks.common import AgentCheckTest
-RESULTS_TIMEOUT = 5
+RESULTS_TIMEOUT = 20
CONFIG = {
    'init_config': {}, | [tcp_check] there is a race condition between finishing the test and getting the service checks; bump up the timeout. (#<I>)
[tcp][ci] bumping the TO some more, still flaking sometimes. | DataDog_integrations-core | train |
b6cdb46e716fcb94d32c11034a18bbc7900df2d7 | diff --git a/playhouse/tests_sqlite_ext.py b/playhouse/tests_sqlite_ext.py
index <HASH>..<HASH> 100644
--- a/playhouse/tests_sqlite_ext.py
+++ b/playhouse/tests_sqlite_ext.py
@@ -48,8 +48,15 @@ class Post(BaseExtModel):
message = TextField()
class FTSPost(Post, sqe.FTSModel):
+ """Automatically managed and populated via the Post model."""
pass
+class FTSDoc(sqe.FTSModel):
+ """Manually managed and populated using queries."""
+ message = TextField()
+ class Meta:
+ database = ext_db
+
class Values(BaseExtModel):
klass = IntegerField()
value = FloatField()
@@ -66,14 +73,24 @@ class SqliteExtTestCase(unittest.TestCase):
'Faith has to do with things that are not seen and hope with things that are not at hand.',
]
def setUp(self):
+ FTSDoc.drop_table(True)
FTSPost.drop_table(True)
Post.drop_table(True)
Values.drop_table(True)
Values.create_table()
Post.create_table()
FTSPost.create_table(tokenize='porter', content_model=Post)
+ FTSDoc.create_table(tokenize='porter')
+
+ def test_fts_manual(self):
+ matches = lambda s: sqe.match(FTSDoc.message, s)
+ messages = [FTSDoc.create(message=msg) for msg in self.messages]
+ q = FTSDoc.select().where(matches('believe')).order_by(FTSDoc.id)
+ self.assertEqual([x.message for x in q], [
+ self.messages[0],
+ self.messages[3]])
- def test_fts(self):
+ def test_fts_auto(self):
matches = lambda s: sqe.match(FTSPost.message, s)
posts = []
for message in self.messages:
@@ -87,22 +104,22 @@ class SqliteExtTestCase(unittest.TestCase):
FTSPost.optimize()
# it will stem faithful -> faith b/c we use the porter tokenizer
- pq = FTSPost.select().where(matches('faith')).order_by('id')
+ pq = FTSPost.select().where(matches('faith')).order_by(FTSPost.id)
self.assertEqual([x.message for x in pq], self.messages)
- pq = FTSPost.select().where(matches('believe')).order_by('id')
+ pq = FTSPost.select().where(matches('believe')).order_by(FTSPost.id)
self.assertEqual([x.message for x in pq], [
self.messages[0],
self.messages[3],
])
- pq = FTSPost.select().where(matches('thin*')).order_by('id')
+ pq = FTSPost.select().where(matches('thin*')).order_by(FTSPost.id)
self.assertEqual([x.message for x in pq], [
self.messages[2],
self.messages[4],
])
- pq = FTSPost.select().where(matches('"it is"')).order_by('id')
+ pq = FTSPost.select().where(matches('"it is"')).order_by(FTSPost.id)
self.assertEqual([x.message for x in pq], [
self.messages[2],
self.messages[3], | Cleanups and testing manually managing a fts model. | coleifer_peewee | train |
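What the new manually-populated FTSDoc model boils down to at the SQLite level, sketched with the stdlib sqlite3 module (assuming the interpreter's SQLite build ships FTS4): a standalone porter-stemmed full-text table filled by plain INSERTs rather than backed by a content table.

import sqlite3

db = sqlite3.connect(':memory:')
db.execute("CREATE VIRTUAL TABLE fts_doc USING fts4(message, tokenize=porter)")
db.execute("INSERT INTO fts_doc (message) VALUES (?)",
           ("Faith has to do with things that are not seen.",))
hits = db.execute(
    "SELECT message FROM fts_doc WHERE fts_doc MATCH 'faith'").fetchall()
assert len(hits) == 1  # case-folded and porter-stemmed match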
ec7e58940d892e29011f0b3eac3e92ea2d1735b4 | diff --git a/prow/github/client.go b/prow/github/client.go
index <HASH>..<HASH> 100644
--- a/prow/github/client.go
+++ b/prow/github/client.go
@@ -1271,7 +1271,8 @@ func (c *Client) GetFile(org, repo, filepath, commit string) ([]byte, error) {
// Query runs a GraphQL query using shurcooL/githubql's client.
func (c *Client) Query(ctx context.Context, q interface{}, vars map[string]interface{}) error {
- c.log("Query", q, vars)
+ // Don't log query here because Query is typically called multiple times to get all pages.
+ // Instead log once per search and include total search cost.
return c.gqlc.Query(ctx, q, vars)
} | Stop logging every GraphQL query call in favor of 1 log per search. | kubernetes_test-infra | train |
4cb8318ac5679701bdb5b542ef6df264cc56151b | diff --git a/cmd/kops/replace.go b/cmd/kops/replace.go
index <HASH>..<HASH> 100644
--- a/cmd/kops/replace.go
+++ b/cmd/kops/replace.go
@@ -158,25 +158,25 @@ func RunReplace(f *util.Factory, cmd *cobra.Command, out io.Writer, c *replaceOp
cluster, err := clientset.GetCluster(clusterName)
if err != nil {
if errors.IsNotFound(err) {
- cluster = nil
+ return fmt.Errorf("cluster %q not found", clusterName)
} else {
return fmt.Errorf("error fetching cluster %q: %v", clusterName, err)
}
}
- if cluster == nil {
- return fmt.Errorf("cluster %q not found", clusterName)
- }
// check if the instancegroup exists already
igName := v.ObjectMeta.Name
ig, err := clientset.InstanceGroupsFor(cluster).Get(igName, metav1.GetOptions{})
if err != nil {
- return fmt.Errorf("unable to check for instanceGroup: %v", err)
+ if errors.IsNotFound(err) {
+ if !c.force {
+ return fmt.Errorf("instanceGroup: %v does not exist (try adding --force flag)", igName)
+ }
+ } else {
+ return fmt.Errorf("unable to check for instanceGroup: %v", err)
+ }
}
switch ig {
case nil:
- if !c.force {
- return fmt.Errorf("instanceGroup: %v does not exist (try adding --force flag)", igName)
- }
glog.Infof("instanceGroup: %v was not found, creating resource now", igName)
_, err = clientset.InstanceGroupsFor(cluster).Create(v)
if err != nil { | Kops Replace Force
This PR fixes the `kops replace --force` flag which was introduced in [#PR<I>](<URL> | kubernetes_kops | train
c6300de894af95b68cfe0acf79b2979f6fd5afaa | diff --git a/closure/goog/events/events.js b/closure/goog/events/events.js
index <HASH>..<HASH> 100644
--- a/closure/goog/events/events.js
+++ b/closure/goog/events/events.js
@@ -547,7 +547,7 @@ goog.events.getListeners = function(obj, type, capture) {
* @param {?string} type Event type.
* @param {boolean} capture Capture phase?.
* @return {Array.<goog.events.Listener>?} Array of listener objects.
- * Returns null if object has no lsiteners of that type.
+ * Returns null if object has no listeners of that type.
* @private
*/
goog.events.getListeners_ = function(obj, type, capture) { | Fix a typo in comment for getListeners_()
R=arv
DELTA=1 (0 added, 0 deleted, 1 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL> | google_closure-library | train |
b3a358fb34c5ae600e49c9fd934f18de4795b6e5 | diff --git a/lib/cc/cli/analyze.rb b/lib/cc/cli/analyze.rb
index <HASH>..<HASH> 100644
--- a/lib/cc/cli/analyze.rb
+++ b/lib/cc/cli/analyze.rb
@@ -35,6 +35,8 @@ module CC
when '-f'
@args.shift # throw out the -f
@formatter = Formatters.resolve(@args.shift)
+ when '-dev'
+ @dev_mode = true
end
rescue Formatters::Formatter::InvalidFormatterError => e
fatal(e.message)
@@ -70,7 +72,7 @@ module CC
@engines ||= config.engine_names.map do |engine_name|
Engine.new(
engine_name,
- engine_registry[engine_name],
+ @dev_mode ? make_registry_entry(engine_name) : engine_registry[engine_name],
path,
engine_config(engine_name),
SecureRandom.uuid
@@ -86,6 +88,12 @@ module CC
ENV['CODE_PATH']
end
+ def make_registry_entry(engine_name)
+ {
+ "image_name"=>"codeclimate/codeclimate-#{engine_name}:latest"
+ }
+ end
+
end
end
end | Add -dev flag to analyze command | codeclimate_codeclimate | train |
4d641110454f114d1f179d306fb63166e66fd6cf | diff --git a/src/foremast/slacknotify/slack_notification.py b/src/foremast/slacknotify/slack_notification.py
index <HASH>..<HASH> 100644
--- a/src/foremast/slacknotify/slack_notification.py
+++ b/src/foremast/slacknotify/slack_notification.py
@@ -11,9 +11,13 @@ class SlackNotification:
"""
def __init__(self, app=None, env=None, prop_path=None):
- self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
- self.info['timestamp'] = timestamp
+
+ self.info = {'app': app,
+ 'env': env,
+ 'properties': prop_path,
+ 'timestamp': timestamp}
+
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11] | fix: Move timestamp before dict for insertion | foremast_foremast | train |
3ac33d0847e003ab0121e6454243ab27006d0864 | diff --git a/lib/search_engine.py b/lib/search_engine.py
index <HASH>..<HASH> 100644
--- a/lib/search_engine.py
+++ b/lib/search_engine.py
@@ -816,7 +816,7 @@ def create_search_box(cc, colls, p, f, rg, sf, so, sp, rm, of, ot, as,
})
# show collections in the search box? (not if there is only one
- # collection defined, and not if we are in super simple search)
+ # collection defined, and not if we are in light search)
show_colls = True
if len(collection_reclist_cache.keys()) == 1 or \
as == -1:
diff --git a/lib/websearch_templates.py b/lib/websearch_templates.py
index <HASH>..<HASH> 100644
--- a/lib/websearch_templates.py
+++ b/lib/websearch_templates.py
@@ -30,7 +30,7 @@ import locale
from urllib import quote, urlencode
from invenio.config import \
- CFG_WEBSEARCH_SUPERSIMPLESEARCH_PATTERN_BOX_WIDTH, \
+ CFG_WEBSEARCH_LIGHTSEARCH_PATTERN_BOX_WIDTH, \
CFG_WEBSEARCH_SIMPLESEARCH_PATTERN_BOX_WIDTH, \
CFG_WEBSEARCH_ADVANCEDSEARCH_PATTERN_BOX_WIDTH, \
CFG_WEBSEARCH_AUTHOR_ET_AL_THRESHOLD, \
@@ -577,9 +577,9 @@ class Template:
return out
- def tmpl_searchfor_super_simple(self, ln, collection_id, collection_name, record_count,
- example_search_queries): # EXPERIMENTAL
- """Produces super simple *Search for* box for the current collection.
+ def tmpl_searchfor_light(self, ln, collection_id, collection_name, record_count,
+ example_search_queries): # EXPERIMENTAL
+ """Produces light *Search for* box for the current collection.
Parameters:
@@ -596,7 +596,7 @@ class Template:
_ = gettext_set_language(ln)
out = '''
- <!--create_searchfor_super_simple()-->
+ <!--create_searchfor_light()-->
'''
argd = drop_default_urlargd({'ln': ln, 'sc': CFG_WEBSEARCH_SPLIT_BY_COLLECTION},
@@ -710,9 +710,9 @@ class Template:
</tr>
</tbody>
</table>-->
- <!--/create_searchfor_super_simple()-->
+ <!--/create_searchfor_light()-->
''' % {'ln' : ln,
- 'sizepattern' : CFG_WEBSEARCH_SUPERSIMPLESEARCH_PATTERN_BOX_WIDTH,
+ 'sizepattern' : CFG_WEBSEARCH_LIGHTSEARCH_PATTERN_BOX_WIDTH,
'langlink': ln != CFG_SITE_LANG and '?ln=' + ln or '',
'siteurl' : CFG_SITE_URL,
'asearch' : create_html_link(asearchurl, {}, _('Advanced Search')),
@@ -1973,7 +1973,7 @@ class Template:
}
else:
# EXPERIMENTAL
- # print super-simple search form:
+ # print light search form:
search_in = ''
if cc_intl != CFG_SITE_NAME_INTL.get(ln, CFG_SITE_NAME):
search_in = '''
@@ -2013,7 +2013,7 @@ class Template:
{}, _("Advanced Search")),
'leading' : leadingtext,
- 'sizepattern' : CFG_WEBSEARCH_SUPERSIMPLESEARCH_PATTERN_BOX_WIDTH,
+ 'sizepattern' : CFG_WEBSEARCH_LIGHTSEARCH_PATTERN_BOX_WIDTH,
'p' : cgi.escape(p, 1),
'searchwithin' : self.tmpl_searchwithin_select(
ln = ln,
diff --git a/lib/websearch_webcoll.py b/lib/websearch_webcoll.py
index <HASH>..<HASH> 100644
--- a/lib/websearch_webcoll.py
+++ b/lib/websearch_webcoll.py
@@ -287,7 +287,7 @@ class Collection:
self.create_navtrail_links(as, lang))
## second, update page body:
- for as in CFG_WEBSEARCH_ENABLED_SEARCH_INTERFACES: # do super-simple, simple and advanced search pages:
+ for as in CFG_WEBSEARCH_ENABLED_SEARCH_INTERFACES: # do light, simple and advanced search pages:
body = websearch_templates.tmpl_webcoll_body(
ln=lang, collection=self.name,
te_portalbox = self.create_portalbox(lang, 'te'),
@@ -629,12 +629,12 @@ class Collection:
elif as == 0:
return self.create_searchfor_simple(ln)
else:
- return self.create_searchfor_super_simple(ln)
+ return self.create_searchfor_light(ln)
- def create_searchfor_super_simple(self, ln=CFG_SITE_LANG):
- "Produces super simple 'Search for' box for the current collection."
+ def create_searchfor_light(self, ln=CFG_SITE_LANG):
+ "Produces light 'Search for' box for the current collection."
- return websearch_templates.tmpl_searchfor_super_simple(
+ return websearch_templates.tmpl_searchfor_light(
ln=ln,
collection_id = self.name,
collection_name=self.get_name(ln=ln), | Renamed super-simple search to light search (still experimental). | inveniosoftware_invenio-records | train |
155b6cb18ad390d364406a3bc3611a9ba0e9c71f | diff --git a/parsl/executors/high_throughput/executor.py b/parsl/executors/high_throughput/executor.py
index <HASH>..<HASH> 100644
--- a/parsl/executors/high_throughput/executor.py
+++ b/parsl/executors/high_throughput/executor.py
@@ -335,7 +335,7 @@ class HighThroughputExecutor(BlockProviderExecutor, RepresentationMixin):
self._queue_management_thread = None
self._start_queue_management_thread()
- self._start_local_queue_process()
+ self._start_local_interchange_process()
logger.debug("Created management thread: {}".format(self._queue_management_thread))
@@ -454,31 +454,31 @@ class HighThroughputExecutor(BlockProviderExecutor, RepresentationMixin):
break
logger.info("[MTHREAD] queue management worker finished")
- def _start_local_queue_process(self):
+ def _start_local_interchange_process(self):
""" Starts the interchange process locally
Starts the interchange process locally and uses an internal command queue to
get the worker task and result ports that the interchange has bound to.
"""
comm_q = Queue(maxsize=10)
- self.queue_proc = ForkProcess(target=interchange.starter,
- args=(comm_q,),
- kwargs={"client_ports": (self.outgoing_q.port,
- self.incoming_q.port,
- self.command_client.port),
- "worker_ports": self.worker_ports,
- "worker_port_range": self.worker_port_range,
- "hub_address": self.hub_address,
- "hub_port": self.hub_port,
- "logdir": "{}/{}".format(self.run_dir, self.label),
- "heartbeat_threshold": self.heartbeat_threshold,
- "poll_period": self.poll_period,
- "logging_level": logging.DEBUG if self.worker_debug else logging.INFO
- },
- daemon=True,
- name="HTEX-Interchange"
+ self.interchange_proc = ForkProcess(target=interchange.starter,
+ args=(comm_q,),
+ kwargs={"client_ports": (self.outgoing_q.port,
+ self.incoming_q.port,
+ self.command_client.port),
+ "worker_ports": self.worker_ports,
+ "worker_port_range": self.worker_port_range,
+ "hub_address": self.hub_address,
+ "hub_port": self.hub_port,
+ "logdir": "{}/{}".format(self.run_dir, self.label),
+ "heartbeat_threshold": self.heartbeat_threshold,
+ "poll_period": self.poll_period,
+ "logging_level": logging.DEBUG if self.worker_debug else logging.INFO
+ },
+ daemon=True,
+ name="HTEX-Interchange"
)
- self.queue_proc.start()
+ self.interchange_proc.start()
try:
(self.worker_task_port, self.worker_result_port) = comm_q.get(block=True, timeout=120)
except queue.Empty:
@@ -717,5 +717,5 @@ class HighThroughputExecutor(BlockProviderExecutor, RepresentationMixin):
"""
logger.info("Attempting HighThroughputExecutor shutdown")
- self.queue_proc.terminate()
+ self.interchange_proc.terminate()
logger.info("Finished HighThroughputExecutor shutdown attempt") | Refer to the interchange process rather than queue process (#<I>)
Using two names for the interchange process/queue process makes the code a
bit harder to read. | Parsl_parsl | train |
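The startup handshake the renamed method performs, as a generic multiprocessing sketch (port numbers are made up): the daemon child binds its sockets, reports the ports through a queue, and the parent treats a timeout as a failed start. On spawn-based platforms this would need an if __name__ == '__main__' guard.

import multiprocessing
import queue

def interchange(comm_q):
    task_port, result_port = 54000, 54001  # pretend these were just bound
    comm_q.put((task_port, result_port))

comm_q = multiprocessing.Queue(maxsize=10)
proc = multiprocessing.Process(target=interchange, args=(comm_q,),
                               daemon=True, name="HTEX-Interchange")
proc.start()
try:
    worker_task_port, worker_result_port = comm_q.get(block=True, timeout=120)
except queue.Empty:
    raise RuntimeError("Interchange has not reported its ports")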
26e3e335adabe79a2d57d0053a1323823bee0b8a | diff --git a/src/level/TMXLayer.js b/src/level/TMXLayer.js
index <HASH>..<HASH> 100644
--- a/src/level/TMXLayer.js
+++ b/src/level/TMXLayer.js
@@ -71,21 +71,18 @@
* @private
*/
draw : function(context, rect) {
- // save context state
- context.save();
// set layer opacity
- context.globalAlpha = this.opacity;
+		var _alpha = context.globalAlpha;
+ context.globalAlpha = this.opacity;
+
// set layer color
context.fillStyle = this.color;
- // correct the rect size is the map is not at the default screen position
- // (fixme : this might not work with dirtyRect)
- var shift = game.currentLevel.pos;
// clear the specified rect
- context.fillRect(rect.left - shift.x, rect.top - shift.y, rect.width, rect.height);
+ context.fillRect(rect.left, rect.top, rect.width, rect.height);
- // restore context state
- context.restore();
+ // restore context alpha value
+ context.globalAlpha = _alpha;
}
}); | Ticket #<I>, #<I> : Fixed (again) the Color Layer Drawing + small optimization | melonjs_melonJS | train |
a29684ab97836200d3630a2466bfbcb67871e03a | diff --git a/pom.xml b/pom.xml
index <HASH>..<HASH> 100755
--- a/pom.xml
+++ b/pom.xml
@@ -229,6 +229,13 @@
<exclude>net/openhft/chronicle/map/VanillaChronicleMap.java</exclude>
<exclude>net/openhft/chronicle/map/ReplicatedChronicleMap.java</exclude>
<exclude>net/openhft/chronicle/map/Replica.java</exclude>
+ <exclude>net/openhft/chronicle/map/ServiceDescriptor.java</exclude>
+ <exclude>net/openhft/chronicle/hash/Segment.java</exclude>
+ <exclude>net/openhft/chronicle/map/MapEventListener.java</exclude>
+ <exclude>net/openhft/chronicle/map/MapEntryCallback.java</exclude>
+ <exclude>net/openhft/chronicle/hash/KeyContext.java</exclude>
+ <exclude>net/openhft/chronicle/map/MapKeyContext.java</exclude>
+ <exclude>net/openhft/chronicle/hash/locks/LockingStrategy.java</exclude>
</sourceFileExcludes>
<additionalparam>-Xdoclint:none</additionalparam>
</configuration>
diff --git a/src/main/java/net/openhft/chronicle/hash/locks/LockingStrategy.java b/src/main/java/net/openhft/chronicle/hash/locks/LockingStrategy.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/openhft/chronicle/hash/locks/LockingStrategy.java
+++ b/src/main/java/net/openhft/chronicle/hash/locks/LockingStrategy.java
@@ -16,6 +16,11 @@
package net.openhft.chronicle.hash.locks;
+/**
+ * @deprecated never implemented and replaced with strategies in
+ * {@link net.openhft.chronicle.algo.locks}
+ */
+@Deprecated
public interface LockingStrategy {
void lock(InterProcessLock lock);
diff --git a/src/main/java/net/openhft/chronicle/map/ChronicleMap.java b/src/main/java/net/openhft/chronicle/map/ChronicleMap.java
index <HASH>..<HASH> 100755
--- a/src/main/java/net/openhft/chronicle/map/ChronicleMap.java
+++ b/src/main/java/net/openhft/chronicle/map/ChronicleMap.java
@@ -142,6 +142,11 @@ public interface ChronicleMap<K, V>
*/
V acquireUsing(@NotNull K key, V usingValue);
+ /**
+ * @deprecated this method has incoherent, unclear semantics. Use {@link #queryContext(Object)}
+ * instead
+ */
+ @Deprecated
@NotNull
MapKeyContext<K, V> acquireContext(@NotNull K key, @NotNull V usingValue);
diff --git a/src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java b/src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
index <HASH>..<HASH> 100755
--- a/src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
+++ b/src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
@@ -1155,7 +1155,9 @@ public final class ChronicleMapBuilder<K, V> implements
* @param defaultValueProvider the strategy to obtain a default value by the absent key
* @return this builder object back
* @see #defaultValue(Object)
+ * @deprecated specialize {@link MapEntryOperations#defaultValue(MapAbsentEntry)} instead
*/
+ @Deprecated
public ChronicleMapBuilder<K, V> defaultValueProvider(
@NotNull DefaultValueProvider<K, V> defaultValueProvider) {
this.defaultValueProvider = defaultValueProvider; | More deprecations and Javadoc excludes | OpenHFT_Chronicle-Map | train |
f3bfdb561bd9be7daf15af1e20d462902b03018c | diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/ec2.py
+++ b/salt/cloud/clouds/ec2.py
@@ -355,7 +355,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
method = 'GET'
region = location
service = 'ec2'
- canonical_uri = urlparse.urlparse(requesturl).path
+ canonical_uri = _urlparse(requesturl).path
host = endpoint.strip()
# Create a date for headers and the credential string
@@ -377,7 +377,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
keys = sorted(params_with_headers.keys())
values = map(params_with_headers.get, keys)
- querystring = urllib.urlencode(list(zip(keys, values)))
+ querystring = _urlencode(list(zip(keys, values)))
querystring = querystring.replace('+', '%20')
canonical_request = method + '\n' + canonical_uri + '\n' + \ | Fixed urlparse and urlencode calls | saltstack_salt | train |
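The _urlparse and _urlencode names are Salt's underscore-prefixed compatibility aliases (in the real module they presumably come from a six-style import); a minimal shim with the same effect is:

try:  # Python 3
    from urllib.parse import urlencode as _urlencode
    from urllib.parse import urlparse as _urlparse
except ImportError:  # Python 2
    from urllib import urlencode as _urlencode
    from urlparse import urlparse as _urlparse

assert _urlparse('https://ec2.us-east-1.amazonaws.com/v2/').path == '/v2/'
assert _urlencode([('Action', 'DescribeInstances')]) == 'Action=DescribeInstances'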
0ee485e05bf45f440273014d9c85da8d865bac05 | diff --git a/slave/quantum_design/ppms.py b/slave/quantum_design/ppms.py
index <HASH>..<HASH> 100644
--- a/slave/quantum_design/ppms.py
+++ b/slave/quantum_design/ppms.py
@@ -134,6 +134,11 @@ class PPMS(IEC60488):
:ivar position: The current sample position.
+ .. rubric:: Configuration
+
+ :ivar date: The configured date of the ppms computer represented by a python
+ `date` object.
+
"""
def __init__(self, transport):
super(PPMS, self).__init__(transport)
@@ -214,6 +219,17 @@ class PPMS(IEC60488):
cmd = 'BEEP', [Float(min=0.1, max=5.0), Integer(min=500, max=5000)]
self._write(cmd, duration, frequency)
+ @property
+ def date(self):
+ month, day, year = self._query(('DATE?', [Integer, Integer, Integer]))
+ return datetime.date(2000 + year, month, day)
+
+ @date.setter
+ def date(self, date):
+ # The ppms only accepts the last two digits of the year.
+ month, date, year = date.month, date.day, date.year % 100
+ self._write(('DATE', [Integer, Integer, Integer]), month, date, year)
+
def move(self, position, slowdown=0):
"""Move to the specified sample position. | Implemented `Date` commands. | p3trus_slave | train |
8747eb3133984978bd1a269ceeba8cad2240462e | diff --git a/bin/bootstrap.php b/bin/bootstrap.php
index <HASH>..<HASH> 100644
--- a/bin/bootstrap.php
+++ b/bin/bootstrap.php
@@ -17,7 +17,7 @@ define('DB_DELIMITER', 'SΜ');
define('SMPROXY_VERSION', IN_PHAR ? '@phar-version@' : absorb_version_from_git());
// Set global error handler
-set_error_handler('_error_handler', E_ALL | E_STRICT);
+// set_error_handler('_error_handler', E_ALL | E_STRICT);
// Check requirements - PHP
 if (version_compare(PHP_VERSION, '7.0', '<')) { | Comment set_error_handler, rewrite error handling later | louislivi_SMProxy | train
c58d1a5b794417c283a6fd058a9f854d415c0674 | diff --git a/services/ElixirService.php b/services/ElixirService.php
index <HASH>..<HASH> 100755
--- a/services/ElixirService.php
+++ b/services/ElixirService.php
@@ -77,8 +77,8 @@ class ElixirService extends BaseApplicationComponent
*/
protected function readManifestFile()
{
- $manifest = file_get_contents(CRAFT_BASE_PATH . $this->publicPath . '/' . $this->buildPath . '/rev-manifest.json');
+ $manifest = file_get_contents(CRAFT_BASE_PATH . '../' . $this->publicPath . '/' . $this->buildPath . '/rev-manifest.json');
return json_decode($manifest, true);
}
-}
\ No newline at end of file
+} | Fixing manifest file path.
According to the docs and by default, CRAFT_BASE_PATH is the /craft folder. We need to go back a directory to get to the project root from the Craft folder. | venveo_craft3-mix | train |
eafc155db4ad7ce9e9b1e2968424fa5947223feb | diff --git a/tools/serviced-service/validate.go b/tools/serviced-service/validate.go
index <HASH>..<HASH> 100644
--- a/tools/serviced-service/validate.go
+++ b/tools/serviced-service/validate.go
@@ -148,7 +148,7 @@ func (mc *MigrationContext) validateName(name string, parentID string) error {
return err
}
if existing != nil {
- path, err := mc.services.GetServicePath(name)
+ path, err := mc.services.GetServicePath(existing.ID)
if err != nil {
path = fmt.Sprintf("%v", err)
} | GetServicePath takes the service ID, not name. | control-center_serviced | train |
4a93c8b30e900a79e84c02b6226e6cc7dba723ca | diff --git a/src/sagemaker/huggingface/estimator.py b/src/sagemaker/huggingface/estimator.py
index <HASH>..<HASH> 100644
--- a/src/sagemaker/huggingface/estimator.py
+++ b/src/sagemaker/huggingface/estimator.py
@@ -50,14 +50,15 @@ class HuggingFace(Framework):
compiler_config=None,
**kwargs,
):
- """This ``Estimator`` executes a HuggingFace script in a managed execution environment.
+ """This estimator runs a Hugging Face training script in a SageMaker training environment.
- The managed HuggingFace environment is an Amazon-built Docker container that executes
- functions defined in the supplied ``entry_point`` Python script within a SageMaker
- Training Job.
+ The estimator initiates the SageMaker-managed Hugging Face environment
+ by using the pre-built Hugging Face Docker container and runs
+ the Hugging Face training script that user provides through
+ the ``entry_point`` argument.
- Training is started by calling
- :meth:`~sagemaker.amazon.estimator.Framework.fit` on this Estimator.
+ After configuring the estimator class, use the class method
+ :meth:`~sagemaker.amazon.estimator.Framework.fit()` to start a training job.
Args:
py_version (str): Python version you want to use for executing your model training
diff --git a/src/sagemaker/training_compiler/config.py b/src/sagemaker/training_compiler/config.py
index <HASH>..<HASH> 100644
--- a/src/sagemaker/training_compiler/config.py
+++ b/src/sagemaker/training_compiler/config.py
@@ -18,11 +18,7 @@ logger = logging.getLogger(__name__)
class TrainingCompilerConfig(object):
- """The configuration class for accelerating SageMaker training jobs through compilation.
-
- SageMaker Training Compiler speeds up training by optimizing the model execution graph.
-
- """
+ """The SageMaker Training Compiler configuration class."""
DEBUG_PATH = "/opt/ml/output/data/compiler/"
SUPPORTED_INSTANCE_CLASS_PREFIXES = ["p3", "g4dn", "p4"]
@@ -37,9 +33,15 @@ class TrainingCompilerConfig(object):
):
"""This class initializes a ``TrainingCompilerConfig`` instance.
- Pass the output of it to the ``compiler_config``
+ `Amazon SageMaker Training Compiler
+ <https://docs.aws.amazon.com/sagemaker/latest/dg/training-compiler.html>`_
+ is a feature of SageMaker Training
+ and speeds up training jobs by optimizing model execution graphs.
+
+ You can compile Hugging Face models
+ by passing the object of this configuration class to the ``compiler_config``
parameter of the :class:`~sagemaker.huggingface.HuggingFace`
- class.
+ estimator.
Args:
enabled (bool): Optional. Switch to enable SageMaker Training Compiler.
@@ -48,13 +50,28 @@ class TrainingCompilerConfig(object):
This comes with a potential performance slowdown.
The default is ``False``.
- **Example**: The following example shows the basic ``compiler_config``
- parameter configuration, enabling compilation with default parameter values.
+ **Example**: The following code shows the basic usage of the
+ :class:`sagemaker.huggingface.TrainingCompilerConfig()` class
+ to run a HuggingFace training job with the compiler.
.. code-block:: python
- from sagemaker.huggingface import TrainingCompilerConfig
- compiler_config = TrainingCompilerConfig()
+ from sagemaker.huggingface import HuggingFace, TrainingCompilerConfig
+
+ huggingface_estimator=HuggingFace(
+ ...
+ compiler_config=TrainingCompilerConfig()
+ )
+
+ .. seealso::
+
+ For more information about how to enable SageMaker Training Compiler
+ for various training settings such as using TensorFlow-based models,
+ PyTorch-based models, and distributed training,
+ see `Enable SageMaker Training Compiler
+ <https://docs.aws.amazon.com/sagemaker/latest/dg/training-compiler-enable.html>`_
+ in the `Amazon SageMaker Training Compiler developer guide
+ <https://docs.aws.amazon.com/sagemaker/latest/dg/training-compiler.html>`_.
""" | documentation: update sagemaker training compiler docstring (#<I>) | aws_sagemaker-python-sdk | train |
5483e3c712a3916860b203c2a46762f2d15e88b2 | diff --git a/src/Command/DumpEnvCommand.php b/src/Command/DumpEnvCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/DumpEnvCommand.php
+++ b/src/Command/DumpEnvCommand.php
@@ -45,7 +45,7 @@ class DumpEnvCommand extends BaseCommand
;
}
- protected function execute(InputInterface $input, OutputInterface $output)
+ protected function execute(InputInterface $input, OutputInterface $output): int
{
$_SERVER['APP_ENV'] = $env = $input->getArgument('env');
$path = $this->options->get('root-dir').'/.env';
@@ -63,6 +63,8 @@ EOF;
file_put_contents($path.'.local.php', $vars, LOCK_EX);
$this->getIO()->writeError('Successfully dumped .env files in <info>.env.local.php</>');
+
+ return 0;
}
private function loadEnv(string $path, string $env): array
diff --git a/src/Command/GenerateIdCommand.php b/src/Command/GenerateIdCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/GenerateIdCommand.php
+++ b/src/Command/GenerateIdCommand.php
@@ -33,8 +33,10 @@ class GenerateIdCommand extends Command
;
}
- protected function execute(InputInterface $input, OutputInterface $output)
+ protected function execute(InputInterface $input, OutputInterface $output): int
{
$this->flex->generateFlexId();
+
+ return 0;
}
}
diff --git a/src/Command/SyncRecipesCommand.php b/src/Command/SyncRecipesCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/SyncRecipesCommand.php
+++ b/src/Command/SyncRecipesCommand.php
@@ -43,7 +43,7 @@ class SyncRecipesCommand extends BaseCommand
;
}
- protected function execute(InputInterface $input, OutputInterface $output)
+ protected function execute(InputInterface $input, OutputInterface $output): int
{
$win = '\\' === \DIRECTORY_SEPARATOR;
$force = $input->getOption('force');
@@ -70,7 +70,7 @@ class SyncRecipesCommand extends BaseCommand
}
if (!$packages) {
- return;
+ return 0;
}
$composer = $this->getComposer();
@@ -136,5 +136,7 @@ class SyncRecipesCommand extends BaseCommand
$io->write($output);
}
+
+ return 0;
}
}
diff --git a/src/Command/UnpackCommand.php b/src/Command/UnpackCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/UnpackCommand.php
+++ b/src/Command/UnpackCommand.php
@@ -86,7 +86,7 @@ class UnpackCommand extends BaseCommand
if (!$result->getUnpacked()) {
$io->writeError('<info>Nothing to unpack</>');
- return;
+ return 1;
}
        foreach ($result->getUnpacked() as $pkg) { | Returning a non-int value from the execute command will be deprecated | symfony_flex | train
e298b24de43f34f19b46959f09f8fb4a73519a56 | diff --git a/cufflinks/tools.py b/cufflinks/tools.py
index <HASH>..<HASH> 100644
--- a/cufflinks/tools.py
+++ b/cufflinks/tools.py
@@ -853,7 +853,7 @@ def get_subplots(rows=1,cols=1,
if not theme:
theme = auth.get_config_file()['theme']
- layout= base_layout if base_layout else getLayout(theme)
+ layout= base_layout if base_layout else getLayout(theme,**check_kwargs(kwargs,__LAYOUT_AXIS))
sp=py.plotly.tools.make_subplots(rows=rows,cols=cols,shared_xaxes=shared_xaxes,
shared_yaxes=shared_yaxes,print_grid=False,
start_cell=start_cell,**kwargs)
@@ -861,6 +861,16 @@ def get_subplots(rows=1,cols=1,
if not isinstance(v,go.XAxis) and not isinstance(v,go.YAxis):
sp['layout'].update({k:v})
+ def update_axis(fig,layout):
+ for axis, n in list(fig.axis['len'].items()):
+ for _ in range(1,n+1):
+ for k,v in list(layout['{0}axis1'.format(axis)].items()):
+ if k not in fig.layout['{0}axis{1}'.format(axis,_)]:
+ fig.layout['{0}axis{1}'.format(axis,_)][k]=v
+
+ update_axis(sp,layout)
+ # 124 - zeroline on the first figure
+
# if 'subplot_titles' in kwargs:
# if 'annotations' in layout:
# annotation=sp['layout']['annotations'][0]
@@ -869,15 +879,18 @@ def get_subplots(rows=1,cols=1,
# for ann in sp['layout']['annotations']:
# ann.update(font=dict(color=annotation['font']['color']))
- def update_items(sp_item,layout,axis):
- for k,v in list(layout[axis].items()):
- sp_item.update({k:v})
+ # def update_items(sp_item,layout,axis):
+ # for k,v in list(layout[axis].items()):
+ # sp_item.update({k:v})
+
+ # for k,v in list(sp['layout'].items()):
+ # if isinstance(v,go.XAxis):
+ # update_items(v,layout,'xaxis1')
+ # elif isinstance(v,go.YAxis):
+ # update_items(v,layout,'xaxis1')
+
+
- for k,v in list(sp['layout'].items()):
- if isinstance(v,go.XAxis):
- update_items(v,layout,'xaxis1')
- elif isinstance(v,go.YAxis):
- update_items(v,layout,'xaxis1')
return sp | Subplots - support for zeroline. Fixes #<I> | santosjorge_cufflinks | train |
73eefbbf8da3a606195d7d0179c84c4db588552e | diff --git a/tools/c7n_sphinxext/c7n_sphinxext/docgen.py b/tools/c7n_sphinxext/c7n_sphinxext/docgen.py
index <HASH>..<HASH> 100644
--- a/tools/c7n_sphinxext/c7n_sphinxext/docgen.py
+++ b/tools/c7n_sphinxext/c7n_sphinxext/docgen.py
@@ -162,7 +162,8 @@ def main(provider, output_dir, group_by):
def resource_file_name(output_dir, r):
- return os.path.join(output_dir, "%s.rst" % r.type).replace(' ', '-').lower()
+ return os.path.join(
+ output_dir, ("%s.rst" % r.type).replace(' ', '-').lower())
def _main(provider, output_dir, group_by):
@@ -200,13 +201,15 @@ def _main(provider, output_dir, group_by):
# Create files for all groups
for key, group in sorted(groups.items()):
group = sorted(group, key=operator.attrgetter('type'))
- rpath = os.path.join(output_dir, "group-%s.rst" % key).replace(' ', '-').lower()
+ rpath = os.path.join(
+ output_dir, ("group-%s.rst" % key).replace(' ', '-').lower())
with open(rpath, 'w') as fh:
t = env.get_template('provider-group.rst')
fh.write(t.render(
provider_name=provider,
key=key,
- resource_files=[os.path.basename(resource_file_name(output_dir, r)) for r in group],
+ resource_files=[os.path.basename(
+ resource_file_name(output_dir, r)) for r in group],
resources=group))
files.append(os.path.basename(rpath))
@@ -225,7 +228,8 @@ def _main(provider, output_dir, group_by):
common_actions[ElementSchema.name(a)] = (a, r)
fpath = os.path.join(
- output_dir, "%s-common-filters.rst" % provider_class.type.lower())
+ output_dir,
+ ("%s-common-filters.rst" % provider_class.type.lower()))
with open(fpath, 'w') as fh:
t = env.get_template('provider-common-elements.rst')
fh.write(t.render(
@@ -235,7 +239,8 @@ def _main(provider, output_dir, group_by):
files.insert(0, os.path.basename(fpath))
fpath = os.path.join(
- output_dir, "%s-common-actions.rst" % provider_class.type.lower())
+ output_dir,
+ ("%s-common-actions.rst" % provider_class.type.lower()))
with open(fpath, 'w') as fh:
t = env.get_template('provider-common-elements.rst')
fh.write(t.render( | docs - reference docbuilder fix output path normalization (#<I>) | cloud-custodian_cloud-custodian | train |
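The moved parenthesis matters because the normalization used to run over the whole joined path; a short repro (directory and resource names are made up):

import os

output_dir, rtype = '/tmp/My Docs', 'Launch Config'
before = os.path.join(output_dir, '%s.rst' % rtype).replace(' ', '-').lower()
after = os.path.join(output_dir, ('%s.rst' % rtype).replace(' ', '-').lower())
assert before == '/tmp/my-docs/launch-config.rst'  # directory mangled too
assert after == '/tmp/My Docs/launch-config.rst'   # only the file name changes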
ec1e1028adc2da87648247375a1a2d7416a56e36 | diff --git a/binstar_client/__init__.py b/binstar_client/__init__.py
index <HASH>..<HASH> 100644
--- a/binstar_client/__init__.py
+++ b/binstar_client/__init__.py
@@ -42,7 +42,8 @@ class Binstar(PublishMixin, CollectionsMixin, OrgMixin, ChannelsMixin, PackageMi
self.token = token
if token:
- self._session.headers.update({'Authorization': 'token %s' % (token)})
+ self._session.headers.update({'Authorization': 'token %s' % (token),
+ 'User-Agent': 'Binstar/%s (+https://binstar.org)' % __version__})
self.domain = domain | ENH: added binstar cli user agent to requests | Anaconda-Platform_anaconda-client | train |
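A standalone sketch of the session-default pattern this change extends (assuming the requests library; token and version are placeholders): headers set once on the Session ride along on every later request.

import requests

token, __version__ = 'secret-token', '1.2.3'  # placeholder values
session = requests.Session()
session.headers.update({
    'Authorization': 'token %s' % token,
    'User-Agent': 'Binstar/%s (+https://binstar.org)' % __version__,
})
# every subsequent session.get()/session.post() now carries both headers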
84598398d6bf6c32bd9302f26ca6f906488a36d5 | diff --git a/assets/angular-gantt-labels-plugin.js b/assets/angular-gantt-labels-plugin.js
index <HASH>..<HASH> 100644
--- a/assets/angular-gantt-labels-plugin.js
+++ b/assets/angular-gantt-labels-plugin.js
@@ -79,7 +79,7 @@ Github: https://github.com/angular-gantt/angular-gantt.git
builder.controller = function($scope) {
var hScrollBarHeight = layout.getScrollBarHeight();
- $scope.getScrollableCss = function() {
+ $scope.getLabelsCss = function() {
var css = {};
if ($scope.maxHeight) {
diff --git a/assets/angular-gantt-plugins.js b/assets/angular-gantt-plugins.js
index <HASH>..<HASH> 100644
--- a/assets/angular-gantt-plugins.js
+++ b/assets/angular-gantt-plugins.js
@@ -874,7 +874,7 @@ Github: https://github.com/angular-gantt/angular-gantt.git
builder.controller = function($scope) {
var hScrollBarHeight = layout.getScrollBarHeight();
- $scope.getScrollableCss = function() {
+ $scope.getLabelsCss = function() {
var css = {};
if ($scope.maxHeight) {
@@ -1194,7 +1194,7 @@ angular.module('gantt.drawtask.templates', []).run(['$templateCache', function($
angular.module('gantt.labels.templates', []).run(['$templateCache', function($templateCache) {
$templateCache.put('plugins/labels/labelsBody.tmpl.html',
'<div class="gantt-labels-body"\n' +
- ' ng-style="getScrollableCss()">\n' +
+ ' ng-style="getLabelsCss()">\n' +
' <div gantt-vertical-scroll-receiver>\n' +
' <div ng-repeat="row in gantt.rowsManager.visibleRows track by row.model.id">\n' +
' <gantt-row-label></gantt-row-label>\n' +
diff --git a/demo/dist/scripts/vendor.js b/demo/dist/scripts/vendor.js
index <HASH>..<HASH> 100644
--- a/demo/dist/scripts/vendor.js
+++ b/demo/dist/scripts/vendor.js
@@ -42049,7 +42049,7 @@ Github: https://github.com/angular-gantt/angular-gantt.git
builder.controller = function($scope) {
var hScrollBarHeight = layout.getScrollBarHeight();
- $scope.getScrollableCss = function() {
+ $scope.getLabelsCss = function() {
var css = {};
if ($scope.maxHeight) {
@@ -42369,7 +42369,7 @@ angular.module('gantt.drawtask.templates', []).run(['$templateCache', function($
angular.module('gantt.labels.templates', []).run(['$templateCache', function($templateCache) {
$templateCache.put('plugins/labels/labelsBody.tmpl.html',
'<div class="gantt-labels-body"\n' +
- ' ng-style="getScrollableCss()">\n' +
+ ' ng-style="getLabelsCss()">\n' +
' <div gantt-vertical-scroll-receiver>\n' +
' <div ng-repeat="row in gantt.rowsManager.visibleRows track by row.model.id">\n' +
' <gantt-row-label></gantt-row-label>\n' +
diff --git a/src/plugins/labels/labelsBody.directive.js b/src/plugins/labels/labelsBody.directive.js
index <HASH>..<HASH> 100644
--- a/src/plugins/labels/labelsBody.directive.js
+++ b/src/plugins/labels/labelsBody.directive.js
@@ -5,7 +5,7 @@
builder.controller = function($scope) {
var hScrollBarHeight = layout.getScrollBarHeight();
- $scope.getScrollableCss = function() {
+ $scope.getLabelsCss = function() {
var css = {};
if ($scope.maxHeight) {
diff --git a/src/plugins/labels/labelsBody.tmpl.html b/src/plugins/labels/labelsBody.tmpl.html
index <HASH>..<HASH> 100644
--- a/src/plugins/labels/labelsBody.tmpl.html
+++ b/src/plugins/labels/labelsBody.tmpl.html
@@ -1,5 +1,5 @@
<div class="gantt-labels-body"
- ng-style="getScrollableCss()">
+ ng-style="getLabelsCss()">
<div gantt-vertical-scroll-receiver>
<div ng-repeat="row in gantt.rowsManager.visibleRows track by row.model.id">
<gantt-row-label></gantt-row-label> | refac(labels): Renaming of css getter function | angular-gantt_angular-gantt | train |
4b2eef2b50c7a4f538e9cb8104dfec818e83d4ae | diff --git a/blocks/settings/block_settings.php b/blocks/settings/block_settings.php
index <HASH>..<HASH> 100644
--- a/blocks/settings/block_settings.php
+++ b/blocks/settings/block_settings.php
@@ -139,12 +139,4 @@ class block_settings extends block_base {
$this->contentgenerated = true;
return true;
}
-
- function html_attributes() {
- $attributes = parent::html_attributes();
- if (!empty($this->config->enablehoverexpansion) && $this->config->enablehoverexpansion == 'yes') {
- $attributes['class'] .= ' block_js_expansion';
- }
- return $attributes;
- }
}
diff --git a/blocks/settings/edit_form.php b/blocks/settings/edit_form.php
index <HASH>..<HASH> 100644
--- a/blocks/settings/edit_form.php
+++ b/blocks/settings/edit_form.php
@@ -37,13 +37,6 @@ class block_settings_edit_form extends block_edit_form {
$yesnooptions = array('yes'=>get_string('yes'), 'no'=>get_string('no'));
- $mform->addElement('select', 'config_enablehoverexpansion', get_string('enablehoverexpansion', $this->block->blockname), $yesnooptions);
- if (empty($this->block->config->enablehoverexpansion) || $this->block->config->enablehoverexpansion=='no') {
- $mform->getElement('config_enablehoverexpansion')->setSelected('no');
- } else {
- $mform->getElement('config_enablehoverexpansion')->setSelected('yes');
- }
-
$mform->addElement('select', 'config_enabledock', get_string('enabledock', $this->block->blockname), $yesnooptions);
if (empty($this->block->config->enabledock) || $this->block->config->enabledock=='yes') {
$mform->getElement('config_enabledock')->setSelected('yes');
diff --git a/blocks/settings/lang/en/block_settings.php b/blocks/settings/lang/en/block_settings.php
index <HASH>..<HASH> 100644
--- a/blocks/settings/lang/en/block_settings.php
+++ b/blocks/settings/lang/en/block_settings.php
@@ -24,6 +24,5 @@
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
-$string['enablehoverexpansion'] = 'Enable mouseover expansion of this block';
$string['enabledock'] = 'Allow the user to dock this block';
$string['pluginname'] = 'Settings'; | block-settings MDL-<I> Removed the expansion on hover settings which was deprecated and no longer supported | moodle_moodle | train |
e46506e7d1fc74198b8e75190d961a79494b17c7 | diff --git a/bosh_agent/lib/bosh_agent/infrastructure/openstack/registry.rb b/bosh_agent/lib/bosh_agent/infrastructure/openstack/registry.rb
index <HASH>..<HASH> 100644
--- a/bosh_agent/lib/bosh_agent/infrastructure/openstack/registry.rb
+++ b/bosh_agent/lib/bosh_agent/infrastructure/openstack/registry.rb
@@ -92,6 +92,7 @@ module Bosh::Agent
user_data["registry"].has_key?("endpoint")
raise("Cannot parse user data for endpoint #{user_data.inspect}")
end
+ Bosh::Agent::Config.logger.info("got user_data: #{user_data}")
lookup_registry(user_data)
end
@@ -105,7 +106,7 @@ module Bosh::Agent
# if we get data from an old director which doesn't set dns
# info, there is noting we can do, so just return the endpoint
- unless user_data.has_key?("dns")
+ if user_data["dns"].nil? || user_data["dns"]["nameserver"].nil?
return endpoint
end | [agent] guard against dns server list being nil | cloudfoundry_bosh | train |
f4a223296e577a0a46b8f7394a63ac0cf0b6677a | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,31 +1,46 @@
import os
-from setuptools import setup
+from sys import version_info
+from setuptools import setup, __version__ as tools_version
README_PATH = 'README.rst'
-longDesc = ""
+LONG_DESC = ''
if os.path.exists(README_PATH):
with open(README_PATH) as readme:
- longDesc = readme.read()
+ LONG_DESC = readme.read()
+
+INSTALL_REQUIRES = ['Pillow']
+EXTRAS_REQUIRE = {}
+
+if int(tools_version.split('.', 1)[0]) < 18:
+ if version_info[:2] < (3, 4):
+ INSTALL_REQUIRES.append('enum34')
+else:
+ EXTRAS_REQUIRE[':python_version<"3.4"'] = ['enum34']
setup(
- name = "pytesseract",
- version = "0.1.8",
- author = "Samuel Hoffstaetter",
- author_email="[email protected]",
- maintainer = "Matthias Lee",
- maintainer_email = "[email protected]",
- description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
- long_description = longDesc,
- license = "GPLv3",
- keywords = "python-tesseract OCR Python",
- url = "https://github.com/madmaze/python-tesseract",
+ name='pytesseract',
+ version='0.1.8',
+ author='Samuel Hoffstaetter',
+ author_email='[email protected]',
+ maintainer='Matthias Lee',
+ maintainer_email='[email protected]',
+ description=(
+ "Python-tesseract is a python wrapper for Google's Tesseract-OCR"
+ ),
+ long_description=LONG_DESC,
+ license='GPLv3',
+ keywords='python-tesseract OCR Python',
+ url='https://github.com/madmaze/python-tesseract',
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
- package_data = {'pytesseract': ['*.png','*.jpg']},
- install_requires = ['Pillow'],
- entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
- classifiers = [
+ package_data={'pytesseract': ['*.png', '*.jpg']},
+ install_requires=INSTALL_REQUIRES,
+ extras_require=EXTRAS_REQUIRE,
+ entry_points={
+ 'console_scripts': ['pytesseract = pytesseract.pytesseract:main']
+ },
+ classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3', | Add conditional enum dependency for python < <I> | madmaze_pytesseract | train |
2d9f791bef636f3e4912f82fcdcfdc1e76bd5dd5 | diff --git a/Classes/Interfaces/Time.php b/Classes/Interfaces/Time.php
index <HASH>..<HASH> 100644
--- a/Classes/Interfaces/Time.php
+++ b/Classes/Interfaces/Time.php
@@ -45,5 +45,5 @@ interface Time
*
* @var int
*/
- const SECONDS_PER_YEAR = 220752000;
+ const SECONDS_PER_YEAR = self::SECONDS_PER_DAY * 365;
} | [BUGFIX] Fix the seconds per year constant (#<I>)
Closes #<I> | oliverklee_ext-oelib | train
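The magic number hides the bug well; a quick arithmetic check (Python) shows the old constant was exactly seven years' worth of seconds:

    SECONDS_PER_DAY = 24 * 60 * 60         # 86400
    old_constant = 220752000
    print(old_constant / SECONDS_PER_DAY)  # 2555.0 days, i.e. 7 * 365
    print(SECONDS_PER_DAY * 365)           # 31536000, the corrected value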
5440439b195c0d2d30c8facf73d2e7b77bedf846 | diff --git a/go_agent/src/bosh/app/app.go b/go_agent/src/bosh/app/app.go
index <HASH>..<HASH> 100644
--- a/go_agent/src/bosh/app/app.go
+++ b/go_agent/src/bosh/app/app.go
@@ -131,7 +131,13 @@ func parseOptions(args []string) (opts options, err error) {
flagSet.SetOutput(ioutil.Discard)
flagSet.StringVar(&opts.InfrastructureName, "I", "", "Set Infrastructure")
flagSet.StringVar(&opts.PlatformName, "P", "", "Set Platform")
- flagSet.StringVar(&opts.BaseDirectory, "B", "/var/vcap", "Set Base Directory")
+ flagSet.StringVar(&opts.BaseDirectory, "b", "/var/vcap", "Set Base Directory")
+
+ // The following two options are accepted but ignored for compatibility with the old agent
+ var systemRoot string
+ flagSet.StringVar(&systemRoot, "r", "/", "system root (ignored by go agent)")
+ var noAlerts bool
+ flagSet.BoolVar(&noAlerts, "no-alerts", false, "don't process alerts (ignored by go agent)")
err = flagSet.Parse(args[1:])
return | accept but ignore system root and no-alerts flag
This is for compatibility with how the ruby agent runs in tests | cloudfoundry_bosh | train
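The same accept-but-ignore trick, sketched with Python's argparse for illustration (flag names mirror the patch; this is not the Go code): the legacy options still parse, their values are simply never read.

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-b', default='/var/vcap', help='base directory')
    # accepted but ignored, kept so legacy ruby-agent command lines still work
    parser.add_argument('-r', default='/', help='system root (ignored)')
    parser.add_argument('--no-alerts', action='store_true',
                        help="don't process alerts (ignored)")

    opts = parser.parse_args(['-r', '/', '--no-alerts'])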
75c31d0b99da71e46c8bd2334706b6c6016cc6e6 | diff --git a/src/packages/dom.js b/src/packages/dom.js
index <HASH>..<HASH> 100644
--- a/src/packages/dom.js
+++ b/src/packages/dom.js
@@ -42,7 +42,7 @@ Dom.remove = (el) => {
Dom.replaceWith = (el, referenceNode) => {
Dom.remove(referenceNode);
- el.outerHTML = referenceNode;
+ el.parentNode.replaceChild(referenceNode, el);
};
Dom.hide = (el) => { | Enable reinitialise. | madebymany_sir-trevor-js | train |
5b345784e95e85e7ab6781c956668b4bf8368c73 | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -117,7 +117,7 @@ gulp.task('dev', ['dev:static', 'dev:applystyles', 'dev:generate'], function() {
gulp.watch('lib/app/sass/**/*.scss', function() {
runSequence('sass:no-fail', 'dev:applystyles', 'dev:generate');
});
- gulp.watch(['lib/app/js/**/*.js', '!lib/app/js/vendor/**/*.js'], function() {
+ gulp.watch(['lib/app/js/**/*.js', 'lib/app/views/**/*', 'lib/app/index.html', '!lib/app/js/vendor/**/*.js'], function() {
gulp.start('lint:js');
runSequence('js:app', 'dev:generate');
});
diff --git a/lib/app/js/directives/scopeUserStyles.js b/lib/app/js/directives/scopeUserStyles.js
index <HASH>..<HASH> 100644
--- a/lib/app/js/directives/scopeUserStyles.js
+++ b/lib/app/js/directives/scopeUserStyles.js
@@ -21,11 +21,14 @@ angular.module('sgApp')
return element[0].innerHTML;
}, function(newVal) {
root.innerHTML = style + newVal;
+ //host.innerHTML = '';
});
root.innerHTML = style + content;
+ //host.innerHTML = '';
} else {
host.innerHTML = content;
}
+ angular.bootstrap(host.firstChild);
}
};
});
diff --git a/lib/app/views/element-fullscreen.html b/lib/app/views/element-fullscreen.html
index <HASH>..<HASH> 100644
--- a/lib/app/views/element-fullscreen.html
+++ b/lib/app/views/element-fullscreen.html
@@ -1 +1,2 @@
+<h1>Here you are:</h1>
<div ng-bind-html="markup | addWrapper | unsafe" dynamic-compile sg-scope-user-styles></div>
diff --git a/lib/demo/testDirective.html b/lib/demo/testDirective.html
index <HASH>..<HASH> 100644
--- a/lib/demo/testDirective.html
+++ b/lib/demo/testDirective.html
@@ -1 +1 @@
-<p>Hello from test directive</p>
\ No newline at end of file
+<p ng-controller="sgAppTest" ng-click="alert()">Click here!</p>
\ No newline at end of file
diff --git a/lib/demo/testDirective.js b/lib/demo/testDirective.js
index <HASH>..<HASH> 100644
--- a/lib/demo/testDirective.js
+++ b/lib/demo/testDirective.js
@@ -3,7 +3,13 @@
// Test directive is used to demo lazy loading external directive in the test project
angular.module('sgAppTest', [])
+ .controller('sgAppTest', function($scope, $element) {
+ $scope.alert = function() {
+ $element[0].innerHTML = 'You have clicked!';
+ };
+ })
.directive('sgTestDirective', function() {
+ console.log(document.getElementById('socketDisconnection'));
return {
replace: true,
restrict: 'A', | Add click handler to demo Angular directive | SC5_sc5-styleguide | train |
5be8bface86e83624cc593f90d71a71278b28d53 | diff --git a/core/src/main/java/de/bwaldvogel/mongo/backend/AbstractMongoDatabase.java b/core/src/main/java/de/bwaldvogel/mongo/backend/AbstractMongoDatabase.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/de/bwaldvogel/mongo/backend/AbstractMongoDatabase.java
+++ b/core/src/main/java/de/bwaldvogel/mongo/backend/AbstractMongoDatabase.java
@@ -753,7 +753,7 @@ public abstract class AbstractMongoDatabase<P> implements MongoDatabase {
Index<P> index = openOrCreateIdIndex(collectionName, indexName, ascending);
log.info("adding unique _id index for collection {}", collectionName);
collection.addIndex(index);
- } else if (Utils.isTrue(indexDescription.get("unique"))) {
+ } else {
List<IndexKey> keys = new ArrayList<>();
for (Entry<String, Object> entry : key.entrySet()) {
String field = entry.getKey();
@@ -762,16 +762,27 @@ public abstract class AbstractMongoDatabase<P> implements MongoDatabase {
}
boolean sparse = Utils.isTrue(indexDescription.get("sparse"));
- log.info("adding {} unique index {} for collection {}", sparse ? "sparse" : "non-sparse", keys, collectionName);
+ final Index<P> index;
+ if (Utils.isTrue(indexDescription.get("unique"))) {
+ log.info("adding {} unique index {} for collection {}", sparse ? "sparse" : "non-sparse", keys, collectionName);
- Index<P> index = openOrCreateUniqueIndex(collectionName, indexName, keys, sparse);
- collection.addIndex(index);
- } else {
- // TODO: non-unique non-id indexes not yet implemented
- log.warn("adding non-unique non-id index with key {} is not yet implemented", key);
+ index = openOrCreateUniqueIndex(collectionName, indexName, keys, sparse);
+ } else {
+ index = openOrCreateSecondaryIndex(collectionName, indexName, keys, sparse);
+ }
+
+ if (index != null) {
+ collection.addIndex(index);
+ }
}
}
+ @VisibleForExternalBackends
+ protected Index<P> openOrCreateSecondaryIndex(String collectionName, String indexName, List<IndexKey> keys, boolean sparse) {
+ log.warn("adding secondary index with keys {} is not yet implemented. ignoring", keys);
+ return null;
+ }
+
private static boolean isAscending(Object keyValue) {
return Objects.equals(Utils.normalizeValue(keyValue), Double.valueOf(1.0));
} | Allow external backends to implement secondary indices | bwaldvogel_mongo-java-server | train |
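The shape of the extension point, reduced to a Python sketch (class and method names are hypothetical stand-ins for the Java originals): the base class ships a hook that returns None for "not supported", and the caller registers an index only when the hook produced one, so external backends can override the hook without the core knowing about them.

    class AbstractDatabase:
        def open_or_create_unique_index(self, name, keys, sparse):
            return ('unique-index', name)   # stand-in for a real index object

        def open_or_create_secondary_index(self, name, keys, sparse):
            # default: secondary indexes unsupported; subclasses may override
            return None

        def add_index(self, collection, name, keys, sparse, unique):
            if unique:
                index = self.open_or_create_unique_index(name, keys, sparse)
            else:
                index = self.open_or_create_secondary_index(name, keys, sparse)
            if index is not None:
                collection.append(index)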
274ef668625ae1b9333d44619c3d3bb6b71bcdd4 | diff --git a/bundle.js b/bundle.js
index <HASH>..<HASH> 100644
--- a/bundle.js
+++ b/bundle.js
@@ -2996,6 +2996,14 @@ exports.transform = function(ast) {
return types.traverse(ast, visitNode);
};
+// Makes a unique context identifier. This is needed to handle retrieval of
+// tempvars from contexts up the scope in nested generator situation.
+// see issue #70
+var nextCtxId = 0;
+function makeContextId() {
+ return b.identifier("$ctx" + nextCtxId++);
+}
+
function visitNode(node) {
if (!n.Function.check(node) || !node.generator) {
// Note that because we are not returning false here the traversal
@@ -3014,7 +3022,7 @@ function visitNode(node) {
}
// TODO Ensure these identifiers are named uniquely.
- var contextId = b.identifier("$ctx");
+ var contextId = makeContextId();
var functionId = node.id ? b.identifier(node.id.name + "$") : null/*Anonymous*/;
var argsId = b.identifier("$args");
var wrapGeneratorId = b.identifier("wrapGenerator");
@@ -4114,6 +4122,15 @@ NPp.needsParens = function() {
|| n.Property.check(parent)
|| n.ConditionalExpression.check(parent);
+ if (n.YieldExpression.check(node))
+ return isBinary(parent)
+ || n.CallExpression.check(parent)
+ || n.MemberExpression.check(parent)
+ || n.NewExpression.check(parent)
+ || n.ConditionalExpression.check(parent)
+ || n.UnaryExpression.check(parent)
+ || n.YieldExpression.check(parent);
+
if (n.NewExpression.check(parent) &&
this.name === "callee") {
assert.strictEqual(parent.callee, node);
@@ -4163,9 +4180,6 @@ NPp.needsParens = function() {
this.firstInStatement())
return true;
- if (n.YieldExpression.check(node))
- return isBinary(parent);
-
return false;
};
@@ -4341,7 +4355,12 @@ var Sp = Scope.prototype;
Sp.didScan = false;
Sp.declares = function(name) {
- if (!this.didScan) {
+ this.scan();
+ return hasOwn.call(this.bindings, name);
+};
+
+Sp.scan = function(force) {
+ if (force || !this.didScan) {
for (var name in this.bindings) {
// Empty out this.bindings, just in cases.
delete this.bindings[name];
@@ -4349,8 +4368,11 @@ Sp.declares = function(name) {
scanScope(this.path, this.bindings);
this.didScan = true;
}
+};
- return hasOwn.call(this.bindings, name);
+Sp.getBindings = function () {
+ this.scan();
+ return this.bindings;
};
function scanScope(path, bindings) { | Regenerate bundle.js for .needsParens and context naming improvements. | facebook_regenerator | train |
690cbce373c9a33b853a3d019521e5e1116eff0c | diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -116,6 +116,9 @@ class UrlSlug {
} = parseOptions(options)
const fragments = unidecode(String(string)).match(NORMALIZE)
+ if (!fragments) {
+ return ''
+ }
return transformer
? transformer(fragments, separator)
@@ -141,6 +144,10 @@ class UrlSlug {
fragments = slug.match(REVERT_UNKNOWN)
}
+ if (!fragments) {
+ return ''
+ }
+
return transformer ? transformer(fragments, ' ') : fragments.join(' ')
}
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -172,6 +172,15 @@ describe('module', () => {
.to.be.equal('Rct')
})
+ it('should handle empty strings', () => {
+ expect(instance.convert(''))
+ .to.be.equal('')
+ })
+
+ it('should handle strings with no alphanumeric characters', () => {
+ expect(instance.convert('- ( ) [ ]'))
+ .to.be.equal('')
+ })
})
describe('revert', () => {
@@ -199,6 +208,11 @@ describe('module', () => {
.to.be.equal('Comfortably Numb')
})
+ it('should empty strings revert to another empty string', () => {
+ expect(instance.revert(''))
+ .to.be.equal('')
+ })
+
})
}) | Properly handle empty strings (#6)
* properly handle empty strings
changed convert() to return an empty string if there are no fragments.
added test case for empty strings
added test case for a string with only symbols
* fix reverting empty strings & add a test case for it | stldo_url-slug | train |
5c860ec0488b9dfe3e7420daf233505c5b1f3a8d | diff --git a/salt/modules/parted.py b/salt/modules/parted.py
index <HASH>..<HASH> 100644
--- a/salt/modules/parted.py
+++ b/salt/modules/parted.py
@@ -145,14 +145,12 @@ def list_(device, unit=None):
'physical sector': cols[4],
'partition table': cols[5],
'model': cols[6]}
- try:
+ if len(cols) == 8:
ret['info']['disk flags'] = cols[7]
- except IndexError:
# Older parted (2.x) doesn't show disk flags in the 'print'
# output, and will return a 7-column output for the info
# line. In these cases we just leave this field out of the
# return dict.
- pass
mode = 'partitions'
else:
ret['partitions'][cols[0]] = { | Remove use of try/except for flow control
If the intent is to only extract the 7th column if it exists then it
makes more sense to actually test for the number of columns rather than
catch an IndexError which may occur at any of the 3 list index
operations within the try block. | saltstack_salt | train |
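Both styles in one runnable sketch (a hypothetical seven-column parted row): the exception-based version catches an IndexError no matter which of the lookups raised it, while the length test guards exactly the optional eighth column.

    cols = ['8590MB', 'msdos', '8590MB', '512B', '512B', 'gpt', 'Virtio']  # 7 columns
    info = {}

    # exception-driven flow control: works, but masks which lookup failed
    try:
        info['disk flags'] = cols[7]
    except IndexError:
        pass

    # explicit test: states the intent and guards only the optional column
    if len(cols) == 8:
        info['disk flags'] = cols[7]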
caa6577e2c8407a59645ca3bdfedd552eef41335 | diff --git a/Gemfile.lock b/Gemfile.lock
index <HASH>..<HASH> 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
- paginas (0.1.1)
+ paginas (0.1.3)
rails (~> 4.2.0)
tinymce-rails
diff --git a/app/controllers/paginas/pages_controller.rb b/app/controllers/paginas/pages_controller.rb
index <HASH>..<HASH> 100755
--- a/app/controllers/paginas/pages_controller.rb
+++ b/app/controllers/paginas/pages_controller.rb
@@ -2,23 +2,20 @@ require_dependency "paginas/application_controller"
module Paginas
class PagesController < ApplicationController
- before_action :set_page, only: [:show, :edit, :update, :destroy]
+ before_action :set_page, only: [:display,:show, :edit, :update, :destroy]
def index
- if params[:featured]
- @pages = Page.where(featured: true)
- render :featured
- else
- @pages = Page.all
- end
+ @pages = Page.all
+ end
+ def featured
+ @pages = Page.where(featured: true)
+ render :featured
end
def show
- if params[:display_only]
- render :display
- end
end
-
+ def display
+ end
def new
@page = Page.new
end
diff --git a/config/routes.rb b/config/routes.rb
index <HASH>..<HASH> 100755
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -1,6 +1,6 @@
Paginas::Engine.routes.draw do
resources :pages
- get 'featured', to: 'pages#index', featured: true
- get 'display/:id', to: 'pages#show', display_only: true
+ get 'featured', to: 'pages#featured'
+ get 'display/:id', to: 'pages#display', as: "display"
root "pages#index"
end
diff --git a/lib/paginas/version.rb b/lib/paginas/version.rb
index <HASH>..<HASH> 100755
--- a/lib/paginas/version.rb
+++ b/lib/paginas/version.rb
@@ -1,3 +1,3 @@
module Paginas
- VERSION = "0.1.2"
+ VERSION = "0.1.3"
end | Added featured and display actions to the controller to remove logic from index and show. Changed routes to | CarlosRoque_paginas | train |
59f5ed2ee55b2cc3d157acdfeed0e3703c5fa9dd | diff --git a/lib/catissue/database/controlled_values.rb b/lib/catissue/database/controlled_values.rb
index <HASH>..<HASH> 100644
--- a/lib/catissue/database/controlled_values.rb
+++ b/lib/catissue/database/controlled_values.rb
@@ -11,20 +11,8 @@ require 'caruby/domain/properties'
module CaTissue
# This ControlledValues class loads caTissue permissible values from the database.
- # Use of this class requires the +dbi+ gem and the following caTissue database access
- # properties are defined in the
- # home directory .catissue.yaml file:
- # * :database_host - the database host
- # * :database - the database name
- # * :database_user - the database username (not the caTissue login name)
- # * :database_password - the database password (not the caTissue login password)
- #
- # The default :database_host is the application :host property value, which in turn
- # defaults to +localhost+.
- #
- # The optional :database_port property overrides the default MySQL port.
- #
- # ControlledValues is an auxiliary utility class and is not used by the CaTissue Ruby API.
+ # Use of this class requires the +dbi+ gem. See {CaRuby::SQLExecutor#initialize}
+ # for a description of the database access properties.
class ControlledValues
include Singleton | Refer to SQLExecutor for option descriptions. | caruby_tissue | train |
999e760b12c6c76ae0c91094bd80a4748e4d5a12 | diff --git a/entry.py b/entry.py
index <HASH>..<HASH> 100755
--- a/entry.py
+++ b/entry.py
@@ -6,6 +6,7 @@ class Entry:
self.hours = hours
self.description = description
self.date = date
+ self.pushed = False
if project_name in settings.projects:
self.project_id = settings.projects[project_name][0]
diff --git a/parser.py b/parser.py
index <HASH>..<HASH> 100755
--- a/parser.py
+++ b/parser.py
@@ -17,6 +17,7 @@ class Parser:
for line in file:
line_number += 1
+ self.lines[line_number] = {'text': line, 'entry': None}
self.process_line(line, line_number)
file.close()
@@ -24,6 +25,7 @@ class Parser:
def __init__(self, file):
self.file = file
self.entries = {}
+ self.lines = {}
class TaxiParser(Parser):
def process_date(self, date_matches):
@@ -70,4 +72,19 @@ class TaxiParser(Parser):
if not self.date in self.entries:
self.entries[self.date] = []
- self.entries[self.date].append(Entry(self.date, splitted_line[0], total_hours, splitted_line[2]))
+ new_entry = Entry(self.date, splitted_line[0], total_hours, splitted_line[2])
+ self.entries[self.date].append(new_entry)
+ self.lines[line_number]['entry'] = new_entry
+
+ def update_file(self):
+ file = open(self.file, 'w')
+
+ for line in self.lines.itervalues():
+ text = line['text']
+
+ if line['entry'] is not None and line['entry'].pushed:
+ text = '# %s' % text
+
+ file.write(text)
+
+ file.close()
diff --git a/pusher.py b/pusher.py
index <HASH>..<HASH> 100755
--- a/pusher.py
+++ b/pusher.py
@@ -58,7 +58,9 @@ class Pusher:
response = self._request(post_url, parameters)
response_body = response.read()
- print response_body
+ entry.pushed = True
+
+ #print response_body
def push(self, entries):
if not self._login():
diff --git a/taxi.py b/taxi.py
index <HASH>..<HASH> 100755
--- a/taxi.py
+++ b/taxi.py
@@ -37,6 +37,7 @@ def commit(parser):
)
pusher.push(parser.entries)
+ parser.update_file()
def main():
usage = "usage: %prog [options] action"
diff --git a/zebra.sample b/zebra.sample
index <HASH>..<HASH> 100755
--- a/zebra.sample
+++ b/zebra.sample
@@ -1,15 +1,15 @@
02/04/2011
-liip_internal 09:00-10:00 Fix from yesterday didn't work, repaired coffee machine # again
+# liip_internal 09:00-10:00 Fix from yesterday didn't work, repaired coffee machine # again
#liip_meeting 10:00-10:45 Slashdotting
#liip_sick 0.25 Kernel recompilation after kernel panic
-liip_internal 14:00-16:30 Some more coffee needed because of the kernel panic
+# liip_internal 14:00-16:30 Some more coffee needed because of the kernel panic
unknown? 14:00-16:30 Some more coffee needed because of the kernel panic
01/04/2011
-liip_internal 09:00-14:30 Started the day with a coffee
+# liip_internal 09:00-14:30 Started the day with a coffee
#liip_meeting 15:00-16:00 Slept a little
-liip_internal 16:00-16:45 Team meeting
+# liip_internal 16:00-16:45 Team meeting
#liip_meeting 16:45-17:00 Some new coffee
-liip_internal 17:00-17:30 Fixed the coffee machine for tomorrow
+# liip_internal 17:00-17:30 Fixed the coffee machine for tomorrow | Added zebra file commenting after pushing | liip_taxi | train |
79af476f7fb8301a683ee1ec2e872f64a0b69722 | diff --git a/limpyd_jobs/workers.py b/limpyd_jobs/workers.py
index <HASH>..<HASH> 100644
--- a/limpyd_jobs/workers.py
+++ b/limpyd_jobs/workers.py
@@ -781,7 +781,10 @@ class WorkerConfig(object):
print "The worker will run with the following options:"
for name in self.options.worker_class.parameters:
option = getattr(self.worker, name)
- if isinstance(option, (list, tuple, set)):
+ if name == 'callback' and \
+ self.options.worker_class.execute == Worker.execute:
+ option = '<jobs "run" method>'
+ elif isinstance(option, (list, tuple, set)):
option = ','.join(option)
print " - %s = %s" % (name.replace('_', '-'), option) | Update print_options "callback" entry if default one (which call jobs run method) | limpyd_redis-limpyd-jobs | train |
a80d66e42bb6cb6b5c61f9bbab9fac3c7cb86dad | diff --git a/src/main/java/com/sebastian_daschner/jaxrs_analyzer/analysis/javadoc/JavaDocAnalyzer.java b/src/main/java/com/sebastian_daschner/jaxrs_analyzer/analysis/javadoc/JavaDocAnalyzer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/sebastian_daschner/jaxrs_analyzer/analysis/javadoc/JavaDocAnalyzer.java
+++ b/src/main/java/com/sebastian_daschner/jaxrs_analyzer/analysis/javadoc/JavaDocAnalyzer.java
@@ -7,6 +7,7 @@ import com.sebastian_daschner.jaxrs_analyzer.model.results.MethodResult;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.MethodDoc;
+import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -60,7 +61,7 @@ public class JavaDocAnalyzer {
}
private String joinPaths(final Set<Path> projectSourcePaths) {
- return projectSourcePaths.stream().map(Path::toString).collect(Collectors.joining(":"));
+ return projectSourcePaths.stream().map(Path::toString).collect(Collectors.joining(File.pathSeparator));
}
private void combineResults(final Set<ClassResult> classResults) {
diff --git a/src/test/java/com/sebastian_daschner/jaxrs_analyzer/analysis/ProjectAnalyzerTest.java b/src/test/java/com/sebastian_daschner/jaxrs_analyzer/analysis/ProjectAnalyzerTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/sebastian_daschner/jaxrs_analyzer/analysis/ProjectAnalyzerTest.java
+++ b/src/test/java/com/sebastian_daschner/jaxrs_analyzer/analysis/ProjectAnalyzerTest.java
@@ -61,7 +61,7 @@ public class ProjectAnalyzerTest {
path = Paths.get(testClassPath).toAbsolutePath();
- final Set<Path> classPaths = Stream.of(System.getProperty("java.class.path").split(":"))
+ final Set<Path> classPaths = Stream.of(System.getProperty("java.class.path").split(File.pathSeparator))
.map(Paths::get)
.collect(Collectors.toSet()); | replace fixed path separator by File.pathSeparator
This fixes #<I> Javadoc analysis fails on Windows | sdaschner_jaxrs-analyzer | train |
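Why the hard-coded ':' breaks Windows, shown with Python's os.pathsep (the direct analogue of Java's File.pathSeparator): ':' already appears inside Windows paths as the drive-letter separator, so joining on it corrupts the list.

    import os

    paths = [r'C:\project\src', r'C:\project\generated']
    # os.pathsep is ';' on Windows and ':' elsewhere; a literal ':' would make
    # 'C:\project\src' split at the drive letter when parsed back.
    joined = os.pathsep.join(paths)
    print(joined)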
6f3cd118a7a60cacf043a561df97e8e436f6eb18 | diff --git a/openid/tools/oiddiag.py b/openid/tools/oiddiag.py
index <HASH>..<HASH> 100644
--- a/openid/tools/oiddiag.py
+++ b/openid/tools/oiddiag.py
@@ -252,6 +252,15 @@ class Thing(ApacheView):
def associate(self, auth_request):
self.statusMsg("Associating with %s..." % (auth_request.server_url,))
+ consu = self.getConsumer()
+ dh = DiffieHellman()
+ body = consu._createAssociateRequest(dh)
+ assoc = consu._fetchAssociation(dh, auth_request.server_url, body)
+ self.record(Event("Association made. "
+ "Handle: %s, issued: %s, lifetime: %s hours" % (
+ assoc.handle, time.ctime(assoc.issued), assoc.lifetime / 3600.,)))
+
+
def getConsumer(self):
if self.consumer is None:
# Super-Bogosity! | [project @ oiddiag: make association request] | necaris_python3-openid | train |
e8cad4324b34fb7463fdf296084e1bee48b488ef | diff --git a/aws/resource_aws_emr_instance_fleet_test.go b/aws/resource_aws_emr_instance_fleet_test.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_emr_instance_fleet_test.go
+++ b/aws/resource_aws_emr_instance_fleet_test.go
@@ -19,6 +19,7 @@ func TestAccAWSEMRInstanceFleet_basic(t *testing.T) {
resourceName := "aws_emr_instance_fleet.task"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
+ ErrorCheck: testAccErrorCheck(t, emr.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSEmrInstanceFleetDestroy,
Steps: []resource.TestStep{
@@ -46,6 +47,7 @@ func TestAccAWSEMRInstanceFleet_zero_count(t *testing.T) {
resourceName := "aws_emr_instance_fleet.task"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
+ ErrorCheck: testAccErrorCheck(t, emr.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSEmrInstanceFleetDestroy,
Steps: []resource.TestStep{
@@ -81,6 +83,7 @@ func TestAccAWSEMRInstanceFleet_ebsBasic(t *testing.T) {
resourceName := "aws_emr_instance_fleet.task"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
+ ErrorCheck: testAccErrorCheck(t, emr.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSEmrInstanceFleetDestroy,
Steps: []resource.TestStep{
@@ -108,6 +111,7 @@ func TestAccAWSEMRInstanceFleet_full(t *testing.T) {
resourceName := "aws_emr_instance_fleet.task"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
+ ErrorCheck: testAccErrorCheck(t, emr.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSEmrInstanceFleetDestroy,
Steps: []resource.TestStep{
@@ -137,6 +141,7 @@ func TestAccAWSEMRInstanceFleet_disappears(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
+ ErrorCheck: testAccErrorCheck(t, emr.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSEmrInstanceFleetDestroy,
Steps: []resource.TestStep{ | tests/r/emr_instance_fleet: Add ErrorCheck | terraform-providers_terraform-provider-aws | train |
321decdfb37baf816be4d707afc12c76c88c3b57 | diff --git a/src/Service/Api.php b/src/Service/Api.php
index <HASH>..<HASH> 100644
--- a/src/Service/Api.php
+++ b/src/Service/Api.php
@@ -273,9 +273,9 @@ class Api
$connectorOptions['api_token_type'] = 'access';
}
- $proxy = $this->getProxy();
- if ($proxy !== null) {
- $connectorOptions['proxy'] = $proxy;
+ $guzzleOptions = $this->getGuzzleOptions();
+ if (!empty($guzzleOptions['defaults']['proxy'])) {
+ $connectorOptions['proxy'] = $guzzleOptions['defaults']['proxy'];
}
// Override the OAuth 2.0 token and revoke URLs if provided.
@@ -372,7 +372,14 @@ class Api
'headers' => ['User-Agent' => $this->getUserAgent()],
'debug' => $this->config->get('api.debug') ? STDERR : false,
'verify' => !$this->config->get('api.skip_ssl'),
- 'proxy' => $this->getProxy(),
+ 'proxy' => array_map(function($proxyUrl) {
+ // If Guzzle is going to use PHP's built-in HTTP streams,
+ // rather than curl, then transform the proxy scheme.
+ if (!\extension_loaded('curl') && \ini_get('allow_url_fopen')) {
+ return \str_replace(['http://', 'https://'], ['tcp://', 'tcp://'], $proxyUrl);
+ }
+ return $proxyUrl;
+ }, $this->getProxies()),
'timeout' => $this->config->get('api.default_timeout'),
],
];
@@ -464,25 +471,21 @@ class Api
}
/**
- * Finds a proxy address based on the http_proxy or https_proxy environment variables.
+ * Finds proxy addresses based on the http_proxy and https_proxy environment variables.
*
- * @return string|array|null
+ * @return array
+ * An ordered array of proxy URLs keyed by scheme: 'https' and/or 'http'.
*/
- private function getProxy() {
- // The proxy variables should be ignored in a non-CLI context.
- if (PHP_SAPI !== 'cli') {
- return null;
- }
+ private function getProxies() {
$proxies = [];
- foreach (['https', 'http'] as $scheme) {
- $proxies[$scheme] = str_replace(['http://', 'https://'], ['tcp://', 'ssl://'], getenv($scheme . '_proxy'));
+ if (getenv('https_proxy') !== false) {
+ $proxies['https'] = getenv('https_proxy');
}
- $proxies = array_filter($proxies);
- if (count($proxies)) {
- return count($proxies) === 1 ? reset($proxies) : $proxies;
+ // An environment variable prefixed by 'http_' cannot be trusted in a non-CLI (web) context.
+ if (PHP_SAPI === 'cli' && getenv('http_proxy') !== false) {
+ $proxies['http'] = getenv('http_proxy');
}
-
- return null;
+ return $proxies;
}
/**
@@ -494,6 +497,7 @@ class Api
*/
public function getStreamContext($timeout = 15) {
$opts = [
+ // See https://www.php.net/manual/en/context.http.php
'http' => [
'method' => 'GET',
'follow_location' => 0,
@@ -504,15 +508,12 @@ class Api
],
],
];
- $proxy = $this->getProxy();
- if (is_array($proxy)) {
- if (isset($proxy['https'])) {
- $opts['http']['proxy'] = $proxy['https'];
- } elseif (isset($proxy['http'])) {
- $opts['http']['proxy'] = $proxy['http'];
- }
- } elseif (is_string($proxy) && $proxy !== '') {
- $opts['http']['proxy'] = $proxy;
+
+ // The PHP stream context only accepts a single proxy option, under the schemes 'tcp' or 'ssl'.
+ $proxies = $this->getProxies();
+ foreach ($proxies as $scheme => $proxyUrl) {
+ $opts['http']['proxy'] = \str_replace(['http://', 'https://'], ['tcp://', 'ssl://'], $proxyUrl);
+ break;
}
return stream_context_create($opts); | Fix proxy settings in Api service (#<I>)
* Fix proxy settings in Api service
* Use the environment variable directly for Guzzle
* Borrow Guzzle's detection logic for https_proxy and http_proxy
* Transform the proxy URL for non-curl
* Clarify comment | platformsh_platformsh-cli | train |
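The scheme rewrite in isolation, as a Python sketch: curl accepts proxies as http:// URLs, while PHP's stream transport wants tcp:// (or ssl:// for a TLS hop), so the URL is rewritten only on the stream path. The tls flag is a generalization added for this sketch.

    def stream_proxy(proxy_url, tls=False):
        # translate a curl-style proxy URL for PHP stream transports
        scheme = 'ssl://' if tls else 'tcp://'
        return proxy_url.replace('https://', scheme).replace('http://', scheme)

    print(stream_proxy('http://proxy.internal:3128'))             # tcp://proxy.internal:3128
    print(stream_proxy('https://proxy.internal:3128', tls=True))  # ssl://proxy.internal:3128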
910fc2caf93ddf12cd8cfa2402dedd02b16bff5c | diff --git a/src/test/java/io/github/bonigarcia/test/advance/SafariWithGlobalOptionsJupiterTest.java b/src/test/java/io/github/bonigarcia/test/advance/SafariWithGlobalOptionsJupiterTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/github/bonigarcia/test/advance/SafariWithGlobalOptionsJupiterTest.java
+++ b/src/test/java/io/github/bonigarcia/test/advance/SafariWithGlobalOptionsJupiterTest.java
@@ -36,7 +36,6 @@ public class SafariWithGlobalOptionsJupiterTest {
@Options
SafariOptions safariOptions = new SafariOptions();
{
- safariOptions.useCleanSession(true);
safariOptions.setUseTechnologyPreview(false);
}
diff --git a/src/test/java/io/github/bonigarcia/test/annotations/SafariAnnotationReaderTest.java b/src/test/java/io/github/bonigarcia/test/annotations/SafariAnnotationReaderTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/github/bonigarcia/test/annotations/SafariAnnotationReaderTest.java
+++ b/src/test/java/io/github/bonigarcia/test/annotations/SafariAnnotationReaderTest.java
@@ -17,7 +17,6 @@
package io.github.bonigarcia.test.annotations;
import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertTrue;
import java.lang.reflect.Parameter;
import java.util.Optional;
@@ -46,7 +45,6 @@ public class SafariAnnotationReaderTest {
@ParameterizedTest
@MethodSource("testClassProvider")
- @SuppressWarnings("deprecation")
void testSafariOptions(Class<?> testClass) throws Exception {
Parameter parameter = testClass
.getMethod("safariTest", SafariDriver.class).getParameters()[0];
@@ -54,7 +52,6 @@ public class SafariAnnotationReaderTest {
SafariOptions safariOptions = (SafariOptions) annotationsReader
.getOptions(parameter, testInstance);
- assertTrue(safariOptions.getUseCleanSession());
assertFalse(safariOptions.getUseTechnologyPreview());
}
} | Fix Safari tests (broken with the update of Selenium) | bonigarcia_selenium-jupiter | train |
a033a970a155c8371849dd4e3a51c5b8961f9723 | diff --git a/lib/mess/external.js b/lib/mess/external.js
index <HASH>..<HASH> 100644
--- a/lib/mess/external.js
+++ b/lib/mess/external.js
@@ -59,6 +59,7 @@ var External = function External(env) {
return {
'.zip': this.unzip,
'.mss': this.plainfile,
+ '.shp': this.inplace,
'.geojson': this.plainfile,
'.kml': this.plainfile
}[extension];
@@ -68,6 +69,7 @@ var External = function External(env) {
return {
'.zip': this.unzip_dest,
'.mss': this.plainfile_dest,
+ '.shp': this.inplace_dest,
'.geojson': this.plainfile_dest,
'.kml': this.plainfile_dest
}[extension];
@@ -108,6 +110,11 @@ var External = function External(env) {
that.plainname(resource_url));
},
+ inplace_dest: function(resource_url, that) {
+ console.log(url.parse(resource_url));
+ return resource_url;
+ },
+
/**
* Deal with a plain file, which is likely to be
* GeoJSON, KML, or one of the other OGR-supported formats,
@@ -134,6 +141,17 @@ var External = function External(env) {
}
},
+ /**
+ * Deal with an inplace local file
+ *
+ * @param {String} filename the place of the file on your system.
+ * @param {String} resource_url
+ * @param {Function} callback
+ */
+ inplace: function(filename, resource_url, callback, that) {
+ callback(null, [resource_url, filename]);
+ },
+
locateShp: function(dir) {
try {
var unzipped = fs.readdirSync(dir); | First bit of support for inplace files. Require nodejs bug to be fixed. | mapbox_carto | train |
ff6354f4e8c3d8365bfd191b88f74551b9224d0e | diff --git a/src/Zephyrus/Network/HttpRequester.php b/src/Zephyrus/Network/HttpRequester.php
index <HASH>..<HASH> 100644
--- a/src/Zephyrus/Network/HttpRequester.php
+++ b/src/Zephyrus/Network/HttpRequester.php
@@ -132,9 +132,7 @@ class HttpRequester
*/
public function executeUpload(CURLFile $file, string $name = 'file', array $payload = [])
{
- $this->setContentType(ContentType::FORM_MULTIPART);
- $payload = $this->prepareMultipartFormData(array_merge([$name => $file], $payload));
- $this->execute($payload);
+ $this->execute(array_merge([$name => $file], $payload));
}
/**
@@ -165,7 +163,9 @@ class HttpRequester
/**
* Executes the HTTP request with the given payload. The payload can either be an array for the form content types
- * e.g. application/x-www-form-urlencoded or a string for other type such as application/json.
+ * e.g. application/x-www-form-urlencoded or a string for other type such as application/json. If the payload is
+ * an array and contains a CURLFile, it will automatically be handled as an upload attempt and switch the content
+ * type to multipart/form-data.
*
* @param string|array $payload
* @throws HttpRequesterException
@@ -173,6 +173,10 @@ class HttpRequester
*/
public function execute(string|array $payload = ""): string
{
+ if (is_array($payload) && $this->hasCurlFile($payload)) {
+ $this->setContentType(ContentType::FORM_MULTIPART);
+ $payload = $this->prepareMultipartFormData($payload);
+ }
$curl = $this->buildCurl($payload);
$this->response = curl_exec($curl);
if ($this->response === false) {
@@ -315,6 +319,16 @@ class HttpRequester
}
/**
+ * Retrieves the content type used for the HTTP Request.
+ *
+ * @return string
+ */
+ public function getContentType(): string
+ {
+ return $this->contentType;
+ }
+
+ /**
* Prepares the cURL instance based on the requester configurations.
*
* @param string|array $payload
@@ -445,6 +459,26 @@ class HttpRequester
}
/**
+ * Verifies if the payload (only array compatible) has a CURLFile instance within its data. If it happens, it means
+ * the request needs to be a form data content type.
+ *
+ * @param array $payload
+ * @return bool
+ */
+ private function hasCurlFile(array $payload): bool
+ {
+ foreach ($payload as $data) {
+ if (is_array($data)) {
+ return $this->hasCurlFile($data);
+ }
+ if ($data instanceof CURLFile) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
* Corrects a problem with cURL while sending array in multipart/form-data. Reconstruct an array with the proper
* formatting needed by multipart content type. Can go up to 2 levels of nested array. Needs to be done recursively
* to allow an unlimited amount of levels.
diff --git a/tests/network/HttpRequesterTest.php b/tests/network/HttpRequesterTest.php
index <HASH>..<HASH> 100644
--- a/tests/network/HttpRequesterTest.php
+++ b/tests/network/HttpRequesterTest.php
@@ -28,6 +28,7 @@ class HttpRequesterTest extends TestCase
$request->addHeaders(['X-APP' => 'PHPUnit']);
$request->addOptions([CURLOPT_RETURNTRANSFER => true]);
$request->execute();
+ self::assertEquals(ContentType::FORM, $request->getContentType());
$request = HttpRequester::get("https://raw.githubusercontent.com/dadajuice/zephyrus/master/tests/lib/filesystem/existing.txt");
$request->executeStream(function ($result, $info) {
@@ -62,6 +63,16 @@ class HttpRequesterTest extends TestCase
self::assertTrue($file instanceof \CURLFile);
self::assertEquals(ROOT_DIR . '/lib/filesystem/existing.txt', $file->getFilename());
$request->executeUpload($file, 'file', ['test' => ['name' => 't', 'age' => 3, 'classes' => ['nest', 'nest 2']]]);
+ self::assertEquals(ContentType::FORM_MULTIPART, $request->getContentType());
+ }
+
+ public function testUploadWithExecuteNested()
+ {
+ $request = HttpRequester::post("https://raw.githubusercontent.com/dadajuice/zephyrus/master/tests/lib/filesystem/sdfdgdfdgfdfg.txt");
+ $file = HttpRequester::prepareUploadFile(ROOT_DIR . '/lib/filesystem/existing.txt', 'test.txt');
+ self::assertTrue($file instanceof \CURLFile);
+ $request->execute(['test' => ['name' => 't', 'file' => $file, 'classes' => ['nest', 'nest 2']]]);
+ self::assertEquals(ContentType::FORM_MULTIPART, $request->getContentType());
}
public function testInvalidUpload() | Added flexibility for the HTTPRequester execute methods to allow upload | dadajuice_zephyrus | train |
60eb55b9be1c848a9d60deb7535372fcb517643f | diff --git a/liquibase-core/src/main/java/liquibase/database/AbstractJdbcDatabase.java b/liquibase-core/src/main/java/liquibase/database/AbstractJdbcDatabase.java
index <HASH>..<HASH> 100644
--- a/liquibase-core/src/main/java/liquibase/database/AbstractJdbcDatabase.java
+++ b/liquibase-core/src/main/java/liquibase/database/AbstractJdbcDatabase.java
@@ -202,7 +202,7 @@ public abstract class AbstractJdbcDatabase implements Database {
@Override
public int getDatabaseMajorVersion() throws DatabaseException {
if (connection == null) {
- return -1;
+ return 999;
}
try {
return connection.getDatabaseMajorVersion(); | CORE-<I> Add support for MSSQL sequences
If no connection exists (e.g. when checking feature availability on startup), assume a large version number so Liquibase will not report new-version features as unavailable | liquibase_liquibase | train
9629013f9b7060e556b6e248648654e3cadce7d6 | diff --git a/api/symboltable.py b/api/symboltable.py
index <HASH>..<HASH> 100644
--- a/api/symboltable.py
+++ b/api/symboltable.py
@@ -100,7 +100,7 @@ class SymbolTable(object):
except KeyError:
pass
- entry = self[id2] = symbol_
+ entry = self[0][id2] = symbol_
entry.callable = None # True if function, strings or arrays
entry.forwarded = False # True for a function header
entry.mangled = '%s_%s' % (self.mangle, entry.name) # Mangled name
@@ -366,8 +366,7 @@ class SymbolTable(object):
''' Like the above, but checks that entry.declared is False.
Otherwise raises an error.
- Parameter default_value specifies an initalized
- variable, if set.
+ Parameter default_value specifies an initalized variable, if set.
'''
if not self.check_is_undeclared(id_, scope=0): # 0 = Current Scope
entry = self.get_entry(id_)
@@ -388,16 +387,16 @@ class SymbolTable(object):
entry.declared = True # marks it as declared
if entry.type_ != type_.type_:
- if not type_.symbol.implicit:
+ if not type_.implicit:
syntax_error(lineno,
"'%s' suffix is for type '%s' but it was "
"declared as '%s'" %
(id_, entry.type_, type_))
return None
- type_.symbol.implicit = False
+ type_.implicit = False
type_.type_ = entry.type_
- if type_.symbol.implicit:
+ if type_.implicit:
warning_implicit_type(lineno, id_, entry.type_)
if default_value is not None and entry.type_ != default_value.type_:
@@ -612,6 +611,14 @@ class SymbolTable(object):
def make_callable(self, id_, lineno):
''' Creates a func/array/string call. Checks if id is callable or not.
+ An identifier is "callable" if it can be followed by a list of parameters.
+ This does not mean the id_ is a function, but that it allows the same
+ syntax a function does:
+
+ For example:
+ - MyFunction(a, "hello", 5) is a Function so MyFuncion is callable
+ - MyArray(5, 3.7, VAL("32")) makes MyArray identifier"callable".
+ - MyString(5 TO 7) or MyString(5) is a "callable" string.
'''
entry = self.get_or_create(id_, lineno)
if entry.callable is False: # Is it NOT callable?
diff --git a/zxbparser.py b/zxbparser.py
index <HASH>..<HASH> 100755
--- a/zxbparser.py
+++ b/zxbparser.py
@@ -393,7 +393,7 @@ def p_var_decl(p):
| DIM idlist typedef CO
'''
for vardata in p[2]:
- SYMBOL_TABLE.declare_var(vardata[0], vardata[1], p[3])
+ SYMBOL_TABLE.declare_variable(vardata[0], vardata[1], p[3])
p[0] = None # Variable declarations are made at the end of parsing | Termporary commit.
Some fixes. | boriel_zxbasic | train |
ffa30e05d153d6c0ec63de49773c68b2e6d94f51 | diff --git a/sshd-shell-spring-boot-starter/src/main/java/sshd/shell/springboot/command/HeapDumpCommand.java b/sshd-shell-spring-boot-starter/src/main/java/sshd/shell/springboot/command/HeapDumpCommand.java
index <HASH>..<HASH> 100644
--- a/sshd-shell-spring-boot-starter/src/main/java/sshd/shell/springboot/command/HeapDumpCommand.java
+++ b/sshd-shell-spring-boot-starter/src/main/java/sshd/shell/springboot/command/HeapDumpCommand.java
@@ -15,7 +15,12 @@
*/
package sshd.shell.springboot.command;
+import java.io.File;
import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.management.HeapDumpWebEndpoint;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
@@ -23,6 +28,7 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
+import sshd.shell.springboot.autoconfiguration.SshSessionContext;
import sshd.shell.springboot.autoconfiguration.SshdShellCommand;
/**
@@ -33,17 +39,38 @@ import sshd.shell.springboot.autoconfiguration.SshdShellCommand;
@ConditionalOnClass(HeapDumpWebEndpoint.class)
@ConditionalOnProperty(name = "management.endpoint.env.enabled", havingValue = "true", matchIfMissing = true)
@SshdShellCommand(value = "heapDump", description = "Heap dump command")
[email protected]
public final class HeapDumpCommand {
-
+
@Autowired
private HeapDumpWebEndpoint heapDumpEndpoint;
-
+
@SshdShellCommand(value = "live", description = "Get heapdump with live flag")
public String withLive(String arg) throws IOException {
- if (StringUtils.isEmpty(arg)) {
+ if (StringUtils.isEmpty(arg)) {
return "Usage: heapDump live <true|false>";
}
- Resource response = heapDumpEndpoint.heapDump(Boolean.valueOf(arg)).getBody();
- return response.getFile().getAbsolutePath();
+ Resource heapDumpResource = heapDumpEndpoint.heapDump(Boolean.valueOf(arg)).getBody();
+ try {
+ Path path = sessionUserPathContainingHeapDumpFile(heapDumpResource);
+ return "Resource can be downloaded with SFTP/SCP at " + path.getFileName().toString();
+ } catch (IllegalStateException ex) {
+ log.warn(ex.getMessage());
+ return "Resource can be found at " + heapDumpResource.getFile().getAbsolutePath();
+ }
+ }
+
+ private Path sessionUserPathContainingHeapDumpFile(Resource heapDumpResource) throws IOException {
+ Path userDirFilePath = Paths.get(sessionUserDir().getPath(), heapDumpResource.getFilename());
+ Path heapDumpFilePath = Paths.get(heapDumpResource.getURI());
+ return Files.move(heapDumpFilePath, userDirFilePath, StandardCopyOption.REPLACE_EXISTING);
+ }
+
+ private File sessionUserDir() throws IOException {
+ File sessionUserDir = SshSessionContext.getUserDir();
+ if (!sessionUserDir.exists()) {
+ Files.createDirectories(sessionUserDir.toPath());
+ }
+ return sessionUserDir;
}
Refactored to move the generated heap dump to the SFTP-accessible session directory when enabled. | anand1st_sshd-shell-spring-boot | train
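The move-into-session-directory step as a Python sketch (paths are hypothetical; the original is Java NIO): create the per-user directory on demand, then move the dump into it under its original name.

    from pathlib import Path
    import shutil

    def move_to_session_dir(dump_path, session_dir):
        target_dir = Path(session_dir)
        target_dir.mkdir(parents=True, exist_ok=True)   # create on first use
        target = target_dir / Path(dump_path).name
        # on POSIX an existing file of the same name is replaced, much like
        # StandardCopyOption.REPLACE_EXISTING in the Java code
        return shutil.move(str(dump_path), str(target))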
81aaa2c4b1ea524b080ce7607c9e646cd298e9f0 | diff --git a/test/client-test.js b/test/client-test.js
index <HASH>..<HASH> 100644
--- a/test/client-test.js
+++ b/test/client-test.js
@@ -635,7 +635,8 @@ describe('gaasClient.bundle()', function() {
function(err, entry3){
if(err) return done(err);
expect(entry3.reviewed).to.be.false;
- expect(entry3.notes).to.deep.equal([ 'Take note.', 'note: Take.' ])
+ // Notes are no longer on subitems.
+ // expect(entry3.notes).to.deep.equal([ 'Take note.', 'note: Take.' ])
done();
});
}) | subitem policy has changed on notes | IBM-Cloud_gp-js-client | train |
93a29df8556029f688a0e3c7a99c7a7af5c596d6 | diff --git a/src/OAuth2/Server.php b/src/OAuth2/Server.php
index <HASH>..<HASH> 100644
--- a/src/OAuth2/Server.php
+++ b/src/OAuth2/Server.php
@@ -179,14 +179,14 @@ class Server implements ResourceControllerInterface,
*
* @ingroup oauth2_section_4
*/
- public function handleTokenRequest(RequestInterface $request, ResponseInterface $response)
+ public function handleTokenRequest(RequestInterface $request, ResponseInterface $response = null)
{
$this->response = is_null($response) ? new Response() : $response;
$this->getTokenController()->handleTokenRequest($request, $this->response);
return $this->response;
}
- public function grantAccessToken(RequestInterface $request, ResponseInterface $response)
+ public function grantAccessToken(RequestInterface $request, ResponseInterface $response = null)
{
$this->response = is_null($response) ? new Response() : $response;
$value = $this->getTokenController()->grantAccessToken($request, $this->response);
@@ -223,7 +223,7 @@ class Server implements ResourceControllerInterface,
*/
public function handleAuthorizeRequest(RequestInterface $request, ResponseInterface $response, $is_authorized, $user_id = null)
{
- $this->response = is_null($response) ? new Response() : $response;
+ $this->response = $response;
$this->getAuthorizeController()->handleAuthorizeRequest($request, $this->response, $is_authorized, $user_id);
return $this->response;
}
@@ -247,21 +247,21 @@ class Server implements ResourceControllerInterface,
*
* @ingroup oauth2_section_3
*/
- public function validateAuthorizeRequest(RequestInterface $request, ResponseInterface $response)
+ public function validateAuthorizeRequest(RequestInterface $request, ResponseInterface $response = null)
{
$this->response = is_null($response) ? new Response() : $response;
$value = $this->getAuthorizeController()->validateAuthorizeRequest($request, $this->response);
return $value;
}
- public function verifyResourceRequest(RequestInterface $request, ResponseInterface $response, $scope = null)
+ public function verifyResourceRequest(RequestInterface $request, ResponseInterface $response = null, $scope = null)
{
$this->response = is_null($response) ? new Response() : $response;
$value = $this->getResourceController()->verifyResourceRequest($request, $this->response, $scope);
return $value;
}
- public function getAccessTokenData(RequestInterface $request, ResponseInterface $response)
+ public function getAccessTokenData(RequestInterface $request, ResponseInterface $response = null)
{
$this->response = is_null($response) ? new Response() : $response;
$value = $this->getResourceController()->getAccessTokenData($request, $this->response); | allows response to be null on Server methods | bshaffer_oauth2-server-php | train |
983d5431d6b923a89ec2988991b3e97a7b663272 | diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index <HASH>..<HASH> 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -672,10 +672,6 @@ class TestConnection(unittest2.TestCase):
mutation = datastore_pb.Mutation()
class Xact(object):
-
- def id(self):
- return 'xact'
-
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
@@ -746,10 +742,6 @@ class TestConnection(unittest2.TestCase):
mutation = datastore_pb.Mutation()
class Xact(object):
-
- def id(self):
- return 'xact'
-
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
@@ -821,10 +813,6 @@ class TestConnection(unittest2.TestCase):
mutation = datastore_pb.Mutation()
class Xact(object):
-
- def id(self):
- return 'xact'
-
def mutation(self):
return mutation
DATASET_ID = 'DATASET'
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index <HASH>..<HASH> 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -20,8 +20,6 @@ class TestEntity(unittest2.TestCase):
klass = self._getTargetClass()
if dataset is _MARKER:
dataset = Dataset(_DATASET_ID)
- if kind is _MARKER:
- kind = _KIND
return klass(dataset, kind)
def test_ctor_defaults(self):
@@ -215,9 +213,6 @@ class _Dataset(dict):
def get_entity(self, key):
return self.get(key)
- def get_entities(self, keys):
- return [self.get(x) for x in keys]
-
class _Connection(object):
_transaction = _saved = _deleted = None
diff --git a/gcloud/storage/test_iterator.py b/gcloud/storage/test_iterator.py
index <HASH>..<HASH> 100644
--- a/gcloud/storage/test_iterator.py
+++ b/gcloud/storage/test_iterator.py
@@ -365,24 +365,14 @@ class _Connection(object):
self._requested = []
def make_request(self, **kw):
- from gcloud.storage.exceptions import NotFoundError
self._requested.append(kw)
- try:
- response, self._responses = self._responses[0], self._responses[1:]
- except:
- raise NotFoundError('miss', None)
- else:
- return response
+ response, self._responses = self._responses[0], self._responses[1:]
+ return response
def api_request(self, **kw):
- from gcloud.storage.exceptions import NotFoundError
self._requested.append(kw)
- try:
- response, self._responses = self._responses[0], self._responses[1:]
- except:
- raise NotFoundError('miss', None)
- else:
- return response
+ response, self._responses = self._responses[0], self._responses[1:]
+ return response
def build_api_url(self, path, query_params=None):
from urllib import urlencode
diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py
index <HASH>..<HASH> 100644
--- a/gcloud/storage/test_key.py
+++ b/gcloud/storage/test_key.py
@@ -557,9 +557,7 @@ class Test_Key(unittest2.TestCase):
class _Response(dict):
- @property
- def status(self):
- return self.get('status', 200)
+ pass
class _Connection(object):
@@ -570,24 +568,14 @@ class _Connection(object):
self._requested = []
def make_request(self, **kw):
- from gcloud.storage.exceptions import NotFoundError
self._requested.append(kw)
- try:
- response, self._responses = self._responses[0], self._responses[1:]
- except:
- raise NotFoundError('miss', None)
- else:
- return response
+ response, self._responses = self._responses[0], self._responses[1:]
+ return response
def api_request(self, **kw):
- from gcloud.storage.exceptions import NotFoundError
self._requested.append(kw)
- try:
- response, self._responses = self._responses[0], self._responses[1:]
- except:
- raise NotFoundError('miss', None)
- else:
- return response
+ response, self._responses = self._responses[0], self._responses[1:]
+ return response
def build_api_url(self, path, query_params=None,
api_base_url=API_BASE_URL):
diff --git a/tox.ini b/tox.ini
index <HASH>..<HASH> 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,7 @@ deps =
basepython =
python2.7
commands =
- nosetests --with-xunit --with-xcoverage --cover-package=gcloud --nocapture --cover-erase
+ nosetests --with-xunit --with-xcoverage --cover-package=gcloud --nocapture --cover-erase --cover-tests
deps =
nose
unittest2 | Ensure tests get coverage too.
Remove cruft surfaced by reporting coverage on test modules. | googleapis_google-cloud-python | train |
a2a87a295911b3bef492031cf306be4bd76506b0 | diff --git a/wsgicors.py b/wsgicors.py
index <HASH>..<HASH> 100644
--- a/wsgicors.py
+++ b/wsgicors.py
@@ -48,6 +48,7 @@ class CORS(object):
self.pol_origin = kw.get("origin", "")
self.pol_methods = kw.get("methods", "") # * or list of methods
self.pol_headers = kw.get("headers", "") # * or list of headers
+ self.pol_expose_headers = kw.get("expose_headers", "") # * or list of headers
self.pol_credentials = kw.get("credentials", "false") # true or false
self.pol_maxage = kw.get("maxage", "") # in seconds
@@ -133,6 +134,9 @@ class CORS(object):
if self.pol_credentials == 'true':
headers.append(('Access-Control-Allow-Credentials', 'true'))
+ if self.pol_expose_headers:
+ headers.append(('Access-Control-Expose-Headers', self.pol_expose_headers))
+
return start_response(status, headers, exc_info)
else:
custom_start_response = start_response | Add support for Access-Control-Expose-Headers in responses to real (not preflight) requests. | may-day_wsgicors | train |
f0b312f716833010330028fbb38122b584bf2e5d | diff --git a/lib/bson/binary.rb b/lib/bson/binary.rb
index <HASH>..<HASH> 100644
--- a/lib/bson/binary.rb
+++ b/lib/bson/binary.rb
@@ -50,10 +50,12 @@ module BSON
# @!attribute data
# @return [ Object ] The raw binary data.
# @since 2.0.0
+ attr_reader :data
+
# @!attribute type
# @return [ Symbol ] The binary type.
# @since 2.0.0
- attr_reader :data, :type
+ attr_reader :type
# Determine if this binary object is equal to another object.
# | Separate attribute declarations for readability (#<I>) | mongodb_bson-ruby | train |
c17a76de3753363a90f5e217b31b9951fff1a115 | diff --git a/metrics-jetty9/src/test/java/com/codahale/metrics/jetty9/InstrumentedHandlerTest.java b/metrics-jetty9/src/test/java/com/codahale/metrics/jetty9/InstrumentedHandlerTest.java
index <HASH>..<HASH> 100644
--- a/metrics-jetty9/src/test/java/com/codahale/metrics/jetty9/InstrumentedHandlerTest.java
+++ b/metrics-jetty9/src/test/java/com/codahale/metrics/jetty9/InstrumentedHandlerTest.java
@@ -21,7 +21,6 @@ import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.LockSupport;
import static org.assertj.core.api.Assertions.assertThat;
@@ -158,7 +157,11 @@ public class InstrumentedHandlerTest {
switch (path) {
case "/blocking":
request.setHandled(true);
- LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(1));
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
httpServletResponse.setStatus(200);
httpServletResponse.setContentType("text/plain");
httpServletResponse.getWriter().write("some content from the blocking request\n");
@@ -167,7 +170,11 @@ public class InstrumentedHandlerTest {
request.setHandled(true);
final AsyncContext context = request.startAsync();
Thread t = new Thread(() -> {
- LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(1));
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
httpServletResponse.setStatus(200);
httpServletResponse.setContentType("text/plain");
final ServletOutputStream servletOutputStream; | chore: Use `Thread.sleep` instead of `LockSupport.park` in tests (#<I>)
`LockSupport.park` can spuriously return, which makes the tests flaky.
See: <URL> | dropwizard_metrics | train |
5571a33a7908b24d49cff9769f2c54fddc770b1f | diff --git a/plugins/gravatar/plugin_tests/gravatar_test.py b/plugins/gravatar/plugin_tests/gravatar_test.py
index <HASH>..<HASH> 100644
--- a/plugins/gravatar/plugin_tests/gravatar_test.py
+++ b/plugins/gravatar/plugin_tests/gravatar_test.py
@@ -95,3 +95,21 @@ class GravatarTest(base.TestCase):
self.assertStatusOk(resp)
self.admin = self.model('user').load(self.admin['_id'], force=True)
self.assertFalse('gravatar_baseUrl' in self.admin)
+
+ def testUserInfoUpdate(self):
+ user = self.model('user').createUser(
+ email='[email protected]',
+ login='normal',
+ firstName='normal',
+ lastName='normal',
+ password='password',
+ admin=False
+ )
+
+ resp = self.request('/user/%s' % str(user['_id']), method='PUT',
+ user=user, params={
+ 'email': '[email protected]',
+ 'firstName': 'normal',
+ 'lastName': 'normal'
+ })
+ self.assertStatusOk(resp)
diff --git a/plugins/gravatar/server/__init__.py b/plugins/gravatar/server/__init__.py
index <HASH>..<HASH> 100644
--- a/plugins/gravatar/server/__init__.py
+++ b/plugins/gravatar/server/__init__.py
@@ -83,6 +83,7 @@ def _validateSettings(event):
_cachedDefaultImage = None
[email protected]
def _userUpdate(event):
"""
Called when the user document is being changed. If the email field changes, | Gravatar plugin was causing an error on user info update
Non-admin users were getting a cryptic "Administrator access
required" error when trying to update their user information. This
was because the gravatar plugin had registered a handler to a
REST hook without declaring an access level on itself. | girder_girder | train |
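A sketch of the pattern the fix applies: Girder handlers need an explicit access level, otherwise the default check rejects non-admin callers. Only the `@access.user` decorator is taken from the diff; the import path and handler body are assumptions:

```python
from girder.api import access  # assumed import path for the decorator

@access.user  # any authenticated user may trigger this handler
def _userUpdate(event):
    # Called when a user document changes; without the decorator above,
    # non-admin users updating their own info hit "Administrator access
    # required" once this plugin is enabled.
    pass
```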
8a32f32a7c627cb298d8d682751ef36aa79f1db3 | diff --git a/src/browser-detector.js b/src/browser-detector.js
index <HASH>..<HASH> 100644
--- a/src/browser-detector.js
+++ b/src/browser-detector.js
@@ -5,7 +5,7 @@ var detector = module.exports = {};
detector.isIE = function(version) {
function isAnyIeVersion() {
var agent = navigator.userAgent.toLowerCase();
- return agent.indexOf("msie") !== -1 || agent.indexOf("trident") !== -1;
+ return agent.indexOf("msie") !== -1 || agent.indexOf("trident") !== -1 || agent.indexOf(" edge/") !== -1;
}
if(!isAnyIeVersion()) { | Handle Edge as an IE browser.
Edge was handled as a non-IE browser, and no resize events were triggered. Now, when handling Edge as an IE browser, events are triggered again. | wnr_element-resize-detector | train
18ddcc0816be2b38993764dfa3b8ec27e7eb5d7a | diff --git a/lib/ModelRoot.js b/lib/ModelRoot.js
index <HASH>..<HASH> 100644
--- a/lib/ModelRoot.js
+++ b/lib/ModelRoot.js
@@ -28,11 +28,14 @@ function ModelRoot(o) {
ModelRoot.prototype.errorSelector = function errorSelector(x, y) {
return y;
};
-ModelRoot.prototype.comparator = function comparator(a, b) {
- if (hasOwn(a, "value") && hasOwn(b, "value")) {
- return a.value === b.value;
+ModelRoot.prototype.comparator = function comparator(cacheNode, messageNode) {
+ if (hasOwn(cacheNode, "value") && hasOwn(messageNode, "value")) {
+ // They are the same only if the following fields are the same.
+ return cacheNode.value === messageNode.value &&
+ cacheNode.$type === messageNode.$type &&
+ cacheNode.$expires === messageNode.$expires;
}
- return a === b;
+ return cacheNode === messageNode;
};
module.exports = ModelRoot;
diff --git a/test/internal/index.js b/test/internal/index.js
index <HASH>..<HASH> 100644
--- a/test/internal/index.js
+++ b/test/internal/index.js
@@ -1,4 +1,5 @@
describe('Internal', function() {
require('./request/GetRequest.spec');
require('./request/RequestQueue.spec');
+ require('./ModelRoot.comparator.spec');
}); | merged in <I>c9f<I>cd8e8d0f<I>b0e<I>ecc<I>fc | Netflix_falcor | train |
e7e50ed76713e99128037a16ffb0807fbf108a0a | diff --git a/go/engine/scankeys.go b/go/engine/scankeys.go
index <HASH>..<HASH> 100644
--- a/go/engine/scankeys.go
+++ b/go/engine/scankeys.go
@@ -337,7 +337,8 @@ func (s *ScanKeys) unlockAll(m libkb.MetaContext) openpgp.EntityList {
Reason: unlockReason,
SecretUI: m.UIs().SecretUI,
}
- unlocked, err := skb.PromptAndUnlock(m, parg, nil, s.me)
+ secretStore := libkb.NewSecretStore(m.G(), s.me.GetNormalizedName())
+ unlocked, err := skb.PromptAndUnlock(m, parg, secretStore, s.me)
if err != nil {
m.Warning("error unlocking key: %s", err)
continue | Supply secret store to prompt and unlock (#<I>) | keybase_client | train |