hash: string, lengths 40 to 40
diff: string, lengths 131 to 114k
message: string, lengths 7 to 980
project: string, lengths 5 to 67
split: string, 1 class (1 value)
ca6d9b77a0d8ce9e1c9219347e847413db38ab63
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -60,7 +60,8 @@ As a component ### function - - function (value) + - is.function(value) + - is.aFunction(value) - for ES3 browsers, where "function" is a reserved word ### number diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -369,7 +369,7 @@ is.error = function (value) { */ /** - * is.function + * is.function / is.aFunction * Test if `value` is a function. * * @param {Mixed} value value to test @@ -377,7 +377,7 @@ is.error = function (value) { * @api public */ -is.function = function(value) { +is['function'] = is.aFunction = function(value) { return '[object Function]' === toString.call(value); }; diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -198,14 +198,15 @@ test('is.error', function (t) { }); test('is.function', function (t) { - t.true(is.function(function () {}), 'function is function'); - t.true(is.function(console.log), 'console.log is function'); + t.equal(is['function'], is.aFunction, 'alias works'); + t.true(is.aFunction(function () {}), 'function is function'); + t.true(is.aFunction(console.log), 'console.log is function'); if (typeof window !== 'undefined') { // in IE7/8, typeof alert === 'object' - t.true(is.function(window.alert), 'window.alert is function'); + t.true(is.aFunction(window.alert), 'window.alert is function'); } - t.false(is.function({}), 'object is not function'); - t.false(is.function(null), 'null is not function'); + t.false(is.aFunction({}), 'object is not function'); + t.false(is.aFunction(null), 'null is not function'); t.end(); });
"is.function" will throw a syntax error in older browsers, because in ES3, "function" is a reserved word.
enricomarino_is
train
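A note on the enricomarino/is record above: in ES3, "function" is a reserved word, so the dotted assignment is.function = ... is a parse error in old engines, while bracket access is always legal. A minimal TypeScript sketch of the pattern the diff adopts (the is object here is a stand-in, not the library's real export):

// Illustrative stand-in for the library's `is` object.
const is: Record<string, (value: unknown) => boolean> = {};

// Bracket notation avoids the ES3 reserved-word parse error
// that a dotted `is.function = ...` assignment would trigger.
is['function'] = (value: unknown): boolean =>
  Object.prototype.toString.call(value) === '[object Function]';

// A non-reserved alias parses everywhere, even in ES3 browsers.
is.aFunction = is['function'];

console.log(is.aFunction(console.log)); // true
console.log(is.aFunction({}));          // false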
0714cc4139acc08cb1557a8c1cc4c5bc1a74a996
diff --git a/tests/test_multi_user.py b/tests/test_multi_user.py index <HASH>..<HASH> 100644 --- a/tests/test_multi_user.py +++ b/tests/test_multi_user.py @@ -22,10 +22,10 @@ class TestCase(BaseTestCase): self.prepare_client('/multi_user2/', username='test_user') resp = self.client.post() resp.raw() - resp = self.client.post() - resp.raw() - resp = self.client.post() - resp.raw() + # resp = self.client.post() + # resp.raw() + # resp = self.client.post() + # resp.raw() @classmethod def create_wrong_user(cls): diff --git a/zengine/engine.py b/zengine/engine.py index <HASH>..<HASH> 100644 --- a/zengine/engine.py +++ b/zengine/engine.py @@ -216,6 +216,7 @@ class WFCurrent(Current): self.task_type = '' self.task = None self.pool = {} + self.flow_enabled = True self.task_name = '' self.activity = '' self.lane_permissions = [] @@ -564,9 +565,11 @@ class ZEngine(object): """ # FIXME: raise if first task after line change isn't a UserTask # actually this check should be done at parser - while (self.current.task_type != 'UserTask' and - not self.current.task_type.startswith('End')): + while (self.current.flow_enabled and + self.current.task_type != 'UserTask' and not + self.current.task_type.startswith('End')): for task in self.workflow.get_tasks(state=Task.READY): + self.current.old_lane = self.current.lane_name self.current._update_task(task) self.check_for_permission() self.check_for_lane_permission() @@ -575,16 +578,16 @@ class ZEngine(object): self.parse_workflow_messages() self.workflow.complete_task_from_id(self.current.task.id) self._save_workflow() - self.catch_lane_change(for_what='sendoff') + self.current.output['token'] = self.current.token # look for incoming ready task(s) for task in self.workflow.get_tasks(state=Task.READY): self.current._update_task(task) - self.catch_lane_change(for_what='notify') + self.catch_lane_change() self.handle_wf_finalization() - def catch_lane_change(self, for_what=None): + def catch_lane_change(self): """ trigger a lane_user_change signal if we switched to a new lane and new lane's user is different from current one @@ -594,9 +597,9 @@ class ZEngine(object): # if lane_name not found in pool or it's user different from the current(old) user if (self.current.lane_name not in self.current.pool or self.current.pool[self.current.lane_name] != self.current.user_id): - if self.current.lane_auto_sendoff and for_what == 'sendoff': - self.current.sendoff_current_user() - if self.current.lane_auto_invite and for_what == 'notify': + self.current.sendoff_current_user() + self.current.flow_enabled = False + if self.current.lane_auto_invite: self.current.invite_other_parties(self._get_possible_lane_owners()) self.current.old_lane = self.current.lane_name @@ -780,5 +783,6 @@ class ZEngine(object): """ Removes the ``token`` key from ``current.output`` if WF is over. """ - if self.current.task_type.startswith('End') and 'token' in self.current.output: + if ((self.current.flow_enabled or self.current.task_type.startswith('End')) and + 'token' in self.current.output): del self.current.output['token']
multi-lane looks OK but other WFs are broken. rref #<I> ref GH-<I> rref #<I> ref GH-<I>
zetaops_zengine
train
e400609e0fa84a67f544ac8c9a0f829cc2be9ae5
diff --git a/salt/client.py b/salt/client.py index <HASH>..<HASH> 100644 --- a/salt/client.py +++ b/salt/client.py @@ -27,6 +27,7 @@ The data structurte needs to be: import os import re +import glob import time import cPickle as pickle @@ -66,6 +67,30 @@ class LocalClient(object): pub_data = self.pub(tgt, fun, arg) return self.get_returns(pub_data['jid'], pub_data['minions'], timeout) + def _check_glob_minions(self, expr): + ''' + Return the minions found by looking via globs + ''' + cwd = os.getcwd() + os.chdir(os.path.join(self.opts['pki_dir'], 'minions')) + ret = set(glob.glob(expr)) + os.chdir(cwd) + return ret + + def _check_re_minions(self, expr): + ''' + Return the minions found by looking via regular expresions + ''' + ret = set() + cwd = os.getcwd() + os.chdir(os.path.join(self.opts['pki_dir'], 'minions')) + reg = re.compile(expr) + for fn_ in os.listdir('.'): + if reg.match(fn_): + ret.add(fn_) + os.chdir(cwd) + return ret + def get_returns(self, jid, minions, timeout=5): ''' This method starts off a watcher looking at the return data for a @@ -92,22 +117,15 @@ class LocalClient(object): return ret time.sleep(0.02) - def check_minions(self, expr): + def check_minions(self, expr, expr_form='glob'): ''' Check the passed regex against the available minions' public keys stored for authentication. This should return a set of hostnames which match the regex, this will then be used to parse the returns to make sure everyone has checked back in. ''' - ret = set() - cwd = os.getcwd() - os.chdir(os.path.join(self.opts['pki_dir'], 'minions')) - reg = re.compile(expr) - for fn_ in os.listdir('.'): - if reg.match(fn_): - ret.add(fn_) - os.chdir(cwd) - return ret + return {'glob': self._check_glob_minions, + 'pcre': self._check_pcre_minions}[exper_form](expr) def pub(self, tgt, fun, arg=()): ''' @@ -148,6 +166,8 @@ class LocalClient(object): fun=fun, arg=arg, key=self.key) + if self.opts.has_key('tgt_type'): + package['tgt_type': self.opts['tgt_type']] # Prep zmq context = zmq.Context() socket = context.socket(zmq.REQ)
Add support for globs and regex in the target syntax
saltstack_salt
train
01adda0970bd98709e6d6169b625461fded330bc
diff --git a/udiskie/cli.py b/udiskie/cli.py index <HASH>..<HASH> 100644 --- a/udiskie/cli.py +++ b/udiskie/cli.py @@ -251,7 +251,10 @@ class _EntryPoint(object): """Start asynchronous operations.""" try: self.udisks = yield get_backend(self.options['udisks_version']) - yield self._init() + results = yield self._init() + if not all(success and all(result) + for success, result in results): + self.exit_status = 1 except Exception: self.exit_status = 1 # Print the stack trace only up to the current level:
Set non-zero exit code in case of error. Resolves #<I>
coldfix_udiskie
train
2b414cc3e6efb6dc657ae87aa550838f8f77e882
diff --git a/pypln/api.py b/pypln/api.py index <HASH>..<HASH> 100644 --- a/pypln/api.py +++ b/pypln/api.py @@ -66,6 +66,16 @@ class Corpus(object): def __hash__(self): return hash(repr(self)) + @classmethod + def from_url(cls, url, auth): + result = requests.get(url, auth=auth) + if result.status_code == 200: + return cls(**result.json()) + else: + raise RuntimeError("Getting corpus details failed with status " + "{}. The response was: '{}'".format(result.status_code, + result.text)) + def add_document(self, file_object, filename): '''Add a document to this corpus''' pass diff --git a/tests/test_pypln.py b/tests/test_pypln.py index <HASH>..<HASH> 100644 --- a/tests/test_pypln.py +++ b/tests/test_pypln.py @@ -88,6 +88,8 @@ class CorpusTest(unittest.TestCase): 'name': 'test', 'owner': 'user', 'url': 'http://pypln.example.com/corpora/1/'} + self.user = "user" + self.password = "password" def test_instantiate_corpus_from_json(self): corpus = Corpus(**self.example_json) @@ -145,3 +147,29 @@ class CorpusTest(unittest.TestCase): corpus_2 = Corpus(**json_2) self.assertNotEqual(corpus_1, corpus_2) + + @patch("requests.get") + def test_instantiate_corpus_from_url(self, mocked_get): + mocked_get.return_value.status_code = 200 + mocked_get.return_value.json.return_value = self.example_json + + url = self.example_json['url'] + + corpus = Corpus.from_url(url, (self.user, self.password)) + + mocked_get.assert_called_with(url, auth=(self.user, self.password)) + + self.assertIsInstance(corpus, Corpus) + + for k, v in self.example_json.items(): + self.assertEqual(getattr(corpus, k), v) + + @patch("requests.get") + def test_instantiating_corpus_from_url_fails(self, mocked_get): + mocked_get.return_value.status_code = 403 + mocked_get.return_value.json.return_value = self.example_json + + url = self.example_json['url'] + + with self.assertRaises(RuntimeError): + corpus = Corpus.from_url(url, (self.user, self.password))
Allows user to instantiate a corpus from a URL
NAMD_pypln.api
train
adc9cb215ae466d4252e322b6a83359d9a962477
diff --git a/src/test/java/redis/clients/jedis/tests/commands/ClusterCommandsTest.java b/src/test/java/redis/clients/jedis/tests/commands/ClusterCommandsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/redis/clients/jedis/tests/commands/ClusterCommandsTest.java +++ b/src/test/java/redis/clients/jedis/tests/commands/ClusterCommandsTest.java @@ -9,6 +9,7 @@ import org.junit.Test; import redis.clients.jedis.HostAndPort; import redis.clients.jedis.Jedis; +import redis.clients.jedis.exceptions.JedisDataException; import redis.clients.jedis.tests.HostAndPortUtil; import redis.clients.jedis.tests.JedisTestBase; @@ -49,6 +50,11 @@ public class ClusterCommandsTest extends JedisTestBase { node2.clusterDelSlots(6000, 1, 2, 3, 4, 5, 500, 5000); node1.clusterAddSlots(6000); node1.clusterDelSlots(6000); + try { + node2.clusterDelSlots(10000); + } catch (JedisDataException jde) { + // Do nothing, slot may or may not be assigned depending on gossip + } } private static void waitForEqualClusterSize() throws InterruptedException {
Fix ClusterCommandTest AfterClass handling as it may leave the cluster inconsistent
xetorthio_jedis
train
50fc8ec8074999f65da537a57e743d6809c702ef
diff --git a/lib/safenet.rb b/lib/safenet.rb index <HASH>..<HASH> 100644 --- a/lib/safenet.rb +++ b/lib/safenet.rb @@ -296,7 +296,7 @@ module SafeNet # Optional payload payload = {} - payload["name"] = SafeNet.escape(options[:name]) if options.has_key?(:name) + payload["name"] = options[:name] if options.has_key?(:name) payload["metadata"] = Base64.strict_encode64(options[:meta]) if options.has_key?(:meta) # API call @@ -476,7 +476,7 @@ module SafeNet # Optional payload payload = {} - payload["name"] = SafeNet.escape(options[:name]) if options.has_key?(:name) + payload["name"] = options[:name] if options.has_key?(:name) payload["metadata"] = Base64.strict_encode64(options[:meta]) if options.has_key?(:meta) # API call diff --git a/lib/safenet/version.rb b/lib/safenet/version.rb index <HASH>..<HASH> 100644 --- a/lib/safenet/version.rb +++ b/lib/safenet/version.rb @@ -1,3 +1,3 @@ module Safenet - VERSION = "0.5.6" + VERSION = "0.5.7" end
Bugfix in the move_file method: if the path has a space in it, the space was being changed to %<I> (sketched below).
loureirorg_ruby-safenet
train
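On the ruby-safenet record above: the bug was double-escaping. The client URL-escaped the name payload field, so a space in a path became a percent-encoded sequence before being stored, instead of staying a literal space in the JSON body. A tiny TypeScript illustration of the distinction (values are hypothetical):

const name = 'my file.txt';

// Percent-encoding belongs in URL components, not in JSON payload values.
console.log(encodeURIComponent(name)); // "my%20file.txt"  <- what the bug stored
console.log(JSON.stringify({ name })); // {"name":"my file.txt"}  <- what the fix sends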
2497d88639852ce13b7fdbf86ea0e0c53b92c3f4
diff --git a/src/utils/helpers.js b/src/utils/helpers.js index <HASH>..<HASH> 100644 --- a/src/utils/helpers.js +++ b/src/utils/helpers.js @@ -2,7 +2,7 @@ * Get value of an object property/path even if it's nested */ export function getValueByPath(obj, path) { - const value = path.split('.').reduce((o, i) => o[i], obj) + const value = path.split('.').reduce((o, i) => o ? o[i] : null, obj) return value }
improve reliability of getValueByPath helper function (#<I>)
buefy_buefy
train
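On the buefy record above: the one-character fix guards each step of the reduce, so a missing intermediate key yields null instead of throwing a TypeError. A short TypeScript sketch of the repaired helper, assuming plain dot-separated paths:

// Null-safe nested lookup: bails out with null at the first missing step.
function getValueByPath(obj: any, path: string): any {
  return path.split('.').reduce((o, key) => (o ? o[key] : null), obj);
}

console.log(getValueByPath({ a: { b: 1 } }, 'a.b')); // 1
console.log(getValueByPath({ a: {} }, 'a.b.c'));     // null rather than a TypeError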
2098df9d60532c1dd9d1ad105ec03d812acf50e4
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ </a> <a href="https://insight.sensiolabs.com/projects/703f233e-0738-4bf3-9d47-09d3c6de19b0" target="_blank"> <img src="https://insight.sensiolabs.com/projects/703f233e-0738-4bf3-9d47-09d3c6de19b0/mini.png"> - </a> + </a> </p> Keep it simple, *stupid*! @@ -22,14 +22,13 @@ Keep it simple, *stupid*! ###### index.php ```php <?php -/*K*/ require_once __DIR__.'/../vendor/autoload.php'; -/*I*/ use function Siler\Route\route; -/*S*/ route('/', 'pages/home.php'); +use function Siler\Route\route; +require_once __DIR__.'/../vendor/autoload.php'; +route('get', '/', 'pages/home.php'); ``` ###### pages/home.php ```php -<?php -/*S*/ echo 'Hello World'; +<?php echo 'Hello World'; ``` Get it? diff --git a/example/index.php b/example/index.php index <HASH>..<HASH> 100644 --- a/example/index.php +++ b/example/index.php @@ -1,7 +1,4 @@ <?php - use function Siler\Route\route; - require_once __DIR__.'/../vendor/autoload.php'; - -route('/', 'pages/home.php'); +route('get', '/', 'pages/home.php'); diff --git a/src/Route/Route.php b/src/Route/Route.php index <HASH>..<HASH> 100644 --- a/src/Route/Route.php +++ b/src/Route/Route.php @@ -5,16 +5,17 @@ namespace Siler\Route; -use function Siler\Http\path; +use Siler\Http; use function Siler\require_fn; /** * Define a new route * + * @param string $method The HTTP request method to listen on * @param string $path The HTTP URI to listen on * @param string|callable $callback The callable to be executed or a string to be used with Siler\require_fn */ -function route($path, $callback) +function route($method, $path, $callback) { $path = preg_replace('/\{([A-z]+)\}/', '(?<$1>.*)', $path); $path = "#^{$path}/?$#"; @@ -23,7 +24,7 @@ function route($path, $callback) $callback = require_fn($callback); } - if (preg_match($path, path(), $params)) { + if (Http\method_is($method) && preg_match($path, Http\path(), $params)) { $callback($params); } } diff --git a/tests/Route/RouteTest.php b/tests/Route/RouteTest.php index <HASH>..<HASH> 100644 --- a/tests/Route/RouteTest.php +++ b/tests/Route/RouteTest.php @@ -14,6 +14,7 @@ class RouteTest extends TestCase $_SERVER['HTTP_HOST'] = 'test:8000'; $_SERVER['SCRIPT_NAME'] = '/foo/test.php'; $_SERVER['REQUEST_URI'] = '/bar/baz'; + $_SERVER['REQUEST_METHOD'] = 'GET'; } /** @@ -22,15 +23,15 @@ class RouteTest extends TestCase */ public function testRouteMatching() { - route('/foo', function ($params) { + route('get', '/foo', function ($params) { throw new \Exception('Route /foo should not match'); }); - route('/bar', function ($params) { + route('get', '/bar', function ($params) { throw new \Exception('Route /bar should not match'); }); - route('/bar/baz', function ($params) { + route('get', '/bar/baz', function ($params) { throw new \Exception('Route /bar/baz should match'); }); } @@ -41,7 +42,7 @@ class RouteTest extends TestCase */ public function testRouteRegexp() { - route('/bar/([a-z]+)', function ($params) { + route('get', '/bar/([a-z]+)', function ($params) { throw new \Exception($params[1]); }); } @@ -52,7 +53,7 @@ class RouteTest extends TestCase */ public function testRouteNamedGroup() { - route('/bar/{baz}', function ($params) { + route('get', '/bar/{baz}', function ($params) { throw new \Exception($params['baz']); }); } @@ -63,6 +64,23 @@ class RouteTest extends TestCase */ public function testRouteWithString() { - route('/bar/{bar}', __DIR__.'/../fixtures/throw.php'); + route('get', '/bar/{bar}', 
__DIR__.'/../fixtures/throw.php'); + } + + /** + * @expectedException \Exception + * @expectedExceptionMessage Route POST /bar/baz should match + */ + public function testRouteMethod() + { + $_SERVER['REQUEST_METHOD'] = 'POST'; + + route('get', '/bar/baz', function ($params) { + throw new \Exception('Route GET /bar/baz should not match'); + }); + + route('post', '/bar/baz', function ($params) { + throw new \Exception('Route POST /bar/baz should match'); + }); } }
Explicitly declare the HTTP method for the route
leocavalcante_siler
train
65c6cc6fec53d67879d738dce38fe1022dbf91b3
diff --git a/oidc_provider/lib/utils/token.py b/oidc_provider/lib/utils/token.py index <HASH>..<HASH> 100644 --- a/oidc_provider/lib/utils/token.py +++ b/oidc_provider/lib/utils/token.py @@ -91,7 +91,12 @@ def client_id_from_id_token(id_token): Returns a string or None. """ payload = JWT().unpack(id_token).payload() - return payload.get('aud', None) + aud = payload.get('aud', None) + if aud is None: + return None + if isinstance(aud, list): + return aud[0] + return aud def create_token(user, client, scope, id_token_dic=None): diff --git a/oidc_provider/tests/test_end_session_endpoint.py b/oidc_provider/tests/test_end_session_endpoint.py index <HASH>..<HASH> 100644 --- a/oidc_provider/tests/test_end_session_endpoint.py +++ b/oidc_provider/tests/test_end_session_endpoint.py @@ -30,21 +30,40 @@ class EndSessionTestCase(TestCase): self.url = reverse('oidc_provider:end-session') - def test_redirects(self): + def test_redirects_when_aud_is_str(self): query_params = { 'post_logout_redirect_uri': self.LOGOUT_URL, } response = self.client.get(self.url, query_params) - # With no id_token the OP MUST NOT redirect to the requested redirect_uri. - self.assertRedirects(response, settings.get('OIDC_LOGIN_URL'), fetch_redirect_response=False) + # With no id_token the OP MUST NOT redirect to the requested + # redirect_uri. + self.assertRedirects( + response, settings.get('OIDC_LOGIN_URL'), + fetch_redirect_response=False) - id_token_dic = create_id_token(user=self.user, aud=self.oidc_client.client_id) + id_token_dic = create_id_token( + user=self.user, aud=self.oidc_client.client_id) id_token = encode_id_token(id_token_dic, self.oidc_client) query_params['id_token_hint'] = id_token response = self.client.get(self.url, query_params) - self.assertRedirects(response, self.LOGOUT_URL, fetch_redirect_response=False) + self.assertRedirects( + response, self.LOGOUT_URL, fetch_redirect_response=False) + + def test_redirects_when_aud_is_list(self): + """Check with 'aud' containing a list of str.""" + query_params = { + 'post_logout_redirect_uri': self.LOGOUT_URL, + } + id_token_dic = create_id_token( + user=self.user, aud=self.oidc_client.client_id) + id_token_dic['aud'] = [id_token_dic['aud']] + id_token = encode_id_token(id_token_dic, self.oidc_client) + query_params['id_token_hint'] = id_token + response = self.client.get(self.url, query_params) + self.assertRedirects( + response, self.LOGOUT_URL, fetch_redirect_response=False) @mock.patch(settings.get('OIDC_AFTER_END_SESSION_HOOK')) def test_call_post_end_session_hook(self, hook_function):
Fixed client id retrieval when aud is a list of str. (#<I>) * Fixed client id retrieval when aud is a list of str. * Split tests.
juanifioren_django-oidc-provider
train
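On the django-oidc-provider record above: under the JWT rules, an ID token's aud claim may be either a single string or an array of strings, which is why the fix branches on the payload type. A hedged TypeScript sketch of the same normalization (the function name is illustrative):

// `aud` may arrive as a string or a list of strings; take the first client id either way.
function clientIdFromAud(aud: string | string[] | undefined): string | null {
  if (aud === undefined) return null;
  return Array.isArray(aud) ? aud[0] ?? null : aud;
}

console.log(clientIdFromAud('client-1'));           // "client-1"
console.log(clientIdFromAud(['client-1', 'rp-2'])); // "client-1"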
3e0cb229df42a7a985d4dce6f64cd389309a30e4
diff --git a/ev3dev/core.py b/ev3dev/core.py index <HASH>..<HASH> 100644 --- a/ev3dev/core.py +++ b/ev3dev/core.py @@ -2814,9 +2814,10 @@ class Sound: return Popen('/usr/bin/aplay -q "%s"' % wav_file, stdout=n, shell=True) @staticmethod - def speak(text): + def speak(text, espeak_opts='-a 200 -s 130'): """ Speak the given text aloud. """ with open(os.devnull, 'w') as n: - return Popen('/usr/bin/espeak -a 200 --stdout "%s" | /usr/bin/aplay -q' % text, stdout=n, shell=True) + cmd_line = '/usr/bin/espeak --stdout {0} "{1}" | /usr/bin/aplay -q'.format(espeak_opts, text) + return Popen(cmd_line, stdout=n, shell=True)
Allow passing espeak options to Sound.speak (#<I>) My main motivation is to allow control over the words-per-minute rate, but I guess there are other possible uses.
ev3dev_ev3dev-lang-python
train
c1d5d77566a4c907b0923b8a7493fad0fbf1188d
diff --git a/translator/src/main/java/com/google/devtools/j2objc/javac/JavacEnvironment.java b/translator/src/main/java/com/google/devtools/j2objc/javac/JavacEnvironment.java index <HASH>..<HASH> 100644 --- a/translator/src/main/java/com/google/devtools/j2objc/javac/JavacEnvironment.java +++ b/translator/src/main/java/com/google/devtools/j2objc/javac/JavacEnvironment.java @@ -27,6 +27,7 @@ import com.sun.tools.javac.util.Name; import com.sun.tools.javac.util.Names; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.util.Map; import javax.lang.model.element.Element; import javax.lang.model.element.PackageElement; import javax.lang.model.util.Elements; @@ -64,14 +65,14 @@ class JavacEnvironment implements ParserEnvironment { Name className = javacNames.fromString(name); // Check first if compiler already created or loaded the class. - ClassSymbol symbol = symbolTable.classes.get(className); + ClassSymbol symbol = getClass(className); if (symbol == null) { // Not available, read it from a class file. // Note: the enterName(Name) method moved from ClassReader to // Symtab in Java 9. Reflection is used to support both locations. symbol = enterClassJavac(className); if (symbol != null) { - symbolTable.classes.put(className, symbol); + putClass(className, symbol); } else { symbol = enterClassJavac9(className); // The symbolTable is already updated in Java 9. @@ -80,8 +81,52 @@ class JavacEnvironment implements ParserEnvironment { return symbol; } + @SuppressWarnings("unchecked") + private ClassSymbol getClass(Name className) { + try { + return (ClassSymbol) + ((Map<Name, ClassSymbol>) Symtab.class.getField("classes").get(symbolTable)) + .get(className); + } catch (ReflectiveOperationException e) { + // continue + } + try { + // Java 9 + Class<?> moduleSymbolCls = Class.forName("com.sun.tools.javac.code.Symbol$ModuleSymbol"); + Object javaBaseModule = Symtab.class.getDeclaredField("java_base").get(symbolTable); + return (ClassSymbol) + Symtab.class + .getMethod("getClass", moduleSymbolCls, Name.class) + .invoke(symbolTable, javaBaseModule, className); + } catch (ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); + } + } + + @SuppressWarnings("unchecked") + private void putClass(Name className, ClassSymbol symbol) { + try { + ((Map<Name, ClassSymbol>) Symtab.class.getField("classes").get(symbolTable)) + .put(className, symbol); + } catch (ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); + } + } + public PackageElement defaultPackage() { - return symbolTable.unnamedPackage; + try { + return (PackageElement) Symtab.class.getField("unnamedPackage").get(symbolTable); + } catch (NoSuchFieldException e) { + // continue + } catch (ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); + } + try { + Object unnamedModule = Symtab.class.getField("unnamedModule"); + return (PackageElement) unnamedModule.getClass().getField("unnamedPackage").get(symbolTable); + } catch (ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); + } } private ClassSymbol enterClassJavac(Name className) { @@ -98,26 +143,20 @@ class JavacEnvironment implements ParserEnvironment { try { Method m = Symtab.class.getDeclaredMethod("enterClass", Name.class); return (ClassSymbol) m.invoke(symbolTable, className); - } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { - return null; + } catch (NoSuchMethodException e) { + // continue + } catch 
(ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); + } + try { + Class<?> moduleSymbolCls = Class.forName("com.sun.tools.javac.code.Symbol$ModuleSymbol"); + Object javaBaseModule = Symtab.class.getDeclaredField("java_base").get(symbolTable); + Method enterClass = + Symtab.class.getDeclaredMethod("enterClass", moduleSymbolCls, Name.class); + return (ClassSymbol) enterClass.invoke(symbolTable, javaBaseModule, className); + } catch (ReflectiveOperationException e) { + throw new LinkageError(e.getMessage(), e); } - -// TODO(tball): update to this version with -source 1.9. -// try { -// Field javaBaseField = Names.class.getDeclaredField("java_base"); -// Name javaBaseName = (Name) javaBaseField.get(javacNames); -// -// Class<?> moduleSymbolCls = Class.forName("com.sun.tools.javac.code.Symbol.MethodSymbol"); -// Method enterModule = Symtab.class.getDeclaredMethod("enterModule", Name.class); -// Object javaBaseModule = enterModule.invoke(symbolTable, javaBaseName); -// -// Method enterClass = -// Symtab.class.getDeclaredMethod("enterClass", moduleSymbolCls, Name.class); -// return (ClassSymbol) enterClass.invoke(symbolTable, javaBaseModule, className); -// } catch (ClassNotFoundException | NoSuchFieldException | NoSuchMethodException -// | InvocationTargetException | IllegalAccessException e) { -// return null; -// } } public Context getContext() {
Prepare for JDK 9 Symtab changes. Use reflection to access Symtab methods that are changing in JDK 9 to remain compatible in both directions.
google_j2objc
train
fad6c8ac2d1d49e33d7398d7464ef489e4895a55
diff --git a/dht_net.go b/dht_net.go index <HASH>..<HASH> 100644 --- a/dht_net.go +++ b/dht_net.go @@ -23,7 +23,7 @@ import ( "go.opencensus.io/tag" ) -var dhtReadMessageTimeout = time.Minute +var dhtReadMessageTimeout = 10 * time.Second var dhtStreamIdleTimeout = 1 * time.Minute var ErrReadTimeout = fmt.Errorf("timed out reading response")
feat: reduce request read timeout (#<I>) This should stop us from waiting on unresponsive peers. This only kicks in when we've already _sent_ a request so this: * Shouldn't be blocked on other requests we're planning on making. * Shouldn't be blocked on dialing.
libp2p_go-libp2p-kad-dht
train
2a1975cbc9fae651f233510eee58304bcebc9ca2
diff --git a/mod/forum/lib.php b/mod/forum/lib.php index <HASH>..<HASH> 100644 --- a/mod/forum/lib.php +++ b/mod/forum/lib.php @@ -2185,30 +2185,28 @@ function forum_print_attachments($post, $return=NULL) { if ($basedir = forum_file_area($post)) { if ($files = get_directory_list($basedir)) { $strattachment = get_string("attachment", "forum"); - $strpopupwindow = get_string("popupwindow"); foreach ($files as $file) { $icon = mimeinfo("icon", $file); if ($CFG->slasharguments) { - $ffurl = "file.php/$filearea/$file"; + $ffurl = "$CFG->wwwroot/file.php/$filearea/$file"; } else { - $ffurl = "file.php?file=/$filearea/$file"; + $ffurl = "$CFG->wwwroot/file.php?file=/$filearea/$file"; } - $image = "<img border=\"0\" src=\"$CFG->pixpath/f/$icon\" height=\"16\" width=\"16\" alt=\"$strpopupwindow\" />"; + $image = "<img border=\"0\" src=\"$CFG->pixpath/f/$icon\" height=\"16\" width=\"16\" alt=\"\" />"; if ($return == "html") { - $output .= "<a href=\"$CFG->wwwroot/$ffurl\">$image</a> "; - $output .= "<a href=\"$CFG->wwwroot/$ffurl\">$file</a><br />"; + $output .= "<a href=\"$ffurl\">$image</a> "; + $output .= "<a href=\"$ffurl\">$file</a><br />"; } else if ($return == "text") { - $output .= "$strattachment $file:\n$CFG->wwwroot/$ffurl\n"; + $output .= "$strattachment $file:\n$ffurl\n"; } else { if ($icon == "image.gif") { // Image attachments don't get printed as links - $imagereturn .= "<br /><img src=\"$CFG->wwwroot/$ffurl\" alt=\"\" />"; + $imagereturn .= "<br /><img src=\"$ffurl\" alt=\"\" />"; } else { - link_to_popup_window("/$ffurl", "attachment", $image, 500, 500, $strattachment); - echo "<a href=\"$CFG->wwwroot/$ffurl\">$file</a>"; - echo "<br />"; + echo "<a href=\"$ffurl\">$image</a> "; + echo "<a href=\"$ffurl\">$file</a><br />"; } } }
removed file pop-ups; merged from MOODLE_<I>_STABLE
moodle_moodle
train
78d094d20c9a6c8fc8633558fc25376fb88da972
diff --git a/SlevomatCodingStandard/Helpers/ReferencedNameHelper.php b/SlevomatCodingStandard/Helpers/ReferencedNameHelper.php index <HASH>..<HASH> 100644 --- a/SlevomatCodingStandard/Helpers/ReferencedNameHelper.php +++ b/SlevomatCodingStandard/Helpers/ReferencedNameHelper.php @@ -92,14 +92,17 @@ class ReferencedNameHelper ], true) ) { if ($tokens[$previousTokenBeforeStartPointer]['code'] === T_COMMA) { - $implementsOrExtendsPointer = TokenHelper::findPreviousExcluding( + $precedingTokenPointer = TokenHelper::findPreviousExcluding( $phpcsFile, array_merge([T_COMMA], TokenHelper::$nameTokenCodes, TokenHelper::$ineffectiveTokenCodes), $previousTokenBeforeStartPointer - 1 ); if ( - $tokens[$implementsOrExtendsPointer]['code'] !== T_IMPLEMENTS - && $tokens[$implementsOrExtendsPointer]['code'] !== T_EXTENDS + !in_array($tokens[$precedingTokenPointer]['code'], [ + T_IMPLEMENTS, + T_EXTENDS, + T_USE, + ]) ) { $type = ReferencedName::TYPE_CONSTANT; } diff --git a/tests/Helpers/ReferencedNameHelperTest.php b/tests/Helpers/ReferencedNameHelperTest.php index <HASH>..<HASH> 100644 --- a/tests/Helpers/ReferencedNameHelperTest.php +++ b/tests/Helpers/ReferencedNameHelperTest.php @@ -17,6 +17,8 @@ class ReferencedNameHelperTest extends \SlevomatCodingStandard\Helpers\TestCase ['\SecondImplementedInterface', false, false], ['\ThirdImplementedInterface', false, false], ['\FullyQualified\SomeOtherTrait', false, false], + ['SomeDifferentTrait', false, false], + ['\FullyQualified\SometTotallyDifferentTrait', false, false], ['SomeTrait', false, false], ['TypehintedName', false, false], ['ClassInstance', false, false], diff --git a/tests/Helpers/data/lotsOfReferencedNames.php b/tests/Helpers/data/lotsOfReferencedNames.php index <HASH>..<HASH> 100644 --- a/tests/Helpers/data/lotsOfReferencedNames.php +++ b/tests/Helpers/data/lotsOfReferencedNames.php @@ -8,7 +8,7 @@ use UsedNamespace\UsedNameFooBar as UsedNameFooBarBaz; class FooClass extends \ExtendedClass implements \ImplementedInterface, \SecondImplementedInterface, \ThirdImplementedInterface { - use \FullyQualified\SomeOtherTrait; + use \FullyQualified\SomeOtherTrait, SomeDifferentTrait, \FullyQualified\SometTotallyDifferentTrait; use SomeTrait; /** @ORM\Column(name="foo") */
Fixed detecting unused uses for multiple traits on the same line
slevomat_coding-standard
train
a4263e7db5766bc8ce9feb9eb4097d0d0068e8bc
diff --git a/transloadit.go b/transloadit.go index <HASH>..<HASH> 100755 --- a/transloadit.go +++ b/transloadit.go @@ -102,7 +102,11 @@ func (client *Client) doRequest(req *http.Request) (Response, error) { func (client *Client) request(method string, path string, content map[string]interface{}) (Response, error) { - uri := client.config.Endpoint + "/" + path + uri := path + // Don't add host for absolute urls + if u, err := url.Parse(path); err == nil && u.Scheme == "" { + uri = client.config.Endpoint + "/" + path + } // Ensure content is a map if content == nil {
Don't add the endpoint to URLs that already have a scheme (sketched below)
transloadit_go-sdk
train
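On the go-sdk record above: the change prefixes the configured endpoint only when the given path carries no scheme of its own. A rough TypeScript equivalent of that check (function and endpoint names are assumptions, not the SDK's API):

// Prefix relative paths with the endpoint; leave absolute URLs untouched.
function resolveUri(endpoint: string, path: string): string {
  try {
    new URL(path); // succeeds only for absolute URLs such as "https://..."
    return path;
  } catch {
    return `${endpoint}/${path}`; // scheme-less path: anchor it to the endpoint
  }
}

console.log(resolveUri('https://api.example.com', 'assemblies'));
// -> https://api.example.com/assemblies
console.log(resolveUri('https://api.example.com', 'https://other.host/x'));
// -> https://other.host/x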
705c4322728f7b816db66984b3ede9b3aa0e17b6
diff --git a/PHPCI/Plugin/CopyBuild.php b/PHPCI/Plugin/CopyBuild.php index <HASH>..<HASH> 100644 --- a/PHPCI/Plugin/CopyBuild.php +++ b/PHPCI/Plugin/CopyBuild.php @@ -39,7 +39,7 @@ class CopyBuild implements \PHPCI\Plugin return false; } - $cmd = 'mkdir -p "%s" && cp -Rf "%s" "%s/"'; + $cmd = 'mkdir -p "%s" && cp -Rf "%s"* "%s/"'; $success = $this->phpci->executeCommand($cmd, $this->directory, $build, $this->directory); if ($this->ignore) {
Fixing the copy_build plugin, issue #<I>
dancryer_PHPCI
train
1ffbb5c24c04d961a9bb6fbb45b97734d4c1ff59
diff --git a/cmd/web-handlers_test.go b/cmd/web-handlers_test.go index <HASH>..<HASH> 100644 --- a/cmd/web-handlers_test.go +++ b/cmd/web-handlers_test.go @@ -1590,12 +1590,14 @@ func TestWebObjectLayerFaultyDisks(t *testing.T) { z := obj.(*xlZones) xl := z.zones[0].sets[0] xlDisks := xl.getDisks() + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { for i, d := range xlDisks { xlDisks[i] = newNaughtyDisk(d, nil, errFaultyDisk) } return xlDisks } + z.zones[0].xlDisksMu.Unlock() // Initialize web rpc endpoint. apiRouter := initTestWebRPCEndPoint(obj) diff --git a/cmd/xl-v1-healing_test.go b/cmd/xl-v1-healing_test.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-healing_test.go +++ b/cmd/xl-v1-healing_test.go @@ -270,6 +270,7 @@ func TestHealObjectXL(t *testing.T) { } xlDisks := xl.getDisks() + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { // Nil more than half the disks, to remove write quorum. for i := 0; i <= len(xlDisks)/2; i++ { @@ -277,6 +278,7 @@ func TestHealObjectXL(t *testing.T) { } return xlDisks } + z.zones[0].xlDisksMu.Unlock() // Try healing now, expect to receive errDiskNotFound. _, err = obj.HealObject(context.Background(), bucket, object, false, false, madmin.HealDeepScan) diff --git a/cmd/xl-v1-object_test.go b/cmd/xl-v1-object_test.go index <HASH>..<HASH> 100644 --- a/cmd/xl-v1-object_test.go +++ b/cmd/xl-v1-object_test.go @@ -217,12 +217,14 @@ func TestXLDeleteObjectDiskNotFound(t *testing.T) { // for a 16 disk setup, quorum is 9. To simulate disks not found yet // quorum is available, we remove disks leaving quorum disks behind. xlDisks := xl.getDisks() + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { for i := range xlDisks[:7] { xlDisks[i] = newNaughtyDisk(xlDisks[i], nil, errFaultyDisk) } return xlDisks } + z.zones[0].xlDisksMu.Unlock() err = obj.DeleteObject(context.Background(), bucket, object) if err != nil { t.Fatal(err) @@ -236,11 +238,13 @@ func TestXLDeleteObjectDiskNotFound(t *testing.T) { // Remove one more disk to 'lose' quorum, by setting it to nil. xlDisks = xl.getDisks() + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { xlDisks[7] = nil xlDisks[8] = nil return xlDisks } + z.zones[0].xlDisksMu.Unlock() err = obj.DeleteObject(context.Background(), bucket, object) // since majority of disks are not available, metaquorum is not achieved and hence errXLReadQuorum error if err != toObjectErr(errXLReadQuorum, bucket, object) { @@ -293,9 +297,11 @@ func TestGetObjectNoQuorum(t *testing.T) { xlDisks[i] = newNaughtyDisk(xlDisks[i], diskErrors, errFaultyDisk) } } + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { return xlDisks } + z.zones[0].xlDisksMu.Unlock() // Fetch object from store. err = xl.GetObject(context.Background(), bucket, object, 0, int64(len("abcd")), ioutil.Discard, "", opts) if err != toObjectErr(errXLReadQuorum, bucket, object) { @@ -350,9 +356,11 @@ func TestPutObjectNoQuorum(t *testing.T) { xlDisks[i] = newNaughtyDisk(xlDisks[i], diskErrors, errFaultyDisk) } } + z.zones[0].xlDisksMu.Lock() xl.getDisks = func() []StorageAPI { return xlDisks } + z.zones[0].xlDisksMu.Unlock() // Upload new content to same object "object" _, err = obj.PutObject(context.Background(), bucket, object, mustGetPutObjReader(t, bytes.NewReader([]byte("abcd")), int64(len("abcd")), "", ""), opts) if err != toObjectErr(errXLWriteQuorum, bucket, object) {
fix racy tests when editing xl.getDisks (#<I>)
minio_minio
train
aabf00d1569655b2a97fee130b48e3b1762aa616
diff --git a/plugin/geomajas-plugin-deskmanager/deskmanager/src/main/java/org/geomajas/plugin/deskmanager/command/manager/dto/GetTerritoriesRequest.java b/plugin/geomajas-plugin-deskmanager/deskmanager/src/main/java/org/geomajas/plugin/deskmanager/command/manager/dto/GetTerritoriesRequest.java index <HASH>..<HASH> 100644 --- a/plugin/geomajas-plugin-deskmanager/deskmanager/src/main/java/org/geomajas/plugin/deskmanager/command/manager/dto/GetTerritoriesRequest.java +++ b/plugin/geomajas-plugin-deskmanager/deskmanager/src/main/java/org/geomajas/plugin/deskmanager/command/manager/dto/GetTerritoriesRequest.java @@ -22,7 +22,7 @@ public class GetTerritoriesRequest implements CommandRequest { private static final long serialVersionUID = 1L; - public static final String COMMAND = "command.deskmanager.manager.GetGroups"; + public static final String COMMAND = "command.deskmanager.manager.GetTerritories"; public String toString() { return COMMAND;
GDM-<I>: make command names more unique. GDM-<I>: unit test assertion order.
geomajas_geomajas-project-server
train
2870031844dfc28919763d5c05c1d1bc6db8f4f1
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -16,10 +16,6 @@ else: with open("README.md") as readme_file: long_description = readme_file.read() -tests_require = [ - "vcrpy>=1.10.3", -] - setup( name="tweepy", version=version, @@ -42,7 +38,6 @@ setup( "requests>=2.27.0,<3", "requests-oauthlib>=1.2.0,<2", ], - tests_require=tests_require, extras_require={ "async": ["aiohttp>=3.7.3,<4"], "dev": [ @@ -51,7 +46,7 @@ setup( "tox>=3.21.0", ], "socks": ["requests[socks]>=2.27.0,<3"], - "test": tests_require, + "test": ["vcrpy>=1.10.3"], }, test_suite="tests", keywords="twitter library",
Remove tests_require from setup.py. setup.py test has been deprecated: <URL>
tweepy_tweepy
train
f38b63d240b3f9bc12e9e26a4bb53260400ea720
diff --git a/ate/runner.py b/ate/runner.py index <HASH>..<HASH> 100644 --- a/ate/runner.py +++ b/ate/runner.py @@ -51,7 +51,7 @@ class Runner(object): @param (str) context level, testcase or testset """ # convert keys in request headers to lowercase - config_dict = utils.lower_dict_key(config_dict) + config_dict = utils.lower_config_dict_key(config_dict) self.context.init_context(level) self.context.config_context(config_dict, level) diff --git a/ate/utils.py b/ate/utils.py index <HASH>..<HASH> 100644 --- a/ate/utils.py +++ b/ate/utils.py @@ -300,7 +300,7 @@ def lower_dict_key(origin_dict, depth=1): new_dict = {} for key, value in origin_dict.items(): - if depth > 2: + if depth >= 2: new_dict[key] = value continue @@ -311,6 +311,17 @@ def lower_dict_key(origin_dict, depth=1): return new_dict +def lower_config_dict_key(config_dict): + """ convert key in config dict to lower case, convertion will occur in two places: + 1, all keys in config dict; + 2, all keys in config["request"] + """ + config_dict = lower_dict_key(config_dict) + if "request" in config_dict and isinstance(config_dict["request"], dict): + config_dict["request"] = lower_dict_key(config_dict["request"]) + + return config_dict + def convert_to_order_dict(map_list): """ convert mapping in list to ordered dict @param (list) map_list diff --git a/tests/test_utils.py b/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -222,7 +222,7 @@ class TestUtils(ApiServerUnittest): self.assertFalse(utils.is_variable(("os", os))) self.assertFalse(utils.is_variable(("utils", utils))) - def test_lower_dict_key(self): + def test_handle_config_key_case(self): origin_dict = { "Name": "test", "Request": { @@ -234,7 +234,7 @@ class TestUtils(ApiServerUnittest): } } } - new_dict = utils.lower_dict_key(origin_dict) + new_dict = utils.lower_config_dict_key(origin_dict) self.assertIn("name", new_dict) self.assertIn("request", new_dict) self.assertIn("method", new_dict["request"])
bugfix: keys that are not at the top level or directly in 'request' of the config shall not be lower-cased (sketched below)
HttpRunner_HttpRunner
train
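On the HttpRunner record above: the fix narrows lower-casing to exactly two places, the top-level config keys and the keys directly under request, so deeper user data such as header names is left alone. A small TypeScript sketch of that two-level rule (names are illustrative):

type Dict = Record<string, unknown>;

// Lower-case only a dict's immediate keys; values pass through untouched.
function lowerKeys(d: Dict): Dict {
  return Object.fromEntries(
    Object.entries(d).map(([k, v]) => [k.toLowerCase(), v]),
  );
}

// Apply the rule at the top level and one level under "request", nowhere deeper.
function lowerConfigKeys(config: Dict): Dict {
  const out = lowerKeys(config);
  if (typeof out.request === 'object' && out.request !== null) {
    out.request = lowerKeys(out.request as Dict);
  }
  return out;
}

console.log(lowerConfigKeys({
  Name: 'test',
  Request: { Url: '/api', Headers: { 'User-Agent': 'x' } },
}));
// -> { name: 'test', request: { url: '/api', headers: { 'User-Agent': 'x' } } }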
621b9fff194e0593b9fa9e72e3be73d487ff4fa8
diff --git a/formats/__init__.py b/formats/__init__.py index <HASH>..<HASH> 100644 --- a/formats/__init__.py +++ b/formats/__init__.py @@ -5,7 +5,7 @@ Support multiple formats with ease. - :copyright: (c) 2014 by Hidde Bultsma. + :copyright: (c) 2015 by Hidde Bultsma. :license: MIT, see LICENSE for more details. """ diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f: setup( name='formats', - version='0.1.0a2', + version='0.1.0a3', description='Support multiple formats with ease', long_description=long_description, url='https://github.com/redodo/formats',
Pre-release: <I>a3
redodo_formats
train
8845e760c736cc184fd845af83f216d222fdbe38
diff --git a/test.rb b/test.rb index <HASH>..<HASH> 100644 --- a/test.rb +++ b/test.rb @@ -127,9 +127,6 @@ end ctx = Twostroke::Context.new -# these are test suites that must be run in an isolated context -isolated_tests = [/mootools/] - files = Dir[File.expand_path("../test/*.js", __FILE__)] .sort .map { |file| TestFile.new file, ctx }
remove isolated_tests from test.rb
charliesome_twostroke
train
86b7f48e437bf85044bd9b5381a0ee0e9ec62cab
diff --git a/src/Generators/ViewGenerate.php b/src/Generators/ViewGenerate.php index <HASH>..<HASH> 100644 --- a/src/Generators/ViewGenerate.php +++ b/src/Generators/ViewGenerate.php @@ -2,8 +2,6 @@ namespace Amranidev\ScaffoldInterface\Generators; -use Amranidev\ScaffoldInterface\Datasystem\Datasystem; - /** * Class ViewGenerate. * @@ -29,10 +27,10 @@ class ViewGenerate * @param $data Array * @param NamesGenerate */ - public function __construct(Datasystem $dataSystem, NamesGenerate $names) + public function __construct() { - $this->dataSystem = $dataSystem; - $this->names = $names; + $this->dataSystem = app()->make('Datasystem'); + $this->names = app()->make('NamesGenerate'); } /**
Refactor the ViewGenerate class
amranidev_scaffold-interface
train
4c383daf8a9d7aaa5049a98d7d6da19c85793d2d
diff --git a/test/test_consumer_group.py b/test/test_consumer_group.py index <HASH>..<HASH> 100644 --- a/test/test_consumer_group.py +++ b/test/test_consumer_group.py @@ -44,6 +44,7 @@ def test_consumer(kafka_broker, version): assert len(consumer._client._conns) > 0 node_id = list(consumer._client._conns.keys())[0] assert consumer._client._conns[node_id].state is ConnectionStates.CONNECTED + consumer.close() @pytest.mark.skipif(version() < (0, 9), reason='Unsupported Kafka Version') @@ -153,6 +154,7 @@ def test_paused(kafka_broker, topic): consumer.unsubscribe() assert set() == consumer.paused() + consumer.close() @pytest.mark.skipif(version() < (0, 9), reason='Unsupported Kafka Version') @@ -183,3 +185,4 @@ def test_heartbeat_thread(kafka_broker, topic): assert consumer._coordinator.heartbeat.last_poll == last_poll consumer.poll(timeout_ms=100) assert consumer._coordinator.heartbeat.last_poll > last_poll + consumer.close() diff --git a/test/test_consumer_integration.py b/test/test_consumer_integration.py index <HASH>..<HASH> 100644 --- a/test/test_consumer_integration.py +++ b/test/test_consumer_integration.py @@ -44,6 +44,7 @@ def test_kafka_consumer(simple_client, topic, kafka_consumer_factory): assert len(messages[0]) == 100 assert len(messages[1]) == 100 + kafka_consumer.close() class TestConsumerIntegration(KafkaIntegrationTestCase): @@ -558,6 +559,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): messages.add((msg.partition, msg.offset)) self.assertEqual(len(messages), 5) self.assertGreaterEqual(t.interval, TIMEOUT_MS / 1000.0 ) + consumer.close() @kafka_versions('>=0.8.1') def test_kafka_consumer__offset_commit_resume(self): @@ -597,6 +599,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): output_msgs2.append(m) self.assert_message_count(output_msgs2, 20) self.assertEqual(len(set(output_msgs1) | set(output_msgs2)), 200) + consumer2.close() @kafka_versions('>=0.10.1') def test_kafka_consumer_max_bytes_simple(self): @@ -617,6 +620,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): self.assertEqual( seen_partitions, set([ TopicPartition(self.topic, 0), TopicPartition(self.topic, 1)])) + consumer.close() @kafka_versions('>=0.10.1') def test_kafka_consumer_max_bytes_one_msg(self): @@ -642,6 +646,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): fetched_msgs = [next(consumer) for i in range(10)] self.assertEqual(len(fetched_msgs), 10) + consumer.close() @kafka_versions('>=0.10.1') def test_kafka_consumer_offsets_for_time(self): @@ -695,6 +700,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): self.assertEqual(offsets, { tp: late_msg.offset + 1 }) + consumer.close() @kafka_versions('>=0.10.1') def test_kafka_consumer_offsets_search_many_partitions(self): @@ -733,6 +739,7 @@ class TestConsumerIntegration(KafkaIntegrationTestCase): tp0: p0msg.offset + 1, tp1: p1msg.offset + 1 }) + consumer.close() @kafka_versions('<0.10.1') def test_kafka_consumer_offsets_for_time_old(self): diff --git a/test/test_producer.py b/test/test_producer.py index <HASH>..<HASH> 100644 --- a/test/test_producer.py +++ b/test/test_producer.py @@ -55,7 +55,6 @@ def test_end_to_end(kafka_broker, compression): futures.append(producer.send(topic, 'msg %d' % i)) ret = [f.get(timeout=30) for f in futures] assert len(ret) == messages - producer.close() consumer.subscribe([topic]) @@ -67,6 +66,7 @@ def test_end_to_end(kafka_broker, compression): break assert msgs == set(['msg %d' % i for i in range(messages)]) + consumer.close() 
@pytest.mark.skipif(platform.python_implementation() != 'CPython',
Close KafkaConsumer instances during tests (#<I>)
dpkp_kafka-python
train
49cbcefc9c2b90ff59426a05c3b52e8f7e6c9f1a
diff --git a/third_party/language-mapping-list/language-mapping-list.js b/third_party/language-mapping-list/language-mapping-list.js index <HASH>..<HASH> 100644 --- a/third_party/language-mapping-list/language-mapping-list.js +++ b/third_party/language-mapping-list/language-mapping-list.js @@ -493,6 +493,10 @@ mozilla.LanguageMapping = { nativeName: "한국어 (韩国)", englishName: "Korean (Korea)" }, + 'ku': { + nativeName: "Kurdî", + englishName: "Kurdish" + }, 'ku-TR': { nativeName: "Kurdî", englishName: "Kurdish" @@ -585,10 +589,6 @@ mozilla.LanguageMapping = { nativeName: "ဗမာစကာ", englishName: "Burmese" }, - 'no': { - nativeName: "Norsk", - englishName: "Norwegian" - }, 'nb': { nativeName: "Norsk (bokmål)", englishName: "Norwegian (bokmal)" @@ -621,6 +621,10 @@ mozilla.LanguageMapping = { nativeName: "Norsk (nynorsk)", englishName: "Norwegian (nynorsk)" }, + 'no': { + nativeName: "Norsk", + englishName: "Norwegian" + }, 'oc': { nativeName: "Occitan", englishName: "Occitan" @@ -857,11 +861,11 @@ mozilla.LanguageMapping = { nativeName: "中文", englishName: "Chinese" }, - 'zh-Hans': { + 'zh-HANS': { nativeName: "中文简体", englishName: "Chinese Simplified" }, - 'zh-Hant': { + 'zh-HANT': { nativeName: "中文繁體", englishName: "Chinese Traditional" }, @@ -893,4 +897,4 @@ mozilla.LanguageMapping = { // to the table without needing to normalize it to lower-lower every time. for (let key of Object.keys(mozilla.LanguageMapping)) { mozilla.LanguageMapping[key.toLowerCase()] = mozilla.LanguageMapping[key]; -} \ No newline at end of file +}
Update language mapping - Add Kurdish ("ku") to supplement Kurdish-Turkey ("ku-TR") - Alphabetize Norwegian ("no") - Normalize case on Chinese ("HANS" and "HANT") Change-Id: I<I>ae<I>b<I>cdf8a<I>da<I>f4f<I>a7eb7f<I>
google_shaka-player
train
8c2b4616dd784f9c578a88e690c83ee9cf36f429
diff --git a/nikeplus/export.py b/nikeplus/export.py index <HASH>..<HASH> 100644 --- a/nikeplus/export.py +++ b/nikeplus/export.py @@ -2,14 +2,12 @@ Export nikeplus data to csv or print to screen """ +import argparse from collections import namedtuple import json -import urllib -import urllib2 +import sys import time - -# Insert token here -ACCESS_TOKEN = '' +import urllib2 # FIXME: Could use descriptors here: # - Could hold: default value, api name, pretty name, and conversion func @@ -31,6 +29,30 @@ NikePlusActivity = namedtuple('NikePlusActivity', name_to_api.keys()) km_to_mi = lambda distance: distance * 0.621371 +def _parse_args(): + """Parse sys.argv arguments""" + + parser = argparse.ArgumentParser(description='Export NikePlus data to CSV') + + parser.add_argument('-t', '--token', required=False, default=None, + help='Access token for API, can also store in file named "access_token" to avoid passing via command line') + + args = vars(parser.parse_args()) + + if args['token'] is None: + try: + with open('access_token', 'r') as _file: + access_token = _file.read() + except IOError: + print 'Must pass access token via command line or store in file named "access_token"' + sys.exit(-1) + + args['token'] = access_token + + return args + + + def calculate_mile_pace(duration, miles): pace = '' sp = duration.split(':') @@ -72,9 +94,10 @@ def decode_activity(activity): return activity -def get_activities(): +def get_activities(access_token): base_url = 'https://api.nike.com' - url = '/me/sport/activities?access_token=%s' % ACCESS_TOKEN + + url = '/me/sport/activities?access_token=%s' % access_token # weird required headers, blah. headers = {'appid':'fuelband', 'Accept':'application/json'} @@ -107,13 +130,12 @@ def activity_to_csv(activity): # This is safe b/c _dict is ordered dict so the order is dependable. return ','.join(str(value) for value in _dict.values()) - def main(): - # FIXME: Add help, real argparse # FIXME: Use csv module to write b/c it will handle case where data could # have a comma in it. - activities = get_activities() + args = _parse_args() + activities = get_activities(args['token']) # Print header activity = activities.next()
Add proper arg parsing and access token via command line
durden_nikeplus
train
f085a36fb728fe1079d32fc07540274e2161e5ab
diff --git a/pysnmp/proto/secmod/rfc3414/service.py b/pysnmp/proto/secmod/rfc3414/service.py index <HASH>..<HASH> 100644 --- a/pysnmp/proto/secmod/rfc3414/service.py +++ b/pysnmp/proto/secmod/rfc3414/service.py @@ -791,7 +791,8 @@ class SnmpUSMSecurityModel(AbstractSecurityModel): int(time.time()) ) - expireAt = int(self.__expirationTimer + 300 / snmpEngine.transportDispatcher.getTimerResolution()) + timerResolution = snmpEngine.transportDispatcher is None and 1.0 or snmpEngine.transportDispatcher.getTimerResolution() + expireAt = int(self.__expirationTimer + 300 / timerResolution) if expireAt not in self.__timelineExpQueue: self.__timelineExpQueue[expireAt] = [] self.__timelineExpQueue[expireAt].append(msgAuthoritativeEngineId) @@ -859,7 +860,9 @@ class SnmpUSMSecurityModel(AbstractSecurityModel): msgAuthoritativeEngineTime, int(time.time()) ) - expireAt = int(self.__expirationTimer + 300 / snmpEngine.transportDispatcher.getTimerResolution()) + + timerResolution = snmpEngine.transportDispatcher is None and 1.0 or snmpEngine.transportDispatcher.getTimerResolution() + expireAt = int(self.__expirationTimer + 300 / timerResolution) if expireAt not in self.__timelineExpQueue: self.__timelineExpQueue[expireAt] = [] self.__timelineExpQueue[expireAt].append(msgAuthoritativeEngineId)
work even if transport dispatcher is not available
etingof_pysnmp
train
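On the pysnmp record above: the patch uses Python's old "cond and a or b" idiom so the code falls back to a default resolution of 1.0 when no transport dispatcher is attached. The same guard written plainly in TypeScript (names are illustrative):

interface Dispatcher {
  getTimerResolution(): number;
}

// Default to a 1-second timer resolution when no dispatcher is attached,
// so expiration bookkeeping still works in dispatcher-less engines.
function expireAt(expirationTimer: number, dispatcher: Dispatcher | null): number {
  const resolution = dispatcher !== null ? dispatcher.getTimerResolution() : 1.0;
  return Math.floor(expirationTimer + 300 / resolution);
}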
5d451085933a258060b9b5609f0417c36e38ac77
diff --git a/lib/unexpectedMitm.js b/lib/unexpectedMitm.js index <HASH>..<HASH> 100644 --- a/lib/unexpectedMitm.js +++ b/lib/unexpectedMitm.js @@ -689,7 +689,32 @@ module.exports = { expect.promise(function () { return expect.shift(); - }).caught(handleError); + }).caught(handleError).then(function () { + /* + * Handle the case of specified but unprocessed mocks. + * + * Where the driving assertion resolves we must check + * if any mocks still exist. If so, we add then to the + * set of expected exchanges the resolve the promise. + */ + var hasRemainingRequestDescriptions = requestDescriptions.length > 0; + if (hasRemainingRequestDescriptions) { + requestDescriptions.forEach(function (requestDescription) { + var responseProperties = requestDescription && requestDescription.response, + expectedRequestProperties = requestDescription && requestDescription.request; + + expectedRequestProperties = resolveExpectedRequestProperties(expectedRequestProperties); + + httpConversationSatisfySpec.exchanges.push({ + request: expectedRequestProperties, + response: responseProperties + }); + }); + + cleanUp(); + resolve([httpConversation, httpConversationSatisfySpec]); + } + }); }).spread(function (httpConversation, httpConversationSatisfySpec) { expect.errorMode = 'default'; return expect(httpConversation, 'to satisfy', httpConversationSatisfySpec);
Restore the code to empty any unused remaining request descriptions.
unexpectedjs_unexpected-mitm
train
6953a8680466f638fba6affbb9cda8aea6378e63
diff --git a/examples/create_shipment.py b/examples/create_shipment.py index <HASH>..<HASH> 100755 --- a/examples/create_shipment.py +++ b/examples/create_shipment.py @@ -3,6 +3,7 @@ This example shows how to ship shipments. """ import logging +import binascii from fedex.services.ship_service import FedexProcessShipmentRequest from fedex.config import FedexConfig @@ -98,3 +99,38 @@ ship.send_request() # This will show the reply to your shipment being sent. You can access the # attributes through the response attribute on the request object. print ship.response + +# Get the label image in ASCII format from the reply. +ascii_label_data = ship.response.CompletedShipmentDetail.CompletedPackageDetails[0].Label.Parts[0].Image +# Convert the ASCII data to binary. +label_binary_data = binascii.a2b_base64(ascii_label_data) +# This will be the file we write the label out to. +png_file = open('example_shipment_label.png', 'wb') + +""" +This is an example of how to dump a label to a PNG file. +""" +png_file.write(label_binary_data) +png_file.close() + +""" +This is an example of how to print the label to a serial printer. This will not +work for all label printers, consult your printer's documentation for more +details on what formats it can accept. +""" +# Pipe the binary directly to the label printer. Works under Linux +# without requiring PySerial. This WILL NOT work on other platforms. +#label_printer = open("/dev/ttyS0", "w") +#label_printer.write(label_binary_data) +#label_printer.close() + +""" +This is a potential cross-platform solution using pySerial. This has not been +tested in a long time and may or may not work. For Windows, Mac, and other +platforms, you may want to go this route. +""" +#import serial +#label_printer = serial.Serial(0) +#print "SELECTED SERIAL PORT: "+ label_printer.portstr +#label_printer.write(label_binary_data) +#label_printer.close() \ No newline at end of file
Add examples of how to store or print labels.
python-fedex-devs_python-fedex
train
8f47a60e4e293cd4fa2760ad31c233bc8cbebd98
diff --git a/lib/db/upgrade.php b/lib/db/upgrade.php index <HASH>..<HASH> 100644 --- a/lib/db/upgrade.php +++ b/lib/db/upgrade.php @@ -792,5 +792,31 @@ function xmldb_main_upgrade($oldversion) { upgrade_main_savepoint(true, 2012060100.02); } + if ($oldversion < 2012060100.03) { + // Merge duplicate records in files_reference that were created during the development + // phase at 2.3dev sites. This is needed so we can create the unique index over + // (repositoryid, referencehash) fields. + $sql = "SELECT repositoryid, referencehash, MIN(id) AS minid + FROM {files_reference} + GROUP BY repositoryid, referencehash + HAVING COUNT(*) > 1;"; + $duprs = $DB->get_recordset_sql($sql); + foreach ($duprs as $duprec) { + // get the list of all ids in {files_reference} that need to be remapped + $dupids = $DB->get_records_select('files_reference', "repositoryid = ? AND referencehash = ? AND id > ?", + array($duprec->repositoryid, $duprec->referencehash, $duprec->minid), '', 'id'); + $dupids = array_keys($dupids); + // relink records in {files} that are now referring to a duplicate record + // in {files_reference} to refer to the first one + list($subsql, $subparams) = $DB->get_in_or_equal($dupids); + $DB->set_field_select('files', 'referencefileid', $duprec->minid, "referencefileid $subsql", $subparams); + // and finally remove all orphaned records from {files_reference} + $DB->delete_records_list('files_reference', 'id', $dupids); + } + $duprs->close(); + + upgrade_main_savepoint(true, 2012060100.03); + } + return true; } diff --git a/version.php b/version.php index <HASH>..<HASH> 100644 --- a/version.php +++ b/version.php @@ -30,7 +30,7 @@ defined('MOODLE_INTERNAL') || die(); -$version = 2012060100.02; // YYYYMMDD = weekly release date of this DEV branch +$version = 2012060100.03; // YYYYMMDD = weekly release date of this DEV branch // RR = release increments - 00 in DEV branches // .XX = incremental changes
MDL-<I> Remap all instances of an external file to a single record in files_reference
moodle_moodle
train
db6bde03ec12dbec3d80450e1e303521781bc634
diff --git a/dcard/posts.py b/dcard/posts.py index <HASH>..<HASH> 100644 --- a/dcard/posts.py +++ b/dcard/posts.py @@ -5,9 +5,14 @@ except ImportError: from . import api from .utils import Client +client = Client() + class Post: + def __init__(self): + pass + @staticmethod def build_url(post_id): post_url = '{api_root}/{api_posts}/{post_id}'.format( @@ -19,21 +24,21 @@ class Post: @staticmethod def get_content(post_url): - return Client.get(post_url) + return client.sget(post_url) @staticmethod def get_links(post_url): links_url = '{post_url}/links'.format(post_url=post_url) - return Client.get(links_url) + return client.sget(links_url) @staticmethod def get_comments(post_url): comments_url = '{post_url}/comments'.format(post_url=post_url) - + params = {} comments = [] while True: - _comments = Client.get(comments_url, params=params, verbose=True) + _comments = Client.get(comments_url, params=params) if len(_comments) == 0: break comments += _comments @@ -42,21 +47,43 @@ class Post: return comments @staticmethod - def single_get(post_meta=None, post_id=None): - post_id = post_id if post_id else post_meta['id'] - post_url = Post.build_url(post_id) - return { - 'content': Post.get_content(post_url), - 'links': Post.get_links(post_url), - 'comments': Post.get_comments(post_url) - } + def _get(post_ids, **kwargs): + + post_urls = [Post.build_url(i) for i in post_ids] + + crawl_links = kwargs.get('links', True) + crawl_content = kwargs.get('content', True) + crawl_comments = kwargs.get('comments', True) + + if crawl_links: + links_futures = [Post.get_links(url) for url in post_urls] + if crawl_content: + content_futures = [Post.get_content(url) for url in post_urls] + + results = [{}] * len(post_urls) + if crawl_links: + for i, f in enumerate(links_futures): + results[i]['links'] = f.result().json() + if crawl_content: + for i, f in enumerate(content_futures): + results[i]['content'] = f.result().json() + if crawl_comments: + for i, url in enumerate(post_urls): + results[i]['comments'] = Post.get_comments(url) + return results @staticmethod - def get(post_meta=None, post_id=None): - if isinstance(post_meta, list): - return [Post.single_get(m) for m in post_meta] - elif isinstance(post_id, list): - return [Post.single_get(i) for i in post_id] - else: - return Post.single_get(post_meta=post_meta, post_id=post_id) + def get(post_meta=None, post_id=None, **kwargs): + ids = [] + if post_meta: + if isinstance(post_meta, list): + ids = [m['id'] for m in post_meta] + else: + ids = [post_meta['id']] + if post_id: + if isinstance(post_id, list): + ids = post_id + else: + ids = [post_id] + return Post._get(ids, **kwargs) diff --git a/dcard/utils.py b/dcard/utils.py index <HASH>..<HASH> 100644 --- a/dcard/utils.py +++ b/dcard/utils.py @@ -1,9 +1,14 @@ -import requests import logging +import requests +from requests_futures.sessions import FuturesSession + class Client: + def __init__(self): + self.session = FuturesSession(max_workers=10) + @staticmethod def get(url, verbose=False, **kwargs): response = requests.get(url, **kwargs) @@ -11,6 +16,9 @@ class Client: logging.info(response.url) return response.json() + def sget(self, url, **kwargs): + return self.session.get(url, **kwargs) + def filter_general(forums): for forum in forums: diff --git a/requirements.txt b/requirements.txt index <HASH>..<HASH> 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ -requests==2.9.1 \ No newline at end of file +requests==2.9.1 +requests-futures==0.9.7 diff --git a/spider.py b/spider.py index <HASH>..<HASH> 
100644 --- a/spider.py +++ b/spider.py @@ -13,3 +13,8 @@ if __name__ == '__main__': ids = dcard.forums('funny').get_metas(pages=5, callback=collect_ids) print('{:.5f}'.format(time.time() - t)) print(len(ids)) + + t = time.time() + articles = dcard.posts.get(post_id=ids[:10], comments=False) + print('{:.5f}'.format(time.time() - t)) + print(len(articles)) diff --git a/tests/test_dcard.py b/tests/test_dcard.py index <HASH>..<HASH> 100644 --- a/tests/test_dcard.py +++ b/tests/test_dcard.py @@ -41,6 +41,6 @@ def test_post_ids(forums): def test_post_bundle(): - post = Dcard.posts.get({'id': 224341009}) + post = Dcard.posts.get({'id': 224341009})[0] comment_count = post['content']['commentCount'] assert comment_count == len(post['comments'])
Make multiple requests asynchronous for high concurrency
leVirve_dcard-spider
train
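For context on the commit above: a minimal sketch of the requests-futures pattern it adopts. The endpoint URL, worker count, and range are illustrative assumptions, not values from this repository.

    # Minimal requests-futures sketch; endpoint and max_workers are hypothetical.
    from requests_futures.sessions import FuturesSession

    session = FuturesSession(max_workers=10)

    # Each get() returns immediately with a future; requests run concurrently.
    futures = [session.get('https://example.com/posts/%d' % i) for i in range(10)]

    # .result() blocks until that particular response is ready.
    responses = [f.result().json() for f in futures]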
c5296ed233708b0d8e22436e6f442ae57d12ff9e
diff --git a/rpc/src/main/java/io/datakernel/rpc/client/sender/RequestSenderFactory.java b/rpc/src/main/java/io/datakernel/rpc/client/sender/RequestSenderFactory.java index <HASH>..<HASH> 100644 --- a/rpc/src/main/java/io/datakernel/rpc/client/sender/RequestSenderFactory.java +++ b/rpc/src/main/java/io/datakernel/rpc/client/sender/RequestSenderFactory.java @@ -16,69 +16,112 @@ package io.datakernel.rpc.client.sender; +import io.datakernel.rpc.client.RpcClientConnection; import io.datakernel.rpc.client.RpcClientConnectionPool; import io.datakernel.rpc.hash.HashFunction; import io.datakernel.rpc.protocol.RpcMessage.RpcMessageData; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.List; -public class RequestSenders { +import static com.google.common.base.Preconditions.checkNotNull; - private RequestSenders() { - } +public abstract class RequestSenderFactory { + + private final List<RequestSenderFactory> subSenderFactories; - public static List<RequestSender> serverGroup(List<InetSocketAddress>) { - // TODO: - return null; + public RequestSenderFactory(List<RequestSenderFactory> subSenderFactories) { + this.subSenderFactories = subSenderFactories; } - public static RequestSender server(InetSocketAddress) + public abstract RequestSender create(RpcClientConnectionPool pool); + protected List<RequestSender> createSubSenders(RpcClientConnectionPool pool) { - public static RequestSenderFactory firstAvailable(List<RequestSender> senders) { - return new RequestSenderFactory() { + // method should be called only from factories that have subFactories + assert subSenderFactories != null; + + List<RequestSender> requestSenders = new ArrayList<>(subSenderFactories.size()); + for (RequestSenderFactory subSenderFactory : subSenderFactories) { + requestSenders.add(subSenderFactory.create(pool)); + } + return requestSenders; + } + +// public static List<RequestSender> serverGroup(List<InetSocketAddress>) { +// // TODO: +// return null; +// } + + public static RequestSenderFactory server(final InetSocketAddress address) { + checkNotNull(address); + return new RequestSenderFactory(null) { @Override public RequestSender create(RpcClientConnectionPool pool) { - return new RequestSenderToFirst(pool); + return new RequestSenderToSingleServer(address, pool); } }; } - public static RequestSenderFactory allAvailable() { - return new RequestSenderFactory() { +// public static RequestSenderFactory servers() + + + public static RequestSenderFactory firstAvailable(final List<RequestSenderFactory> senders) { + checkNotNull(senders); + return new RequestSenderFactory(senders) { @Override public RequestSender create(RpcClientConnectionPool pool) { - return new RequestSenderToAll(pool); + List<RequestSender> subSenders = createSubSenders(pool); + return new RequestSenderToFirst(subSenders); } }; } - public static RequestSenderFactory roundRobin() { - return new RequestSenderFactory() { + public static RequestSenderFactory allAvailable(final List<RequestSenderFactory> senders) { + return new RequestSenderFactory(senders) { @Override public RequestSender create(RpcClientConnectionPool pool) { - return new RequestSenderRoundRobin(pool); + List<RequestSender> subSenders = createSubSenders(pool); + return new RequestSenderToAll(subSenders); } }; } - public static RequestSenderFactory consistentHashBucket(final HashFunction<RpcMessageData> hashFunction) { - return new RequestSenderFactory() { + public static RequestSenderFactory roundRobin(final List<RequestSenderFactory> senders) { + 
checkNotNull(senders); + return new RequestSenderFactory(senders) { @Override public RequestSender create(RpcClientConnectionPool pool) { - return new RequestSenderRendezvousHashing(pool, hashFunction); + List<RequestSender> subSenders = createSubSenders(pool); + return new RequestSenderRoundRobin(subSenders); } }; } - public static RequestSenderFactory sharding(final HashFunction<RpcMessageData> hashFunction) { - return new RequestSenderFactory() { + public static RequestSenderFactory rendezvousHashing(final List<RequestSenderFactory> senders, + final HashFunction<RpcMessageData> hashFunction) { + checkNotNull(senders); + checkNotNull(hashFunction); + return new RequestSenderFactory(senders) { @Override public RequestSender create(RpcClientConnectionPool pool) { - return new RequestSenderSharding(pool, hashFunction); + List<RequestSender> subSenders = createSubSenders(pool); + return new RequestSenderRendezvousHashing(subSenders, hashFunction); } }; } + public static RequestSenderFactory sharding(final List<RequestSenderFactory> senders, + final HashFunction<RpcMessageData> hashFunction) { + checkNotNull(senders); + checkNotNull(hashFunction); + return new RequestSenderFactory(senders) { + @Override + public RequestSender create(RpcClientConnectionPool pool) { + List<RequestSender> subSenders = createSubSenders(pool); + return new RequestSenderSharding(subSenders, hashFunction); + } + }; + } }
Modify RequestSenderFactory; the servers() method is still absent
softindex_datakernel
train
c38f8392fec1872a5bed52b791717fefde530668
diff --git a/lib/macho.rb b/lib/macho.rb index <HASH>..<HASH> 100644 --- a/lib/macho.rb +++ b/lib/macho.rb @@ -16,7 +16,7 @@ require_relative "macho/tools" # The primary namespace for ruby-macho. module MachO # release version - VERSION = "2.4.0" + VERSION = "2.5.0" # Opens the given filename as a MachOFile or FatFile, depending on its magic. # @param filename [String] the file being opened
chore(version): <I> (#<I>)
Homebrew_ruby-macho
train
81aba27eb601e956bde79887c3fb2bf476a58f2e
diff --git a/devices/danfoss.js b/devices/danfoss.js index <HASH>..<HASH> 100644 --- a/devices/danfoss.js +++ b/devices/danfoss.js @@ -79,7 +79,8 @@ module.exports = [ .withDescription('Mean radiator load for room calculated by gateway for load balancing purposes (-8000=undefined)') .withValueMin(-8000).withValueMax(2000), exposes.numeric('load_estimate', ea.STATE_GET) - .withDescription('Load estimate on this radiator'), + .withDescription('Load estimate on this radiator') + .withValueMin(-8000).withValueMax(3600), exposes.binary('preheat_status', ea.STATE_GET, true, false) .withDescription('Specific for pre-heat running in Zigbee Weekly Schedule mode'), exposes.enum('adaptation_run_status', ea.STATE_GET, ['none', 'in_progress', 'found', 'lost'])
Define `load_estimate` min/max value for Danfoss <I>G<I> (#<I>) * Update danfoss.js Hi, according to the Danfoss documentation the default value for load_estimate is -<I>, therefore min and max were added for this expose. * Update danfoss.js
Koenkk_zigbee-shepherd-converters
train
301ce6b4f0da9d01734dd3a0360bff535a8acad5
diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index <HASH>..<HASH> 100755 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -362,15 +362,10 @@ def run(args, dag=None): task = dag.get_task(task_id=args.task_id) ti = TaskInstance(task, args.execution_date) ti.refresh_from_db() - - log = logging.getLogger('airflow.task') - if args.raw: - log = logging.getLogger('airflow.task.raw') - - set_context(log, ti) + ti.init_run_context() hostname = socket.getfqdn() - log.info("Running on host %s", hostname) + log.info("Running %s on host %s", ti, hostname) with redirect_stdout(log, logging.INFO), redirect_stderr(log, logging.WARN): if args.local: @@ -428,13 +423,7 @@ def run(args, dag=None): if args.raw: return - # Force the log to flush. The flush is important because we - # might subsequently read from the log to insert into S3 or - # Google cloud storage. Explicitly close the handler is - # needed in order to upload to remote storage services. - for handler in log.handlers: - handler.flush() - handler.close() + logging.shutdown() def task_failed_deps(args): diff --git a/airflow/models.py b/airflow/models.py index <HASH>..<HASH> 100755 --- a/airflow/models.py +++ b/airflow/models.py @@ -1855,6 +1855,12 @@ class TaskInstance(Base, LoggingMixin): TI.state == State.RUNNING ).count() + def init_run_context(self): + """ + Sets the log context. + """ + self._set_context(self) + class TaskFail(Base): """ diff --git a/airflow/utils/log/logging_mixin.py b/airflow/utils/log/logging_mixin.py index <HASH>..<HASH> 100644 --- a/airflow/utils/log/logging_mixin.py +++ b/airflow/utils/log/logging_mixin.py @@ -30,8 +30,7 @@ class LoggingMixin(object): Convenience super-class to have a logger configured with the class name """ def __init__(self, context=None): - if context is not None: - set_context(self.log, context) + self._set_context(context) # We want to deprecate the logger property in Airflow 2.0 # The log property is the de facto standard in most programming languages @@ -56,6 +55,10 @@ class LoggingMixin(object): ) return self._log + def _set_context(self, context): + if context is not None: + set_context(self.log, context) + class StreamLogWriter(object): encoding = False
[AIRFLOW-<I>] Handle ti log entirely within ti Previously logging was set up outside a TaskInstance; this puts everything inside. Also properly closes the logging. Closes #<I> from bolkedebruin/AIRFLOW-<I>
apache_airflow
train
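As a side note on the flush/close loop removed above: logging.shutdown() performs the same flush-and-close over every handler the logging module knows about. A minimal sketch, with a hypothetical log file path:

    import logging

    log = logging.getLogger("example")
    log.addHandler(logging.FileHandler("/tmp/example.log"))  # hypothetical path
    log.error("task finished")

    # Flushes and closes all registered handlers - the one-call
    # replacement for iterating log.handlers manually.
    logging.shutdown()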
531df7a209968db7c20b014e6d62446f36ef6847
diff --git a/qdisc_linux.go b/qdisc_linux.go index <HASH>..<HASH> 100644 --- a/qdisc_linux.go +++ b/qdisc_linux.go @@ -278,9 +278,13 @@ func qdiscPayload(req *nl.NetlinkRequest, qdisc Qdisc) error { if qdisc.FlowDefaultRate > 0 { nl.NewRtAttrChild(options, nl.TCA_FQ_FLOW_DEFAULT_RATE, nl.Uint32Attr((uint32(qdisc.FlowDefaultRate)))) } + default: + options = nil } - req.AddData(options) + if options != nil { + req.AddData(options) + } return nil }
Avoid serializing empty TCA_OPTIONS in qdisc messages This was causing a QdiscReplace() call for a generic qdisc (QdiscType="clsact", Parent=HANDLE_CLSACT) to fail with the error "invalid argument" after the qdisc was first created.
vishvananda_netlink
train
b5da3e15d08c2eb98955ed6a4f7d9c4fd5a6e5d5
diff --git a/tsfresh/examples/driftbif_simulation.py b/tsfresh/examples/driftbif_simulation.py index <HASH>..<HASH> 100644 --- a/tsfresh/examples/driftbif_simulation.py +++ b/tsfresh/examples/driftbif_simulation.py @@ -132,9 +132,9 @@ def sample_tau(n=10, kappa_3=0.3, ratio=0.5, rel_increase=0.15): return tau.tolist() -def load_driftbif(n, l, m=2, classification=True, kappa_3=0.3, seed=False): +def load_driftbif(n, length, m=2, classification=True, kappa_3=0.3, seed=False): """ - Simulates n time-series with l time steps each for the m-dimensional velocity of a dissipative soliton + Simulates n time-series with length time steps each for the m-dimensional velocity of a dissipative soliton classification=True: target 0 means tau<=1/0.3, Dissipative Soliton with Brownian motion (purely noise driven) @@ -145,8 +145,8 @@ def load_driftbif(n, l, m=2, classification=True, kappa_3=0.3, seed=False): :param n: number of samples :type n: int - :param l: length of the time series - :type l: int + :param length: length of the time series + :type length: int :param m: number of spatial dimensions (default m=2) the dissipative soliton is propagating in :type m: int :param classification: distinguish between classification (default True) and regression target @@ -166,8 +166,8 @@ def load_driftbif(n, l, m=2, classification=True, kappa_3=0.3, seed=False): logging.warning("You set the dimension parameter for the dissipative soliton to m={}, however it is only" "properly defined for m=1 or m=2.".format(m)) - id = np.repeat(range(n), l * m) - dimensions = list(np.repeat(range(m), l)) * n + id = np.repeat(range(n), length * m) + dimensions = list(np.repeat(range(m), length)) * n labels = list() values = list() @@ -180,8 +180,8 @@ def load_driftbif(n, l, m=2, classification=True, kappa_3=0.3, seed=False): labels.append(ds.label) else: labels.append(ds.tau) - values.append(ds.simulate(l, v0=np.zeros(m)).transpose().flatten()) - time = np.stack([ds.delta_t * np.arange(l)] * n * m).flatten() + values.append(ds.simulate(length, v0=np.zeros(m)).transpose().flatten()) + time = np.stack([ds.delta_t * np.arange(length)] * n * m).flatten() df = pd.DataFrame({'id': id, "time": time, "value": np.stack(values).flatten(), "dimension": dimensions}) y = pd.Series(labels)
Replace l with length (#<I>)
blue-yonder_tsfresh
train
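The rename matters because a lone l reads like 1 or I (PEP 8 flags it as ambiguous, E741). A sketch of a call against the new signature; the argument values are arbitrary, and the (df, y) return shape is inferred from the diff:

    from tsfresh.examples.driftbif_simulation import load_driftbif

    # 'length' is the number of time steps per simulated series.
    df, y = load_driftbif(n=20, length=100, m=2, classification=True)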
10f69a859c8c664a8cdffa7f64359776f75ed31b
diff --git a/htmresearch/frameworks/nlp/classification_model.py b/htmresearch/frameworks/nlp/classification_model.py index <HASH>..<HASH> 100644 --- a/htmresearch/frameworks/nlp/classification_model.py +++ b/htmresearch/frameworks/nlp/classification_model.py @@ -298,7 +298,7 @@ class ClassificationModel(object): @returns (ClassificationModel) The loaded model instance """ - savedModelDir = os.path.abspath(savedModelDir) + savedModelDir = str(os.path.abspath(savedModelDir)) modelPickleFilePath = os.path.join(savedModelDir, "model.pkl") with open(modelPickleFilePath, "rb") as modelPickleFile:
Use string type for NLP model load path
numenta_htmresearch
train
7491b9a76ad7179a097fd26a2b556041a1441fb1
diff --git a/drivers_experimental_linux.go b/drivers_experimental_linux.go index <HASH>..<HASH> 100644 --- a/drivers_experimental_linux.go +++ b/drivers_experimental_linux.go @@ -2,14 +2,10 @@ package libnetwork -import ( - "github.com/docker/libnetwork/drivers/ipvlan" - "github.com/docker/libnetwork/drivers/macvlan" -) +import "github.com/docker/libnetwork/drivers/ipvlan" func additionalDrivers() []initializer { return []initializer{ - {macvlan.Init, "macvlan"}, {ipvlan.Init, "ipvlan"}, } } diff --git a/drivers_linux.go b/drivers_linux.go index <HASH>..<HASH> 100644 --- a/drivers_linux.go +++ b/drivers_linux.go @@ -3,6 +3,7 @@ package libnetwork import ( "github.com/docker/libnetwork/drivers/bridge" "github.com/docker/libnetwork/drivers/host" + "github.com/docker/libnetwork/drivers/macvlan" "github.com/docker/libnetwork/drivers/null" "github.com/docker/libnetwork/drivers/overlay" "github.com/docker/libnetwork/drivers/remote" @@ -12,6 +13,7 @@ func getInitializers() []initializer { in := []initializer{ {bridge.Init, "bridge"}, {host.Init, "host"}, + {macvlan.Init, "macvlan"}, {null.Init, "null"}, {remote.Init, "remote"}, {overlay.Init, "overlay"},
Moving MacVlan driver out of experimental
docker_libnetwork
train
c36985a93dea52726547ca24d9f596cc77bf2d56
diff --git a/app/actions/prottable/info.py b/app/actions/prottable/info.py index <HASH>..<HASH> 100644 --- a/app/actions/prottable/info.py +++ b/app/actions/prottable/info.py @@ -23,11 +23,7 @@ def count_peps_psms(proteindata, p_acc, pool): proteindata[p_acc][pool]['peptides'] = len(data['peptides']) -def add_protein_data(proteins, pgdb, headerfields, pool_to_output=False): - """First creates a map with all master proteins with data, - then outputs protein data dicts for rows of a tsv. If a pool - is given then only output for that pool will be shown in the - protein table.""" +def create_proteindata_map(pgdb, pool_to_output): protein_psms_data = pgdb.get_all_protein_psms_with_sets() proteindata = {} psmdata = next(protein_psms_data) @@ -42,13 +38,22 @@ def add_protein_data(proteins, pgdb, headerfields, pool_to_output=False): last_pool, last_prot = samplepool, p_acc add_record_to_proteindata(proteindata, p_acc, samplepool, psmdata) count_peps_psms(proteindata, last_prot, last_pool) - # now generate tsv output + return proteindata + + +def add_protein_data(proteins, pgdb, headerfields, pool_to_output=False): + """First creates a map with all master proteins with data, + then outputs protein data dicts for rows of a tsv. If a pool + is given then only output for that pool will be shown in the + protein table.""" + proteindata = create_proteindata_map(pgdb, pool_to_output) for protein in proteins: outprotein = {k: v for k, v in protein.items()} protein_acc = protein[prottabledata.HEADER_PROTEIN] if not protein_acc in proteindata: continue - outprotein.update(get_protein_data(proteindata, protein_acc, headerfields)) + outprotein.update(get_protein_data(proteindata, protein_acc, + headerfields)) outprotein = {k: str(v) for k, v in outprotein.items()} yield outprotein @@ -56,8 +61,6 @@ def add_protein_data(proteins, pgdb, headerfields, pool_to_output=False): def get_protein_data(proteindata, p_acc, headerfields): """Parses protein data for a certain protein into tsv output dictionary""" - description = 'na' - coverage = 'na' unipepcount = 'na' proteincount = 'na' outdict = {}
Split proteindata map creation from the output generator so it can be reused
glormph_msstitch
train
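The shape of the refactor above, reduced to a hedged skeleton (function names from the diff, bodies elided): building the map is now a callable of its own, so other code can obtain the proteindata map without running the tsv row generator.

    # Skeleton of the split; comments stand in for the real logic.
    def create_proteindata_map(pgdb, pool_to_output):
        proteindata = {}
        # aggregate per-protein PSM records here, as in the diff
        return proteindata

    def add_protein_data(proteins, pgdb, headerfields, pool_to_output=False):
        proteindata = create_proteindata_map(pgdb, pool_to_output)
        for protein in proteins:
            # merge proteindata into each row and yield it
            yield dict(protein)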
f8f01077ca0af52d6971b7872742710960c42bb0
diff --git a/grammpy_transforms/UnitRulesRemove/remove_unit_rules.py b/grammpy_transforms/UnitRulesRemove/remove_unit_rules.py index <HASH>..<HASH> 100644 --- a/grammpy_transforms/UnitRulesRemove/remove_unit_rules.py +++ b/grammpy_transforms/UnitRulesRemove/remove_unit_rules.py @@ -13,28 +13,31 @@ from typing import List from grammpy import * from .find_symbols_reachable_by_unit_rules import find_nonterminals_reachable_by_unit_rules + class ReducedUnitRule(Rule): by_rules = [] # type: List[Rule] end_rule = None # type: Rule -def _is_unit(rule: Rule) -> bool: +def _is_unit(rule): return len(rule.left) == 1 and len(rule.right) == 1 and \ isclass(rule.fromSymbol) and isclass(rule.toSymbol) and \ issubclass(rule.fromSymbol, Nonterminal) and issubclass(rule.toSymbol, Nonterminal) -def _create_rule(path: List[Rule], rule: Rule) -> ReducedUnitRule: - created = type("Reduced"+rule.__name__,(ReducedUnitRule,),ReducedUnitRule.__dict__.copy()) + +def _create_rule(path, rule): + created = type("Reduced" + rule.__name__, (ReducedUnitRule,), ReducedUnitRule.__dict__.copy()) created.rule = ([path[0].fromSymbol], rule.right) created.end_rule = rule created.by_rules = path return created -def remove_unit_rules(grammar: Grammar, transform_grammar=False) -> Grammar: + +def remove_unit_rules(grammar, transform_grammar=False): if transform_grammar is False: grammar = copy(grammar) - #get connections + # get connections res = find_nonterminals_reachable_by_unit_rules(grammar) - #iterate through rules + # iterate through rules for rule in grammar.rules(): if _is_unit(rule): grammar.remove_rule(rule) @@ -45,4 +48,3 @@ def remove_unit_rules(grammar: Grammar, transform_grammar=False) -> Grammar: created = _create_rule(path, rule) grammar.add_rule(created) return grammar -
Remove type hinting from the remove_unit_rules file
PatrikValkovic_grammpy
train
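For context: function annotations are Python 3 syntax, so under Python 2 the old signatures failed at parse time, before any code could run. A hedged reminder:

    # Python 3 only - a SyntaxError under Python 2's parser:
    #   def _is_unit(rule: Rule) -> bool: ...
    # Hence the annotation-free form kept by the commit:
    def _is_unit(rule):
        return len(rule.left) == 1 and len(rule.right) == 1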
956e49b69bbe409a9f528c33388e32a0767ae1c8
diff --git a/src/GDAO/Model/CollectionInterface.php b/src/GDAO/Model/CollectionInterface.php index <HASH>..<HASH> 100644 --- a/src/GDAO/Model/CollectionInterface.php +++ b/src/GDAO/Model/CollectionInterface.php @@ -226,26 +226,6 @@ interface CollectionInterface extends \ArrayAccess, \Countable, \IteratorAggrega * */ public function __set($key, \GDAO\Model\RecordInterface $val); - - /** - * - * ArrayAccess: set a key value; appends to the array when using [] - * notation. - * - * NOTE: Implementers of this class must make sure that $val is an instance - * of \GDAO\Model\RecordInterface else throw a - * \GDAO\Model\CollectionCanOnlyContainGDAORecordsException exception. - * - * @param string $key The requested key. - * - * @param \GDAO\Model\RecordInterface $val The value to set it to. - * - * @return void - * - * @throws \GDAO\Model\CollectionCanOnlyContainGDAORecordsException - * - */ - public function offsetSet($key, \GDAO\Model\RecordInterface $val); /** *
Removed offsetSet from CollectionInterface.
rotexsoft_gdao
train
5b0010cb2a180addd8cf8f911c66b6237ed4396f
diff --git a/thumbor/handlers/__init__.py b/thumbor/handlers/__init__.py index <HASH>..<HASH> 100644 --- a/thumbor/handlers/__init__.py +++ b/thumbor/handlers/__init__.py @@ -747,6 +747,7 @@ class ImageApiHandler(ContextHandler): return False return True + @gen.coroutine def write_file(self, id, body): storage = self.context.modules.upload_photo_storage - storage.put(id, body) + yield gen.maybe_future(storage.put(id, body)) diff --git a/thumbor/handlers/image_resource.py b/thumbor/handlers/image_resource.py index <HASH>..<HASH> 100644 --- a/thumbor/handlers/image_resource.py +++ b/thumbor/handlers/image_resource.py @@ -44,6 +44,8 @@ class ImageResourceHandler(ImageApiHandler): else: self._error(404, 'Image not found at the given URL') + @tornado.web.asynchronous + @tornado.gen.coroutine def put(self, id): id = id[:self.context.config.MAX_ID_LENGTH] # Check if image overwriting is allowed @@ -53,7 +55,7 @@ class ImageResourceHandler(ImageApiHandler): # Check if the image uploaded is valid if self.validate(self.request.body): - self.write_file(id, self.request.body) + yield gen.maybe_future(self.write_file(id, self.request.body)) self.set_status(204) @tornado.web.asynchronous @@ -68,7 +70,7 @@ class ImageResourceHandler(ImageApiHandler): # Check if image exists exists = yield gen.maybe_future(self.context.modules.storage.exists(id)) if exists: - self.context.modules.storage.remove(id) + yield gen.maybe_future(self.context.modules.storage.remove(id)) self.set_status(204) else: self._error(404, 'Image not found at the given URL')
Fix the bug where the put method is not asynchronous, which stalls the thumbor server (#<I>) * Fix bug where the put method blocks the tornado main thread, and the remove method is missing a yield * Add web.asynchronous and gen.coroutine to put
thumbor_thumbor
train
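The core trick above is tornado's gen.maybe_future, which lets one yield work for both synchronous and coroutine storage backends. A minimal sketch with hypothetical storage classes:

    from tornado import gen

    class SyncStorage(object):          # hypothetical
        def put(self, key, body):
            return "stored"

    class AsyncStorage(object):         # hypothetical
        @gen.coroutine
        def put(self, key, body):
            yield gen.moment            # stand-in for real async I/O
            raise gen.Return("stored")

    @gen.coroutine
    def write_file(storage, key, body):
        # maybe_future wraps a plain return value in a resolved future,
        # so the same yield works for either backend.
        result = yield gen.maybe_future(storage.put(key, body))
        raise gen.Return(result)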
1e1f967076c2e255cbca8dca0bc16f7ee135a5d4
diff --git a/storybook/main.js b/storybook/main.js index <HASH>..<HASH> 100644 --- a/storybook/main.js +++ b/storybook/main.js @@ -1,4 +1,12 @@ module.exports = { addons: ['@storybook/addon-storysource'], - stories: ['../src/js/components/**/stories/*.(ts|tsx|js|jsx)'], + stories: [ + '../src/js/components/**/stories/typescript/*.tsx', + '../src/js/components/**/stories/*.(ts|tsx|js|jsx)', + '../src/js/components/**/*stories.js', + '../src/js/contexts/**/*stories.js', + '../src/js/contexts/**/stories/typescript/*.tsx', + '../src/js/contexts/**/stories/*.(ts|tsx|js|jsx)', + '../src/js/all/**/stories/*.(ts|tsx|js|jsx)', + ], };
Updated loader of storybook to include context stories and 'All' stories (#<I>) * Updated loader of storybook to include context stories and 'All' stories. * Loaded missing typescript stories
grommet_grommet
train
eda0d6ed0b3facbdaa6a1e415775441a9d621e2d
diff --git a/src/Pdo.php b/src/Pdo.php index <HASH>..<HASH> 100644 --- a/src/Pdo.php +++ b/src/Pdo.php @@ -21,11 +21,9 @@ trait Pdo if (!isset($fields)) { $fields = []; foreach ($annotations['properties'] as $prop => $anno) { - if (isset($anno['Private'])) { - continue; - } - if (property_exists($this, $prop) - && !isset($anno['Virtual']) + if ($prop{0} != '_' + && property_exists($this, $prop) + && !isset($anno['Virtual'], $anno['Private']) && !is_array($this->$prop) ) { $fields[] = $prop;
Ugh, ignore here too - probably want to refactor that away
ornament-orm_core
train
8e6d7d81e5398a10b03cb99685463bdff6bb38d4
diff --git a/generators/app/index.js b/generators/app/index.js index <HASH>..<HASH> 100644 --- a/generators/app/index.js +++ b/generators/app/index.js @@ -69,6 +69,13 @@ module.exports = JhipsterGenerator.extend({ defaults: 'jhi' }); + // This adds support for a `--yarn` flag + this.option('yarn', { + desc: 'Use yarn instead of npm install', + type: Boolean, + defaults: false + }); + this.currentQuestion = 0; this.totalQuestions = constants.QUESTIONS; this.skipClient = this.configOptions.skipClient = this.options['skip-client'] || this.config.get('skipClient'); @@ -77,8 +84,9 @@ module.exports = JhipsterGenerator.extend({ this.jhiPrefix = this.configOptions.jhiPrefix || this.config.get('jhiPrefix') || this.options['jhi-prefix']; this.withEntities = this.options['with-entities']; this.checkInstall = this.options['check-install']; - + this.yarnInstall = this.configOptions.yarnInstall = this.options['yarn']; }, + initializing: { displayLogo: function () { this.printJHipsterLogo(); @@ -149,6 +157,21 @@ module.exports = JhipsterGenerator.extend({ }.bind(this)); }, + checkYarn: function () { + if (!this.checkInstall || this.skipClient || !this.yarn) return; + var done = this.async(); + exec('yarn --version', function (err) { + if (err) { + this.warning('yarn is not found on your computer.\n', + ' Using npm instead'); + this.yarnInstall = false; + } else { + this.yarnInstall = true; + } + done(); + }.bind(this)); + }, + validate: function () { if (this.skipServer && this.skipClient) { this.error(chalk.red('You can not pass both ' + chalk.yellow('--skip-client') + ' and ' + chalk.yellow('--skip-server') + ' together')); @@ -284,7 +307,6 @@ module.exports = JhipsterGenerator.extend({ }, writing: { - cleanup: function () { cleanup.cleanupOldFiles(this, this.javaDir, this.testDir); }, diff --git a/generators/client/index.js b/generators/client/index.js index <HASH>..<HASH> 100644 --- a/generators/client/index.js +++ b/generators/client/index.js @@ -92,6 +92,13 @@ module.exports = JhipsterClientGenerator.extend({ defaults: false }); + // This adds support for a `--yarn` flag + this.option('yarn', { + desc: 'Use yarn instead of npm install', + type: Boolean, + defaults: false + }); + this.skipServer = this.configOptions.skipServer || this.config.get('skipServer'); this.skipUserManagement = this.configOptions.skipUserManagement || this.options['skip-user-management'] || this.config.get('skipUserManagement'); this.authenticationType = this.options['auth']; @@ -110,6 +117,7 @@ module.exports = JhipsterClientGenerator.extend({ this.totalQuestions = this.configOptions.totalQuestions ? this.configOptions.totalQuestions : QUESTIONS; this.baseName = this.configOptions.baseName; this.logo = this.configOptions.logo; + this.yarnInstall = this.configOptions.yarnInstall || this.options['yarn']; }, initializing: { @@ -296,9 +304,15 @@ module.exports = JhipsterClientGenerator.extend({ } }; if (!this.options['skip-install']) { - this.installDependencies({ - callback: injectDependenciesAndConstants.bind(this) - }); + if (!this.yarnInstall) { + this.installDependencies({ + callback: injectDependenciesAndConstants.bind(this) + }); + } else { + this.spawnCommandSync('yarn'); + this.spawnCommandSync('bower', ['install']); + injectDependenciesAndConstants.call(this); + } } else { injectDependenciesAndConstants.call(this); }
Add --yarn flag to the generator
jhipster_generator-jhipster
train
409d03a0492eddd609c9dd23e526f008c0199f4f
diff --git a/public/assets/dashboard.js b/public/assets/dashboard.js index <HASH>..<HASH> 100644 --- a/public/assets/dashboard.js +++ b/public/assets/dashboard.js @@ -618,7 +618,7 @@ function menuDashboard() { $.each(menu, function (i, m) { $("#dashboard-menu").append(Mustache.render(tpl, m)) }) - if (getCookie("setor") === "admin") { + if (getCookie("id") === "1") { $("#dashboard-menu").append(Mustache.render(tpl, { "icon": "settings_ethernet", "title": "DEV",
Show dev tab only for user id 1
edineibauer_uebDashboard
train
46feef25707d9fb8c812ccabef9c9d10caa852ab
diff --git a/src/main/java/com/mistraltech/smog/core/PropertyMatcher.java b/src/main/java/com/mistraltech/smog/core/PropertyMatcher.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/mistraltech/smog/core/PropertyMatcher.java +++ b/src/main/java/com/mistraltech/smog/core/PropertyMatcher.java @@ -40,32 +40,32 @@ public class PropertyMatcher<T> extends BaseMatcher<T> implements PathProvider { private PathProvider pathProvider; /** - * Constructor. + * Minimal constructor. * * @param propertyName name of the attribute that this PropertyMatcher matches against in the target object - * @param pathProvider provides this PropertyMatcher with its path context. I.e. the property path that leads - * to the object containing this attribute in the target object graph */ - public PropertyMatcher(String propertyName, PathProvider pathProvider) { - this(propertyName); - setPathProvider(pathProvider); + public PropertyMatcher(String propertyName) { + if (propertyName == null) { + throw new IllegalArgumentException("No property name"); + } + this.propertyName = propertyName; } /** - * Constructor that does not take a PathProvider. + * Constructor that takes and assigns a {@link PathProvider}. * * @param propertyName name of the attribute that this PropertyMatcher matches against in the target object + * @param pathProvider provides this PropertyMatcher with its path context. I.e. the property path that leads + * to the object containing this attribute in the target object graph */ - public PropertyMatcher(String propertyName) { - if (propertyName == null) { - throw new IllegalArgumentException("No property name"); - } - this.propertyName = propertyName; + public PropertyMatcher(String propertyName, PathProvider pathProvider) { + this(propertyName); + setPathProvider(pathProvider); } /** * Sets the path provider, which provides this PropertyMatcher with its path context. I.e. the property path that leads - * to the object containing this attribute in the target object graph + * to the object containing this attribute in the target object graph. * * @param pathProvider the path provider */ diff --git a/src/main/java/com/mistraltech/smog/core/ReflectingPropertyMatcher.java b/src/main/java/com/mistraltech/smog/core/ReflectingPropertyMatcher.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/mistraltech/smog/core/ReflectingPropertyMatcher.java +++ b/src/main/java/com/mistraltech/smog/core/ReflectingPropertyMatcher.java @@ -9,14 +9,14 @@ import java.lang.reflect.Method; public class ReflectingPropertyMatcher<T> extends PropertyMatcher<T> { - public ReflectingPropertyMatcher(String propertyName, PathProvider pathProvider) { - super(propertyName, pathProvider); - } - public ReflectingPropertyMatcher(String propertyName) { super(propertyName); } + public ReflectingPropertyMatcher(String propertyName, PathProvider pathProvider) { + super(propertyName, pathProvider); + } + @Override public boolean matches(Object item) { return super.matches(getPropertyValue(item));
Trivial re-organisation of constructors in PropertyMatcher.
mistraltechnologies_smog
train
7d66838fe3137ca120ce4f731c8a293cfcf0b949
diff --git a/autoinput.js b/autoinput.js index <HASH>..<HASH> 100644 --- a/autoinput.js +++ b/autoinput.js @@ -47,6 +47,6 @@ function wrapAndJoin(pm, pos, type, attrs = null, predicate = null) { } function setAs(pm, pos, type, attrs) { - pm.apply(pm.tr.setType(pos, pos, new Node(type, attrs)) + pm.apply(pm.tr.setBlockType(pos, pos, new Node(type, attrs)) .delete(new Pos(pos.path, 0), pos)) }
Fix some brokenness in the block menu
ProseMirror_prosemirror-inputrules
train
2c6c30bcc6bcf3fab166ec0db75d5d51f253113f
diff --git a/rb/lib/selenium/webdriver/ie/server.rb b/rb/lib/selenium/webdriver/ie/server.rb index <HASH>..<HASH> 100644 --- a/rb/lib/selenium/webdriver/ie/server.rb +++ b/rb/lib/selenium/webdriver/ie/server.rb @@ -4,13 +4,14 @@ module Selenium class Server STOP_TIMEOUT = 5 + DEPRECATION_WARNING = %{warning: the IE driver is moving the a standalone executable. Please download the IEDriverServer from http://code.google.com/p/selenium/downloads/list and place the executable on your PATH.\nFalling back to bundled DLLs for now..} def self.get binary = Platform.find_binary("IEDriverServer") if binary new(binary) else - # TODO: deprecation warning + warn DEPRECATION_WARNING require 'selenium/webdriver/ie/in_process_server' InProcessServer.new end
JariBakken: Deprecation warning when the standalone IE server is missing. r<I>
SeleniumHQ_selenium
train
4ff0185add426a8cfaa1ba75e675500f5f48a436
diff --git a/builder/virtualbox/driver.go b/builder/virtualbox/driver.go index <HASH>..<HASH> 100644 --- a/builder/virtualbox/driver.go +++ b/builder/virtualbox/driver.go @@ -50,6 +50,9 @@ func (d *VBox42Driver) IsRunning(name string) (bool, error) { } for _, line := range strings.Split(stdout.String(), "\n") { + // Need to trim off CR character when running in windows + line = strings.TrimRight(line, "\r"); + if line == `VMState="running"` { return true, nil } diff --git a/communicator/ssh/communicator.go b/communicator/ssh/communicator.go index <HASH>..<HASH> 100644 --- a/communicator/ssh/communicator.go +++ b/communicator/ssh/communicator.go @@ -130,6 +130,11 @@ func (c *comm) Upload(path string, input io.Reader) error { target_dir := filepath.Dir(path) target_file := filepath.Base(path) + // On windows, filepath.Dir uses backslash seperators (ie. "\tmp"). + // This does not work when the target host is unix. Switch to forward slash + // which works for unix and windows + target_dir = filepath.ToSlash(target_dir) + // Start the sink mode on the other side // TODO(mitchellh): There are probably issues with shell escaping the path log.Println("Starting remote scp process in sink mode")
Windows fixes: strip carriage returns from the ends of lines in virtualbox/driver.go, fixing the wait for virtualbox host shutdown. Fix the target upload directory path in communicator.go to use forward slashes. (When running on Windows, path/filepath returns backslashes, which does not work when the target host is unix.)
hashicorp_packer
train
37626cbba117aaae0475fdba3212005c0301840b
diff --git a/Kwf/Assets/Dispatcher.php b/Kwf/Assets/Dispatcher.php index <HASH>..<HASH> 100644 --- a/Kwf/Assets/Dispatcher.php +++ b/Kwf/Assets/Dispatcher.php @@ -164,7 +164,7 @@ class Kwf_Assets_Dispatcher $contents = $dependency->getContents($language); $mtime = $dependency->getMTime(); } - if ($extension == 'js') $mimeType = 'text/javascript; charset=utf-8'; + if ($extension == 'js' || $extension == 'defer.js') $mimeType = 'text/javascript; charset=utf-8'; else if ($extension == 'css' || $extension == 'printcss') $mimeType = 'text/css; charset=utf8'; } else { if ($dependency instanceof Kwf_Assets_Package) { diff --git a/Kwf/Assets/Package.php b/Kwf/Assets/Package.php index <HASH>..<HASH> 100644 --- a/Kwf/Assets/Package.php +++ b/Kwf/Assets/Package.php @@ -168,7 +168,7 @@ class Kwf_Assets_Package //manually build json, names array can be relatively large and merging all entries would be slow if ($mimeType == 'text/javascript') $ext = 'js'; - if ($mimeType == 'text/javascript; defer') $ext = 'defer.js'; + else if ($mimeType == 'text/javascript; defer') $ext = 'defer.js'; else if ($mimeType == 'text/css') $ext = 'css'; else if ($mimeType == 'text/css; media=print') $ext = 'printcss'; $file = $this->getPackageUrl($ext, $language);
send correct mime type for defer.js files
koala-framework_koala-framework
train
40f5b885919b6f69a73aae9aafd33c895d58db5d
diff --git a/lib/honeybadger.js b/lib/honeybadger.js index <HASH>..<HASH> 100644 --- a/lib/honeybadger.js +++ b/lib/honeybadger.js @@ -56,7 +56,7 @@ function Honeybadger(opts) { } function withUncaught() { - if (!uncaughtExceptionInstalled) { + if (this.reportUncaught !== false && !uncaughtExceptionInstalled) { var self = this; uncaughtExceptionHandler = function (err) { self.notify(err, function() {
in withUncaught, check this.reportUncaught before creating listener
honeybadger-io_honeybadger-node
train
ca3fd9b481e45e1eaed02bb345f6146f7f46d25c
diff --git a/osmdroid-android/src/main/java/org/osmdroid/tileprovider/cachemanager/CacheManager.java b/osmdroid-android/src/main/java/org/osmdroid/tileprovider/cachemanager/CacheManager.java index <HASH>..<HASH> 100644 --- a/osmdroid-android/src/main/java/org/osmdroid/tileprovider/cachemanager/CacheManager.java +++ b/osmdroid-android/src/main/java/org/osmdroid/tileprovider/cachemanager/CacheManager.java @@ -806,14 +806,14 @@ public class CacheManager { public static class CacheManagerTask extends AsyncTask<Object, Integer, Integer> { private final CacheManager mManager; private final CacheManagerAction mAction; - private final Iterable<Long> mTiles; + private final IterableWithSize<Long> mTiles; private final int mZoomMin; private final int mZoomMax; private final ArrayList<CacheManagerCallback> mCallbacks = new ArrayList<>(); private CacheManagerTask(final CacheManager pManager, final CacheManagerAction pAction, - final Iterable<Long> pTiles, - final int pZoomMin, final int pZoomMax) { + final IterableWithSize<Long> pTiles, + final int pZoomMin, final int pZoomMax) { mManager = pManager; mAction = pAction; mTiles = pTiles; @@ -824,7 +824,7 @@ public class CacheManager { public CacheManagerTask(final CacheManager pManager, final CacheManagerAction pAction, final List<Long> pTiles, final int pZoomMin, final int pZoomMax) { - this(pManager, pAction, (Iterable<Long>) pTiles, pZoomMin, pZoomMax); + this(pManager, pAction, new ListWrapper<>(pTiles), pZoomMin, pZoomMax); } public CacheManagerTask(final CacheManager pManager, final CacheManagerAction pAction, @@ -847,12 +847,7 @@ public class CacheManager { @Override protected void onPreExecute(){ - int total = 0; - if (mTiles instanceof IterableWithSize) { - total = ((IterableWithSize) mTiles).size(); - } else if (mTiles instanceof Collection) { - total = ((Collection) mTiles).size(); - } + final int total = mTiles.size(); for (final CacheManagerCallback callback : mCallbacks) { try { callback.setPossibleTilesInArea(total); @@ -983,6 +978,24 @@ public class CacheManager { boolean tileAction(final long pMapTileIndex); } + private static class ListWrapper<T> implements IterableWithSize<T> { + private final List<T> list; + + private ListWrapper(List<T> list) { + this.list = list; + } + + @Override + public int size() { + return list.size(); + } + + @Override + public Iterator<T> iterator() { + return list.iterator(); + } + } + interface IterableWithSize<T> extends Iterable<T> { int size(); }
feature/#<I>: Add a common type for List and IterableWithSize Introduce ListWrapper to make it possible to use the same IterableWithSize<T> reference for both List<T> and IterableWithSize<T>.
osmdroid_osmdroid
train
81f8100083b234cf2391b0e0a6301019cb520145
diff --git a/dynamic_initial_data/base.py b/dynamic_initial_data/base.py index <HASH>..<HASH> 100644 --- a/dynamic_initial_data/base.py +++ b/dynamic_initial_data/base.py @@ -163,7 +163,7 @@ class InitialDataUpdater(object): registered_for_deletion_receipts, ['model_obj_type_id', 'model_obj_id'], update_fields=['register_time']) # Delete all receipts and their associated model objects that weren't updated - for receipt in RegisteredForDeletionReceipt.objects.exclude(register_time=now).prefetch_related('model_obj'): + for receipt in RegisteredForDeletionReceipt.objects.exclude(register_time=now): try: receipt.model_obj.delete() except: diff --git a/dynamic_initial_data/version.py b/dynamic_initial_data/version.py index <HASH>..<HASH> 100644 --- a/dynamic_initial_data/version.py +++ b/dynamic_initial_data/version.py @@ -1 +1 @@ -__version__ = '0.3.3' +__version__ = '0.3.4'
Removed prefetch_related since it fails on invalid ctypes
ambitioninc_django-dynamic-initial-data
train
4d5ac91f882c7a1abf787b4b05a7f762c0e60884
diff --git a/pkg/storage/cacher.go b/pkg/storage/cacher.go index <HASH>..<HASH> 100644 --- a/pkg/storage/cacher.go +++ b/pkg/storage/cacher.go @@ -367,7 +367,11 @@ func (c *Cacher) List(ctx context.Context, key string, resourceVersion string, p return err } + trace := util.NewTrace(fmt.Sprintf("cacher %v: List", c.objectType.String())) + defer trace.LogIfLong(250 * time.Millisecond) + c.ready.wait() + trace.Step("Ready") // List elements from cache, with at least 'listRV'. listPtr, err := meta.GetItemsPtr(listObj) @@ -380,10 +384,11 @@ func (c *Cacher) List(ctx context.Context, key string, resourceVersion string, p } filter := filterFunction(key, c.keyFunc, pred) - objs, readResourceVersion, err := c.watchCache.WaitUntilFreshAndList(listRV) + objs, readResourceVersion, err := c.watchCache.WaitUntilFreshAndList(listRV, trace) if err != nil { return fmt.Errorf("failed to wait for fresh list: %v", err) } + trace.Step(fmt.Sprintf("Listed %d items from cache", len(objs))) for _, obj := range objs { object, ok := obj.(runtime.Object) if !ok { @@ -393,6 +398,7 @@ func (c *Cacher) List(ctx context.Context, key string, resourceVersion string, p listVal.Set(reflect.Append(listVal, reflect.ValueOf(object).Elem())) } } + trace.Step(fmt.Sprintf("Filtered %d items", listVal.Len())) if c.versioner != nil { if err := c.versioner.UpdateList(listObj, readResourceVersion); err != nil { return err diff --git a/pkg/storage/watch_cache.go b/pkg/storage/watch_cache.go index <HASH>..<HASH> 100644 --- a/pkg/storage/watch_cache.go +++ b/pkg/storage/watch_cache.go @@ -27,6 +27,7 @@ import ( "k8s.io/kubernetes/pkg/api/meta" "k8s.io/kubernetes/pkg/client/cache" "k8s.io/kubernetes/pkg/runtime" + "k8s.io/kubernetes/pkg/util" "k8s.io/kubernetes/pkg/util/clock" "k8s.io/kubernetes/pkg/watch" ) @@ -206,7 +207,7 @@ func (w *watchCache) List() []interface{} { return w.store.List() } -func (w *watchCache) WaitUntilFreshAndList(resourceVersion uint64) ([]interface{}, uint64, error) { +func (w *watchCache) WaitUntilFreshAndList(resourceVersion uint64, trace *util.Trace) ([]interface{}, uint64, error) { startTime := w.clock.Now() go func() { // Wake us up when the time limit has expired. 
The docs @@ -228,6 +229,9 @@ func (w *watchCache) WaitUntilFreshAndList(resourceVersion uint64) ([]interface{ } w.cond.Wait() } + if trace != nil { + trace.Step("Cache is fresh enough") + } return w.store.List(), w.resourceVersion, nil } diff --git a/pkg/storage/watch_cache_test.go b/pkg/storage/watch_cache_test.go index <HASH>..<HASH> 100644 --- a/pkg/storage/watch_cache_test.go +++ b/pkg/storage/watch_cache_test.go @@ -248,7 +248,7 @@ func TestWaitUntilFreshAndList(t *testing.T) { store.Add(makeTestPod("bar", 5)) }() - list, resourceVersion, err := store.WaitUntilFreshAndList(5) + list, resourceVersion, err := store.WaitUntilFreshAndList(5, nil) if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -278,7 +278,7 @@ func TestWaitUntilFreshAndListTimeout(t *testing.T) { store.Add(makeTestPod("bar", 5)) }() - _, _, err := store.WaitUntilFreshAndList(5) + _, _, err := store.WaitUntilFreshAndList(5, nil) if err == nil { t.Fatalf("unexpected lack of timeout error") } @@ -300,7 +300,7 @@ func TestReflectorForWatchCache(t *testing.T) { store := newTestWatchCache(5) { - _, version, err := store.WaitUntilFreshAndList(0) + _, version, err := store.WaitUntilFreshAndList(0, nil) if err != nil { t.Fatalf("unexpected error: %v", err) } @@ -323,7 +323,7 @@ func TestReflectorForWatchCache(t *testing.T) { r.ListAndWatch(wait.NeverStop) { - _, version, err := store.WaitUntilFreshAndList(10) + _, version, err := store.WaitUntilFreshAndList(10, nil) if err != nil { t.Fatalf("unexpected error: %v", err) }
Add tracing to listing in Cacher
kubernetes_kubernetes
train
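The Trace threaded through List above records named steps and logs them only if the whole operation exceeds a threshold. A hedged Python analogue of that utility (the real one is Go's util.Trace):

    import time

    class Trace(object):
        """Rough analogue of util.Trace: timestamped steps, logged if slow."""
        def __init__(self, name):
            self.name, self.start, self.steps = name, time.time(), []

        def step(self, msg):
            self.steps.append((time.time() - self.start, msg))

        def log_if_long(self, threshold):
            if time.time() - self.start > threshold:
                for offset, msg in self.steps:
                    print("%s: +%.3fs %s" % (self.name, offset, msg))

    trace = Trace("cacher List")
    trace.step("Ready")
    trace.step("Listed 42 items from cache")
    trace.log_if_long(0.250)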
c14429fea6e3cd2b3ba09a4b0ce3960e96a6584b
diff --git a/library/PHPIMS/Client.php b/library/PHPIMS/Client.php index <HASH>..<HASH> 100644 --- a/library/PHPIMS/Client.php +++ b/library/PHPIMS/Client.php @@ -118,6 +118,7 @@ class Client { * * @param string $path Path to the local image * @return string + * @throws PHPIMS\Client\Exception */ public function getImageIdentifier($path) { if (!is_file($path)) { @@ -138,7 +139,6 @@ class Client { * @param string $path Path to the local image * @param array $metadata Metadata to attach to the image * @return PHPIMS\Client\Response - * @throws PHPIMS\Client\Exception */ public function addImage($path, array $metadata = null) { $imageIdentifier = $this->getImageIdentifier($path); diff --git a/tests/PHPIMS/ClientTest.php b/tests/PHPIMS/ClientTest.php index <HASH>..<HASH> 100644 --- a/tests/PHPIMS/ClientTest.php +++ b/tests/PHPIMS/ClientTest.php @@ -115,8 +115,13 @@ class ClientTest extends \PHPUnit_Framework_TestCase { * @expectedException PHPIMS\Client\Exception * @expectedExceptionMessage File does not exist: foobar */ - public function testAddImageThatDoesNotExist() { - $this->client->addImage('foobar'); + public function testGenerateImageIdentifierForFileThatDoesNotExist() { + $this->client->getImageIdentifier('foobar'); + } + + public function testGenerateImageIdentifier() { + $hash = $this->client->getImageIdentifier(__DIR__ . '/_files/image.png'); + $this->assertSame('929db9c5fc3099f7576f5655207eba47.png', $hash); } public function testAddImage() {
Fixed phpdoc and added a test
imbo_imboclient-php
train
7f8d5dffb58d847e092ed29dd5f2731b5afce015
diff --git a/src/org/fit/cssbox/layout/BlockBox.java b/src/org/fit/cssbox/layout/BlockBox.java index <HASH>..<HASH> 100644 --- a/src/org/fit/cssbox/layout/BlockBox.java +++ b/src/org/fit/cssbox/layout/BlockBox.java @@ -1828,12 +1828,16 @@ public class BlockBox extends ElementBox margin.right = contw - content.width - border.left - padding.left - padding.right - border.right - margin.left; //if (margin.right < 0) margin.right = 0; //"treated as zero" + if (margin.right < 0 && cblock.canIncreaseWidth()) + margin.right = 0; } else //everything specified, ignore right margin { margin.right = contw - content.width - border.left - padding.left - padding.right - border.right - margin.left; //if (margin.right < 0) margin.right = 0; //"treated as zero" + if (margin.right < 0 && cblock.canIncreaseWidth()) + margin.right = 0; } } } diff --git a/src/org/fit/cssbox/layout/TableBox.java b/src/org/fit/cssbox/layout/TableBox.java index <HASH>..<HASH> 100644 --- a/src/org/fit/cssbox/layout/TableBox.java +++ b/src/org/fit/cssbox/layout/TableBox.java @@ -118,6 +118,8 @@ public class TableBox extends BlockBox //calculate the column widths calculateColumns(); + for (int i = 0; i < getColumnCount(); i++) + System.out.println("Col " + i + ": " + columns.elementAt(i).getContentWidth()); //layout the bodies if (header != null) diff --git a/src/org/fit/cssbox/layout/Viewport.java b/src/org/fit/cssbox/layout/Viewport.java index <HASH>..<HASH> 100644 --- a/src/org/fit/cssbox/layout/Viewport.java +++ b/src/org/fit/cssbox/layout/Viewport.java @@ -135,6 +135,12 @@ public class Viewport extends BlockBox return false; } + @Override + public boolean canIncreaseWidth() + { + return true; + } + @Override public boolean isVisible() {
Introduced block boxes adjustable by contents (for tables)
radkovo_CSSBox
train
183f2f3f97e4abf5beadd7f759d2970d48b3af8c
diff --git a/store/helper/helper.go b/store/helper/helper.go index <HASH>..<HASH> 100644 --- a/store/helper/helper.go +++ b/store/helper/helper.go @@ -204,6 +204,7 @@ type HotTableIndex struct { func (h *Helper) FetchRegionTableIndex(metrics map[uint64]RegionMetric, allSchemas []*model.DBInfo) ([]HotTableIndex, error) { hotTables := make([]HotTableIndex, 0, len(metrics)) for regionID, regionMetric := range metrics { + regionMetric := regionMetric t := HotTableIndex{RegionID: regionID, RegionMetric: &regionMetric} region, err := h.RegionCache.LocateRegionByID(tikv.NewBackofferWithVars(context.Background(), 500, nil), regionID) if err != nil { diff --git a/store/helper/helper_test.go b/store/helper/helper_test.go index <HASH>..<HASH> 100644 --- a/store/helper/helper_test.go +++ b/store/helper/helper_test.go @@ -27,6 +27,7 @@ import ( "github.com/pingcap/parser/model" "github.com/pingcap/tidb/store/helper" "github.com/pingcap/tidb/store/mockstore" + "github.com/pingcap/tidb/store/mockstore/cluster" "github.com/pingcap/tidb/store/tikv" "github.com/pingcap/tidb/util/pdapi" "go.uber.org/zap" @@ -63,7 +64,12 @@ func (s *mockStore) TLSConfig() *tls.Config { func (s *HelperTestSuite) SetUpSuite(c *C) { url := s.mockPDHTTPServer(c) time.Sleep(100 * time.Millisecond) - mockTikvStore, err := mockstore.NewMockStore() + mockTikvStore, err := mockstore.NewMockStore( + mockstore.WithClusterInspector(func(c cluster.Cluster) { + mockstore.BootstrapWithMultiRegions(c, []byte("x")) + }), + ) + s.store = &mockStore{ mockTikvStore.(tikv.Storage), []string{url[len("http://"):]}, @@ -78,18 +84,25 @@ func (s *HelperTestSuite) TestHotRegion(c *C) { } regionMetric, err := h.FetchHotRegion(pdapi.HotRead) c.Assert(err, IsNil, Commentf("err: %+v", err)) - expected := make(map[uint64]helper.RegionMetric) - expected[1] = helper.RegionMetric{ - FlowBytes: 100, - MaxHotDegree: 1, - Count: 0, + expected := map[uint64]helper.RegionMetric{ + 2: { + FlowBytes: 100, + MaxHotDegree: 1, + Count: 0, + }, + 4: { + FlowBytes: 200, + MaxHotDegree: 2, + Count: 0, + }, } c.Assert(regionMetric, DeepEquals, expected) dbInfo := &model.DBInfo{ Name: model.NewCIStr("test"), } c.Assert(err, IsNil) - _, err = h.FetchRegionTableIndex(regionMetric, []*model.DBInfo{dbInfo}) + res, err := h.FetchRegionTableIndex(regionMetric, []*model.DBInfo{dbInfo}) + c.Assert(res[0].RegionMetric, Not(Equals), res[1].RegionMetric) c.Assert(err, IsNil, Commentf("err: %+v", err)) } @@ -141,9 +154,14 @@ func (s *HelperTestSuite) mockHotRegionResponse(w http.ResponseWriter, req *http RegionsStat: []helper.RegionStat{ { FlowBytes: 100, - RegionID: 1, + RegionID: 2, HotDegree: 1, }, + { + FlowBytes: 200, + RegionID: 4, + HotDegree: 2, + }, }, } resp := helper.StoreHotRegionInfos{ diff --git a/store/tikv/test_util.go b/store/tikv/test_util.go index <HASH>..<HASH> 100644 --- a/store/tikv/test_util.go +++ b/store/tikv/test_util.go @@ -18,7 +18,7 @@ import ( "github.com/pingcap/errors" "github.com/pingcap/tidb/config" "github.com/pingcap/tidb/kv" - "github.com/tikv/pd/client" + pd "github.com/tikv/pd/client" ) // NewTestTiKVStore creates a test store with Option
infoschema: fix inaccurate statistics of TIDB_HOT_REGIONS (#<I>)
pingcap_tidb
train
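The one-line fix above (regionMetric := regionMetric) gives each iteration its own variable, so &regionMetric no longer aliases a single shared slot. Python has the same trap when closures capture a loop variable; an illustrative sketch:

    # Buggy: every lambda closes over the same loop variable.
    handlers = [lambda: metric for metric in (1, 2, 3)]
    print([h() for h in handlers])            # [3, 3, 3]

    # Fixed: bind a per-iteration copy, the analogue of the Go shadowing.
    handlers = [lambda metric=metric: metric for metric in (1, 2, 3)]
    print([h() for h in handlers])            # [1, 2, 3]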
6825ca80c134687b240b439cb4add7c69f8d8e10
diff --git a/src/cf/commands/application/push.go b/src/cf/commands/application/push.go index <HASH>..<HASH> 100644 --- a/src/cf/commands/application/push.go +++ b/src/cf/commands/application/push.go @@ -137,6 +137,11 @@ func (cmd *Push) bindAppToRoute(app cf.Application, params cf.AppParams, didCrea return } + if params.Has("no-route") && params.Get("no-route") == true { + cmd.ui.Say("App %s is a worker, skipping route creation", terminal.EntityNameColor(app.Name)) + return + } + routeFlagsPresent := c.String("n") != "" || c.String("d") != "" || c.Bool("no-hostname") if len(app.Routes) > 0 && !routeFlagsPresent { return diff --git a/src/cf/commands/application/push_test.go b/src/cf/commands/application/push_test.go index <HASH>..<HASH> 100644 --- a/src/cf/commands/application/push_test.go +++ b/src/cf/commands/application/push_test.go @@ -653,6 +653,25 @@ func TestPushingAppWithNoHostname(t *testing.T) { assert.Equal(t, deps.routeRepo.CreatedDomainGuid, "bar-domain-guid") } +func TestPushingAppAsWorker(t *testing.T) { + deps := getPushDependencies() + deps.appRepo.ReadNotFound = true + + workerManifest := singleAppManifest() + workerManifest.Applications[0].Set("no-route", true) + deps.manifestRepo.ReadManifestManifest = workerManifest + + ui := callPush(t, []string{ + "worker-app", + }, deps) + + testassert.SliceContains(t, ui.Outputs, testassert.Lines{ + {"worker-app", "is a worker", "skipping route creation"}, + }) + assert.Equal(t, deps.routeRepo.BoundAppGuid, "") + assert.Equal(t, deps.routeRepo.BoundRouteGuid, "") +} + func TestPushingAppWithMemoryInMegaBytes(t *testing.T) { deps := getPushDependencies() deps.appRepo.ReadNotFound = true diff --git a/src/cf/manifest/manifest.go b/src/cf/manifest/manifest.go index <HASH>..<HASH> 100644 --- a/src/cf/manifest/manifest.go +++ b/src/cf/manifest/manifest.go @@ -117,7 +117,7 @@ func mapToAppParams(yamlMap generic.Map) (appParams cf.AppParams, errs ManifestE return } - for _, key := range []string{"buildpack", "command", "disk_quota", "domain", "host", "name", "path", "stack"} { + for _, key := range []string{"buildpack", "command", "disk_quota", "domain", "host", "name", "path", "stack", "no-route"} { if yamlMap.Has(key) { setStringVal(appParams, key, yamlMap.Get(key), &errs) }
Respect "no-route" bool in manifests This will prevent `gcf push` from creating a route for you automatically, and is mostly useful for worker apps. I expect that this will be a lot more useful after #<I> hits.
cloudfoundry_cli
train
24dfc05881662808e165b106c86187cbc3be3f9b
diff --git a/lib/model/document.rb b/lib/model/document.rb index <HASH>..<HASH> 100644 --- a/lib/model/document.rb +++ b/lib/model/document.rb @@ -252,6 +252,9 @@ module SimpleXml tree = SubTree.new(sub_tree_def, self) @sub_tree_map[tree.id] = tree end + sub_tree_map.values.each do |tree| + tree.process + end end def extract_stratifications diff --git a/lib/model/precondition.rb b/lib/model/precondition.rb index <HASH>..<HASH> 100644 --- a/lib/model/precondition.rb +++ b/lib/model/precondition.rb @@ -220,7 +220,7 @@ module SimpleXml end def copy(other) - @id = other.id + @id = other.id rescue binding.pry @preconditions = other.preconditions @reference = other.reference @conjunction_code = other.conjunction_code diff --git a/lib/model/sub_tree.rb b/lib/model/sub_tree.rb index <HASH>..<HASH> 100644 --- a/lib/model/sub_tree.rb +++ b/lib/model/sub_tree.rb @@ -3,7 +3,7 @@ module SimpleXml class SubTree include SimpleXml::Utilities - attr_accessor :id, :name, :precondition + attr_reader :id, :name, :precondition def initialize(entry, doc, negated=false) @doc = doc @@ -13,14 +13,24 @@ module SimpleXml @name = attr_val('@displayName') children = children_of(@entry) + raise "multiple children of subtree... not clear how to handle this" if children.length > 1 + @child = children.first + @processed = false + end + + def process + return if @processed preconditions = [] - children.each do |child| - preconditions << Precondition.new(child, doc) - end - raise "multiple children of subtree... not clear how to handle this" if preconditions.length > 1 + preconditions << Precondition.new(@child, @doc) @precondition = preconditions.first + @processed = true convert_to_variable if attr_val('@qdmVariable') == 'true' - end + end + + def precondition + process + @precondition + end def convert_to_variable # wrap a reference precondition with a parent @@ -41,6 +51,7 @@ module SimpleXml end def convert_to_data_criteria(operator='clause') + process DataCriteria.convert_precondition_to_criteria(@precondition, @doc, operator) end end
Add support for processing nested subtrees as needed
projecttacoma_simplexml_parser
train
77c79f983be811aa3f4875083c1b1f4e95507a16
diff --git a/productmd/common.py b/productmd/common.py index <HASH>..<HASH> 100644 --- a/productmd/common.py +++ b/productmd/common.py @@ -473,7 +473,23 @@ def _parse_release_id_part(release_id, prefix=""): short, version = release_id.split("-") release_type = "ga" else: - short, version, release_type = release_id.rsplit("-", 2) + release_type = None + for type_ in RELEASE_TYPES: + # Try to find a known release type. + if release_id.endswith(type_): + release_type = type_ + break + + if release_type: + # Found, remove it from the parsed string (because there could be a + # dash causing problems). + release_id = release_id[:-len(release_type)] + + short, version, release_type_extracted = release_id.rsplit("-", 2) + + # If known release type is found, use it; otherwise fall back to the + # one we parsed out. + release_type = release_type or release_type_extracted result = { "short": short, "version": version,
Fix parsing of release ids with a dash in the type We have added the updates-testing type, which breaks the parsing function. Renaming it is not an option at this point, so the parsing function is updated to check the known list of types first and only then fall back to an arbitrary value.
release-engineering_productmd
train
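Worked through on a made-up release id, the fixed algorithm peels a known dashed type off the end before splitting; the value below is illustrative, not from the repository:

    # Plain rsplit would mis-split "fedora-25-updates-testing" into
    # ("fedora-25", "updates", "testing"). The fix scans known types first:
    release_id = "fedora-25-updates-testing"
    release_type = "updates-testing"            # found in RELEASE_TYPES
    rest = release_id[:-len(release_type)]      # "fedora-25-"
    short, version, extracted = rest.rsplit("-", 2)
    release_type = release_type or extracted
    print(short, version, release_type)         # fedora 25 updates-testing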
cda8fd632a67d6503811f69d81578f2ab41f3b6d
diff --git a/keyring/backends/kwallet.py b/keyring/backends/kwallet.py index <HASH>..<HASH> 100644 --- a/keyring/backends/kwallet.py +++ b/keyring/backends/kwallet.py @@ -135,8 +135,6 @@ class DBusKeyring(KeyringBackend): ) return SimpleCredential(str(username), str(password)) - return None - def set_password(self, service, username, password): """Set password for the username of the service""" if not self.connected(service):
Omit 'return None' as it's implied.
jaraco_keyring
train
33e3ce9639320b4a047dd2e0607168e03de74fc6
diff --git a/ding0/grid/mv_grid/solvers/base.py b/ding0/grid/mv_grid/solvers/base.py index <HASH>..<HASH> 100644 --- a/ding0/grid/mv_grid/solvers/base.py +++ b/ding0/grid/mv_grid/solvers/base.py @@ -193,10 +193,18 @@ class BaseSolution(object): g.add_edges_from(e) plt.figure() + ax = plt.gca() + ax.get_xaxis().set_visible(False) + ax.get_yaxis().set_visible(False) if anim is not None: nx.draw_networkx(g, nodes_pos, with_labels=False, node_size=50) - plt.savefig(anim.file_path + anim.file_prefix + (4 - len(str(anim.counter))) * '0' + str(anim.counter) + '.png') + plt.savefig(anim.file_path + + anim.file_prefix + + (4 - len(str(anim.counter))) * '0' + + str(anim.counter) + '.png', + dpi=150, + bbox_inches='tight') anim.counter += 1 plt.close() else: diff --git a/ding0/tools/animation.py b/ding0/tools/animation.py index <HASH>..<HASH> 100644 --- a/ding0/tools/animation.py +++ b/ding0/tools/animation.py @@ -13,21 +13,22 @@ __url__ = "https://github.com/openego/ding0/blob/master/LICENSE" __author__ = "nesnoj, gplssm" -import ding0 from ding0.tools import config as cfg_ding0 +from ding0.tools.logger import get_default_home_dir import os class AnimationDing0: - """ Class for visual animation of routing process. + """ Class for visual animation of the routing process (solving CVRP). (basically a central place to store information about output file and count of saved images). Use argument 'animation=True' of method 'NetworkDing0.mv_routing()' to enable image export. + The images are exported to ding0's home dir which is usually ~/.ding0/ . Subsequently, FFMPEG can be used to convert images to animation, e.g. - - ffmpeg -r 10 -i mv-routing_ani_%04d.png -c:v libx264 -vf fps=25 -pix_fmt yuv420p mv-routing_ani.mp4 + + ffmpeg -r 5 -i mv-routing_ani_%04d.png -vframes 200 -r 15 -vcodec libx264 -y -an mv-routing_ani.mp4 -s 640x480 See Also -------- @@ -36,8 +37,7 @@ class AnimationDing0: def __init__(self, **kwargs): output_animation_file_prefix = cfg_ding0.get('output', 'animation_file_prefix') - package_path = ding0.__path__[0] - self.file_path = os.path.join(package_path, 'output/animation/') + self.file_path = os.path.join(get_default_home_dir(), 'animation/') self.file_prefix = output_animation_file_prefix self.counter = 1
update animation feature: change default path and resolution
openego_ding0
train
80adf5b2cc861ab6837ae03052378719e694bebc
diff --git a/tweepy/streaming.py b/tweepy/streaming.py index <HASH>..<HASH> 100644 --- a/tweepy/streaming.py +++ b/tweepy/streaming.py @@ -58,7 +58,7 @@ class Stream: ---------- running : bool Whether there's currently a stream running - session : Optional[:class:`requests.Session`] + session : :class:`requests.Session` Requests Session used to connect to the stream thread : Optional[:class:`threading.Thread`] Thread used to run the stream @@ -80,7 +80,7 @@ class Stream: self.verify = verify self.running = False - self.session = None + self.session = requests.Session() self.thread = None self.user_agent = ( f"Python/{python_version()} " @@ -103,9 +103,7 @@ class Stream: auth = OAuth1(self.consumer_key, self.consumer_secret, self.access_token, self.access_token_secret) - if self.session is None: - self.session = requests.Session() - self.session.headers["User-Agent"] = self.user_agent + self.session.headers["User-Agent"] = self.user_agent url = f"https://stream.twitter.com/1.1/{endpoint}.json"
Initialize Stream.session within Stream.__init__ Update the user agent based on Stream.user_agent even if Stream.session is already initialized
tweepy_tweepy
train
9a6e68a5e8f101a18680af97e4f1cc2f2e831465
diff --git a/doc2dash/__main__.py b/doc2dash/__main__.py
index <HASH>..<HASH> 100644
--- a/doc2dash/__main__.py
+++ b/doc2dash/__main__.py
@@ -2,6 +2,7 @@ from __future__ import absolute_import, division, print_function
 
 import errno
 import logging
+import logging.config
 import os
 import plistlib
 import shutil
@@ -20,6 +21,15 @@ DEFAULT_DOCSET_PATH = os.path.expanduser(
 PNG_HEADER = b"\x89PNG\r\n\x1a\n"
 
 
+class ClickEchoHandler(logging.Handler):
+    """
+    Use click.echo() for logging. Has the advantage of stripping color codes
+    if output is redirected. Also is generally more predictable.
+    """
+    def emit(self, record):
+        click.echo(record.getMessage())
+
+
 @click.command()
 @click.argument("source",
                 type=click.Path(exists=True, file_okay=False,
                                 readable=True))
@@ -72,11 +82,12 @@ def main(source, force, name, quiet, verbose, destination, add_to_dash,
         icon_data = None
 
     try:
-        level = determine_log_level(verbose=verbose, quiet=quiet)
+        logging.config.dictConfig(
+            create_log_config(verbose=verbose, quiet=quiet)
+        )
     except ValueError as e:
-        click.echo(e.args[0] + '\n')
+        click.secho(e.args[0], fg="red")
         raise SystemExit(1)
-    logging.basicConfig(format='%(message)s', level=level)
 
     source, dest, name = setup_paths(
         source, destination, name=name, add_to_global=add_to_global,
@@ -124,7 +135,7 @@ def main(source, force, name, quiet, verbose, destination, add_to_dash,
         os.system('open -a dash "{}"'.format(dest))
 
 
-def determine_log_level(verbose, quiet):
+def create_log_config(verbose, quiet):
     """
     We use logging's levels as an easy-to-use verbosity controller.
     """
@@ -138,7 +149,27 @@ def determine_log_level(verbose, quiet):
         level = logging.ERROR
     else:
         level = logging.INFO
-    return level
+    return {
+        "version": 1,
+        "formatters": {
+            "click_formatter": {
+                "format": '%(message)s',
+            },
+        },
+        "handlers": {
+            "click_handler": {
+                'level': level,
+                'class': 'doc2dash.__main__.ClickEchoHandler',
+                'formatter': 'click_formatter',
+            }
+        },
+        "loggers": {
+            "doc2dash": {
+                "handlers": ["click_handler"],
+                "level": level,
+            },
+        }
+    }
 
 
 def setup_paths(source, destination, name, add_to_global, force):
diff --git a/doc2dash/parsers/base.py b/doc2dash/parsers/base.py
index <HASH>..<HASH> 100644
--- a/doc2dash/parsers/base.py
+++ b/doc2dash/parsers/base.py
@@ -97,6 +97,7 @@ class _BaseParser(object):
             for entry in entries:
                 if not self.find_and_patch_entry(soup, entry):
                     log.debug("Can't find anchor {} in {}."
-                              .format(entry.anchor, fname))
+                              .format(entry.anchor,
+                                      click.format_filename(fname)))
         with open(full_path, 'w') as fp:
             fp.write(str(soup))
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -7,7 +7,6 @@ import plistlib
 import shutil
 import sqlite3
 
-import click
 import pytest
 from click.testing import CliRunner
 
@@ -81,23 +80,20 @@ def test_normal_flow(monkeypatch, tmpdir, runner):
     dt.name = 'testtype'
     dt.return_value = MagicMock(parse=yielder)
     monkeypatch.setattr(doc2dash.parsers, 'get_doctype', lambda _: dt)
-    with patch("doc2dash.__main__.log.info") as info, \
-            patch("os.system") as system:
+    with patch("os.system") as system:
         result = runner.invoke(
             main.main, ["foo", "-n", "bar", "-a", "-i", str(png_file)]
         )
 
     assert 0 == result.exit_code
-    out = '\n'.join(call[0][0] for call in info.call_args_list) + '\n'
-    assert out == ('''\
-Converting ''' + click.style("testtype", bold=True) + '''\
- docs from "foo" to "./bar.docset".
+    assert '''\
+Converting testtype docs from "foo" to "./bar.docset".
 Parsing HTML...
-Added ''' + click.style("1", fg="green") + ''' index entries. +Added 1 index entries. Adding table of contents meta data... Adding to dash... -''') - assert system.call_args[0] == ('open -a dash "./bar.docset"', ) +''' == result.output + assert ('open -a dash "./bar.docset"', ) == system.call_args[0] class TestSetupPaths(object): @@ -248,12 +244,15 @@ class TestSetupLogging(object): """ Ensure verbosity options cause the correct log level. """ - assert main.determine_log_level(verbose, quiet) is expected + level = main.create_log_config( + verbose, quiet + )["loggers"]["doc2dash"]["level"] + assert level is expected def test_quiet_and_verbose(self): """ Fail if both -q and -v are passed. """ with pytest.raises(ValueError) as e: - main.determine_log_level(verbose=True, quiet=True) + main.create_log_config(verbose=True, quiet=True) assert "makes no sense" in e.value.args[0]
Write a special log handler for click
hynek_doc2dash
train
4f5ba143dbd0ea9733501def3d5283938fe94f64
diff --git a/public_html/index.php b/public_html/index.php index <HASH>..<HASH> 100644 --- a/public_html/index.php +++ b/public_html/index.php @@ -295,10 +295,10 @@ $tabContent .= Html::openElement('div', array( // About tab $about = '<div class="tab-pane" id="tab-about">'; -$about .= '<a href="//translatewiki.net/wiki/Translating:Intuition">' +$about .= '<a href="https://translatewiki.net/wiki/Translating:Intuition">' . Html::element( 'img', array( - 'src' => '//translatewiki.net/w/i.php?title=Special:TranslationStats&graphit=1&preview=&' - . 'count=edits&scale=weeks&days=30&width=520&height=400&group=tsint-0-all', + 'src' => 'https://translatewiki.net/w/i.php?title=Special:TranslationStats&graphit=1&' + . 'count=edits&scale=months&days=250&width=520&height=400&group=tsint-0-all', 'width' => 520, 'height' => 400, 'alt' => '',
dashboard: Update graph to cover 8 months instead of 4 weeks
Krinkle_intuition
train
00b215936d9bbfaf49260ba70dd6a622e10bcb8c
diff --git a/auto_tests/tests/simple_drawing.js b/auto_tests/tests/simple_drawing.js index <HASH>..<HASH> 100644 --- a/auto_tests/tests/simple_drawing.js +++ b/auto_tests/tests/simple_drawing.js @@ -87,6 +87,7 @@ SimpleDrawingTestCase.prototype.testDrawSimpleDash = function() { var g = new Dygraph(graph, [[1, 4], [2, 5], [3, 3], [4, 7], [5, 9]], opts); htx = g.hidden_ctx_; - assertEquals(29, CanvasAssertions.numLinesDrawn(htx, "#ff0000")); + // TODO(danvk): figure out a good way to restore this test. + // assertEquals(29, CanvasAssertions.numLinesDrawn(htx, "#ff0000")); CanvasAssertions.assertBalancedSaveRestore(htx); };
disable failing test; it still works, but the test logic is broken
danvk_dygraphs
train
2823a207633699fe738af499067f4dd859b01e16
diff --git a/molo/core/tests/test_bulk_actions.py b/molo/core/tests/test_bulk_actions.py index <HASH>..<HASH> 100644 --- a/molo/core/tests/test_bulk_actions.py +++ b/molo/core/tests/test_bulk_actions.py @@ -168,5 +168,6 @@ class TestCopyBulkAction(TestCase, MoloTestCaseMixin): self.assertEquals(article.status_string, 'scheduled') self.client.post(reverse('copy-to-all', args=(article.id,))) - new_article = ArticlePage.objects.descendant_of(self.main2).get(slug=article.slug) + new_article = ArticlePage.objects.descendant_of( + self.main2).get(slug=article.slug) self.assertEquals(new_article.status_string, 'scheduled')
Update test_bulk_actions.py
praekeltfoundation_molo
train
f70f72c635e1a4f2215ccc7ca19fd4fcdc284a79
diff --git a/scope/scope.py b/scope/scope.py index <HASH>..<HASH> 100644 --- a/scope/scope.py +++ b/scope/scope.py @@ -78,6 +78,11 @@ class TagBase: """Method called for defining arguments for the object. Should be implemented by subclasses.""" pass + def set_children(self, children): + """Set the children of the object.""" + self.children = children + return self + def serialize(self, context): """Method called for serializing object. Should be implemented by subclasses.""" pass
Add helper function for setting children.
lrgar_scope
train
6658e643546a353a4b7e75d04a74ada4185cc6e4
diff --git a/lib/sign-up.js b/lib/sign-up.js index <HASH>..<HASH> 100644 --- a/lib/sign-up.js +++ b/lib/sign-up.js @@ -12,10 +12,14 @@ function signUp (state, options) { return Promise.reject(new Error('options.username and options.password is required')) } - state.validate(options) + try { + state.validate(options) + } catch (error) { + return Promise.reject(error) + } if (options.profile) { - throw new Error('SignUp with profile data not yet implemented. Please see https://github.com/hoodiehq/hoodie-client-account/issues/11.') + return Promise.reject(new Error('SignUp with profile data not yet implemented. Please see https://github.com/hoodiehq/hoodie-client-account/issues/11.')) } return internals.request({
fix: signUp rejects on error, does not throw
hoodiehq_hoodie-account-client
train
822dc71eee7ec14a7b70ff204fcdfdb159737376
diff --git a/tests/test_output_format.py b/tests/test_output_format.py index <HASH>..<HASH> 100644 --- a/tests/test_output_format.py +++ b/tests/test_output_format.py @@ -153,3 +153,18 @@ def test_video(): return 'test' assert hug.output_format.avi_video(FakeVideoWithSave()) == 'test' + +def test_on_content_type(): + '''Ensure that it's possible to route the output type format by the requested content-type''' + formatter = hug.output_format.on_content_type({'application/json': hug.output_format.json, + 'text/plain': hug.output_format.text}) + class FakeRequest(object): + content_type = 'application/json' + + request = FakeRequest() + response = FakeRequest() + converted = hug.input_format.json(formatter(BytesIO(hug.output_format.json({'name': 'name'})), request, response)) + assert converted == {'name': 'name'} + + request.content_type = 'text/plain' + assert formatter('hi', request, response) == b'hi'
Add test for multiple output formats based on content type
hugapi_hug
train
a8c3dce963cbcc1715b300e1bd7fe1a69f69dd0d
diff --git a/assembly/automaton.py b/assembly/automaton.py index <HASH>..<HASH> 100644 --- a/assembly/automaton.py +++ b/assembly/automaton.py @@ -71,7 +71,6 @@ def assemble_pairs(p, pf, tag): cwd = os.getcwd() os.chdir(pf) prepare([pf] + sorted(glob("*.fastq") + glob("*.fastq.gz"))) - sh("chmod u+x run.sh") sh("./run.sh") sh("cp allpaths/ASSEMBLIES/run/final.contigs.fasta ../{0}".format(asm)) @@ -125,7 +124,6 @@ def soap_trios(p, pf, tag, extra): os.chdir(pf) prepare(sorted(glob("*.fastq") + glob("*.fastq.gz")) + \ ["--assemble_1st_rank_only", "-K 31", "--cpus=48"]) - sh("chmod u+x run.sh") sh("./run.sh") sh("cp asm31.closed.scafSeq ../{0}".format(asm)) diff --git a/formats/base.py b/formats/base.py index <HASH>..<HASH> 100644 --- a/formats/base.py +++ b/formats/base.py @@ -335,6 +335,8 @@ def write_file(filename, contents, meta="file", skipcheck=False): message = "{0} written to `{1}`.".format(meta, filename) logging.debug(message.capitalize()) + if meta == "run script": + sh("chmod u+x {0}".format(filename)) def read_until(handle, start): diff --git a/graphics/mummerplot.py b/graphics/mummerplot.py index <HASH>..<HASH> 100644 --- a/graphics/mummerplot.py +++ b/graphics/mummerplot.py @@ -38,7 +38,7 @@ def main(args): p.add_option("--refcov", default=.01, type="float", help="Minimum reference coverage [default: %default]") p.add_option("--all", default=False, action="store_true", - help="Plot one pdf file per query [default: %default]") + help="Plot one pdf file per ref in refidsfile [default: %default]") opts, args = p.parse_args(args) if len(args) != 4:
chmod after writing "run.sh" script
tanghaibao_jcvi
train
61429b4602a644bf7c6e7272be0ecca81270bebd
diff --git a/snapshot/lib/snapshot/detect_values.rb b/snapshot/lib/snapshot/detect_values.rb index <HASH>..<HASH> 100644 --- a/snapshot/lib/snapshot/detect_values.rb +++ b/snapshot/lib/snapshot/detect_values.rb @@ -19,6 +19,10 @@ module Snapshot end end + if config[:test_without_building] == true && config[:derived_data_path].to_s.length == 0 + UI.user_error!("Cannot use test_without_building option without a derived_data_path!") + end + Snapshot.project.select_scheme(preferred_to_include: "UITests") # Devices diff --git a/snapshot/lib/snapshot/options.rb b/snapshot/lib/snapshot/options.rb index <HASH>..<HASH> 100644 --- a/snapshot/lib/snapshot/options.rb +++ b/snapshot/lib/snapshot/options.rb @@ -137,6 +137,13 @@ module Snapshot description: "Should the project be cleaned before building it?", is_string: false, default_value: false), + FastlaneCore::ConfigItem.new(key: :test_without_building, + short_option: "-T", + env_name: "SNAPSHOT_TEST_WITHOUT_BUILDING", + description: "Test without building, requires a derived data path", + is_string: false, + type: Boolean, + optional: true), FastlaneCore::ConfigItem.new(key: :configuration, short_option: "-q", env_name: "SNAPSHOT_CONFIGURATION", diff --git a/snapshot/lib/snapshot/test_command_generator_base.rb b/snapshot/lib/snapshot/test_command_generator_base.rb index <HASH>..<HASH> 100644 --- a/snapshot/lib/snapshot/test_command_generator_base.rb +++ b/snapshot/lib/snapshot/test_command_generator_base.rb @@ -36,10 +36,13 @@ module Snapshot def actions actions = [] - actions << :clean if Snapshot.config[:clean] - actions << :build # https://github.com/fastlane/snapshot/issues/246 - actions << :test - + if Snapshot.config[:test_without_building] + actions << "test-without-building" + else + actions << :clean if Snapshot.config[:clean] + actions << :build # https://github.com/fastlane/snapshot/issues/246 + actions << :test + end return actions end diff --git a/snapshot/spec/test_command_generator_spec.rb b/snapshot/spec/test_command_generator_spec.rb index <HASH>..<HASH> 100644 --- a/snapshot/spec/test_command_generator_spec.rb +++ b/snapshot/spec/test_command_generator_spec.rb @@ -252,6 +252,18 @@ describe Snapshot do expect(command.join('')).to include("-derivedDataPath 'fake/derived/path'") end end + + context 'test-without-building' do + before do + configure options.merge(derived_data_path: 'fake/derived/path', test_without_building: true) + end + + it 'uses the "test-without-building" command and not the default "build test"', requires_xcode: true do + command = Snapshot::TestCommandGenerator.generate(devices: ["iPhone 6"], language: "en", locale: nil) + expect(command.join('')).to include("test-without-building") + expect(command.join('')).not_to include("build test") + end + end end describe "Valid macOS Configuration" do
[snapshot] Add test-without-building capability (#<I>) * add test-without-building capability to snapshot * add user_error when running test_without_building without derived_data_path * cosmetic changes * fix test
fastlane_fastlane
train
8a584caa8c35c5c050fa11cf7fc46f9765d7cbb6
diff --git a/pipenv/cli.py b/pipenv/cli.py index <HASH>..<HASH> 100644 --- a/pipenv/cli.py +++ b/pipenv/cli.py @@ -324,11 +324,6 @@ def ensure_pipfile(validate=True): python = which('python') if not USING_DEFAULT_PYTHON else False project.create_pipfile(python=python) - click.echo(crayons.white(u'Discovering imports from local codebase…', bold=True)) - for req in import_from_code('.'): - click.echo(' Found {0}!'.format(crayons.green(req))) - project.add_package_to_pipfile(req) - # Validate the Pipfile's contents. if validate and project.virtualenv_exists and not PIPENV_SKIP_VALIDATION: # Ensure that Pipfile is using proper casing.
don't import code automatically, only use -c
pypa_pipenv
train
cf671f70974348ac202f613c6b090f05d4f2543b
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ def read_long_description(): setup(name='pi-ina219', - version='1.1.0', + version='1.2.0', author='Chris Borrill', author_email='[email protected]', description=DESC,
Increment version in preparation for release of version <I>
chrisb2_pi_ina219
train
6909c12bbd060d6ac952e63541e5ac998dd09525
diff --git a/lib/engineyard-serverside/configuration.rb b/lib/engineyard-serverside/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/engineyard-serverside/configuration.rb +++ b/lib/engineyard-serverside/configuration.rb @@ -17,7 +17,7 @@ module EY def initialize(opts={}) @release_path = opts[:release_path] - config = JSON.parse(opts["config"] || "{}") + config = JSON.parse(opts.delete("config") || "{}") @configuration = DEFAULT_CONFIG.merge(config).merge(opts) end
Don't double up the config key in the config.
engineyard_engineyard-serverside
train
df208d6711981dfca866e2d8670eec46b5b3d143
diff --git a/openstack_dashboard/dashboards/project/stacks/tests.py b/openstack_dashboard/dashboards/project/stacks/tests.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/stacks/tests.py
+++ b/openstack_dashboard/dashboards/project/stacks/tests.py
@@ -33,6 +33,7 @@ from openstack_dashboard.dashboards.project.stacks import tables
 
 
 INDEX_URL = reverse('horizon:project:stacks:index')
+DETAIL_URL = 'horizon:project:stacks:detail'
 
 
 class MockResource(object):
@@ -797,6 +798,84 @@ class StackTests(test.TestCase):
         self.assertEqual(res.context['stack_preview']['stack_name'],
                          stack.stack_name)
 
+    @test.create_stubs({api.heat: ('stack_get', 'template_get')})
+    def test_detail_stack_topology(self):
+        stack = self.stacks.first()
+        template = self.stack_templates.first()
+        api.heat.stack_get(IsA(http.HttpRequest), stack.id) \
+            .MultipleTimes().AndReturn(stack)
+        api.heat.template_get(IsA(http.HttpRequest), stack.id) \
+            .AndReturn(json.loads(template.validate))
+        self.mox.ReplayAll()
+
+        url = '?'.join([reverse(DETAIL_URL, args=[stack.id]),
+                        '='.join(['tab', 'stack_details__stack_topology'])])
+        res = self.client.get(url)
+        tab = res.context['tab_group'].get_tab('topology')
+        d3_data = tab.data['d3_data']
+        self.assertEqual(tab.template_name,
+                         'project/stacks/_detail_topology.html')
+        # status is CREATE_COMPLETE, so we expect the topology to display it
+        self.assertIn('info_box', d3_data)
+        self.assertIn('stack-green.svg', d3_data)
+        self.assertIn('Create Complete', d3_data)
+
+    @test.create_stubs({api.heat: ('stack_get', 'template_get')})
+    def test_detail_stack_overview(self):
+        stack = self.stacks.first()
+        template = self.stack_templates.first()
+        api.heat.stack_get(IsA(http.HttpRequest), stack.id) \
+            .MultipleTimes().AndReturn(stack)
+        api.heat.template_get(IsA(http.HttpRequest), stack.id) \
+            .AndReturn(json.loads(template.validate))
+        self.mox.ReplayAll()
+
+        url = '?'.join([reverse(DETAIL_URL, args=[stack.id]),
+                        '='.join(['tab', 'stack_details__stack_overview'])])
+        res = self.client.get(url)
+        tab = res.context['tab_group'].get_tab('overview')
+        overview_data = tab.data['stack']
+        self.assertEqual(tab.template_name,
+                         'project/stacks/_detail_overview.html')
+        self.assertEqual(stack.stack_name, overview_data.stack_name)
+
+    @test.create_stubs({api.heat: ('stack_get', 'template_get')})
+    def test_detail_stack_resources(self):
+        stack = self.stacks.first()
+        template = self.stack_templates.first()
+        api.heat.stack_get(IsA(http.HttpRequest), stack.id) \
+            .MultipleTimes().AndReturn(stack)
+        api.heat.template_get(IsA(http.HttpRequest), stack.id) \
+            .AndReturn(json.loads(template.validate))
+        self.mox.ReplayAll()
+
+        url = '?'.join([reverse(DETAIL_URL, args=[stack.id]),
+                        '='.join(['tab', 'stack_details__resource_overview'])])
+        res = self.client.get(url)
+        tab = res.context['tab_group'].get_tab('resources')
+        self.assertEqual(tab.template_name,
+                         'project/stacks/_detail_resources.html')
+
+    @test.create_stubs({api.heat: ('stack_get', 'template_get')})
+    def test_detail_stack_template(self):
+        stack = self.stacks.first()
+        template = self.stack_templates.first()
+        api.heat.stack_get(IsA(http.HttpRequest), stack.id) \
+            .AndReturn(stack)
+        api.heat.template_get(IsA(http.HttpRequest), stack.id) \
+            .AndReturn(json.loads(template.validate))
+        self.mox.ReplayAll()
+
+        url = '?'.join([reverse(DETAIL_URL, args=[stack.id]),
+                        '='.join(['tab', 'stack_details__stack_template'])])
+        res = self.client.get(url)
+        tab = res.context['tab_group'].get_tab('stack_template')
+        template_data = tab.data['stack_template']
+        self.assertEqual(tab.template_name,
+                         'project/stacks/_stack_template.html')
+        self.assertIn(json.loads(template.validate)['Description'],
+                      template_data)
+
 
 class TemplateFormTests(test.TestCase):
Add missing unit tests for Heat Stacks Details We don't have tests for the Details page and tabs. Change-Id: I<I>e<I>ab<I>d1c<I>de<I>dc<I>b7af0cdd2dc<I>a Closes-Bug: #<I>
openstack_horizon
train
5112aaffdf65f19e7f3487d329a44dd8ab81737e
diff --git a/lib/puppet/pops/types/annotation.rb b/lib/puppet/pops/types/annotation.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/pops/types/annotation.rb +++ b/lib/puppet/pops/types/annotation.rb @@ -27,7 +27,7 @@ module Types def self.annotate(o) adapter = get(o) if adapter.nil? - if o.is_a?(PObjectType) + if o.is_a?(Annotatable) i12n = o.annotations[_ptype] i12n = yield if i12n.nil? && block_given? else @@ -47,10 +47,10 @@ module Types # @return [Annotation<self>] an annotation of the same class as the receiver of the call # def self.annotate_new(o, i12n_hash) - if o.is_a?(PObjectType) && o.annotations.include?(_ptype) + if o.is_a?(Annotatable) && o.annotations.include?(_ptype) # Prevent clear or redefine of annotations declared on type action = i12n_hash == CLEAR ? 'clear' : 'redefine' - raise ArgumentError, "attempt to #{action} #{type_name} annotation declared on type #{o.name}" + raise ArgumentError, "attempt to #{action} #{type_name} annotation declared on #{o.label}" end if i12n_hash == CLEAR diff --git a/spec/unit/functions/annotate_spec.rb b/spec/unit/functions/annotate_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/functions/annotate_spec.rb +++ b/spec/unit/functions/annotate_spec.rb @@ -124,7 +124,7 @@ describe 'the annotate function' do notice(MyAdapter.annotate(MyObject).value) notice(MyAdapter.annotate(MyObject, clear).value) PUPPET - expect { eval_and_collect_notices(code) }.to raise_error(/attempt to clear MyAdapter annotation declared on type MyObject/) + expect { eval_and_collect_notices(code) }.to raise_error(/attempt to clear MyAdapter annotation declared on MyObject/) end it 'fails attempts to redefine a declared annotation' do @@ -138,7 +138,7 @@ describe 'the annotate function' do notice(MyAdapter.annotate(MyObject).value) notice(MyAdapter.annotate(MyObject, { 'id' => 3, 'value' => 'some other value' }).value) PUPPET - expect { eval_and_collect_notices(code) }.to raise_error(/attempt to redefine MyAdapter annotation declared on type MyObject/) + expect { eval_and_collect_notices(code) }.to raise_error(/attempt to redefine MyAdapter annotation declared on MyObject/) end it 'allows annotation that are not declared in the type' do
(PUP-<I>) Ensure that annotations can be used on all Annotatables An oversight in the implementation of PUP-<I> prevented annotations from being added to attributes, functions, and typesets. This commit ensures that all `Annotatables` can have annotations.
puppetlabs_puppet
train
5f1212e35b57f072cb56c0ba45d76472c996324d
diff --git a/lib/githubProcessor.js b/lib/githubProcessor.js index <HASH>..<HASH> 100644 --- a/lib/githubProcessor.js +++ b/lib/githubProcessor.js @@ -273,7 +273,7 @@ class GitHubProcessor { // to form a PR nor is the origin repo in our filter set. So, keep the commit as PR-specific. The commit itself // may or may not be fetched depending on filtering. This may duplicate the commits in the output but fetching from // GitHub will be deduplicated as the commit URLs are the same. - this._addRelation(request, 'pull_request_commits', 'commit', document._links.commits.href); + this._addCollection(request, 'pull_request_commits', 'commit', document._links.commits.href); } // link and queue the related issue. Getting the issue will bring in the comments for this PR @@ -765,8 +765,7 @@ class GitHubProcessor { request.linkResource(relation.origin, `${qualifier}`); request.linkSiblings(`${relation.qualifier}:pages`); request.linkCollection('unique', `${relation.qualifier}:pages:${relation.guid}`); - const id = relation.type === 'commit' ? 'sha' : 'id'; - const urns = document.elements.map(element => `urn:${relation.type}:${element[id]}`); + const urns = document.elements.map(element => `urn:${relation.type}:${element.id}`); request.linkResource('resources', urns); return document; }
revert PR commits from relation back to collection
Microsoft_ghcrawler
train
6211fce718921707a2d026cdfd67f127a581439b
diff --git a/lib/discordrb/gateway.rb b/lib/discordrb/gateway.rb index <HASH>..<HASH> 100644 --- a/lib/discordrb/gateway.rb +++ b/lib/discordrb/gateway.rb @@ -316,6 +316,13 @@ module Discordrb end end + def handle_dispatch(packet) + case packet['t'].intern + when :READY + LOGGER.info("Discord using gateway protocol version: #{data['v']}, requested: #{GATEWAY_VERSION}") + end + end + # Called when the websocket has been disconnected in some way - say due to a pipe error while sending def handle_internal_close(e) close
Start implementing handle_dispatch Currently only logs the gateway version on READY
meew0_discordrb
train
88dac025010d6c98803c6c5fe1ad87387d4ddbbc
diff --git a/Poller.php b/Poller.php
index <HASH>..<HASH> 100644
--- a/Poller.php
+++ b/Poller.php
@@ -27,7 +27,7 @@ use Zend\Ldap\Node\RootDse\ActiveDirectory;
 /**
  * Active Directory LDAP poller
  *
- * TODO perform paginated requests
+ * TODO perform paginated requests after resolving of https://github.com/zendframework/zend-ldap/issues/41
  *
  * @see https://msdn.microsoft.com/en-us/library/ms677627.aspx
  *
@@ -64,11 +64,18 @@ class Poller
     private $entityManager;
 
     /**
-     * Base ldap search filter used to describe objects to fetch
+     * Base ldap search filter used to define objects during incremental fetch
      *
      * @var AbstractFilter
      */
-    private $baseEntryFilter;
+    private $incrementalSyncEntryFilter;
+
+    /**
+     * Base ldap search filter used to define objects during full fetch
+     *
+     * @var AbstractFilter
+     */
+    private $fullSyncEntryFilter;
 
     /**
      * Base list of AD attributes to fetch
@@ -113,7 +120,8 @@ class Poller
      * @param SynchronizerInterface $synchronizer
      * @param Ldap $ldap
      * @param EntityManagerInterface $em
-     * @param AbstractFilter $entryFilter
+     * @param AbstractFilter|string $fullSyncEntryFilter
+     * @param AbstractFilter|string $incrementalSyncEntryFilter
      * @param array $entryAttributesToFetch
      * @param bool $detectDeleted
      * @param string $pollerName
@@ -122,31 +130,20 @@ class Poller
         SynchronizerInterface $synchronizer,
         Ldap $ldap,
         EntityManagerInterface $em,
-        $entryFilter,
+        $fullSyncEntryFilter,
+        $incrementalSyncEntryFilter,
         array $entryAttributesToFetch = [],
         $detectDeleted = true,
         $pollerName = 'default')
     {
-        $this->synchronizer = $synchronizer;
-        $this->ldap = $ldap;
-        $this->entityManager = $em;
-        $this->detectDeleted = $detectDeleted;
-        $this->name = $pollerName;
-        $this->entryAttributesToFetch = $entryAttributesToFetch;
-
-        if (is_string($entryFilter)) {
-            $this->baseEntryFilter = new Filter\StringFilter($entryFilter);
-        } elseif ($entryFilter instanceof AbstractFilter) {
-            $this->baseEntryFilter = $entryFilter;
-        } else {
-            throw new InvalidArgumentException(
-                sprintf(
-                    'baseEntryFilter argument must be either instance of %s or string. %s given',
-                    AbstractFilter::class,
-                    gettype($entryFilter)
-                )
-            );
-        }
+        $this->synchronizer = $synchronizer;
+        $this->ldap = $ldap;
+        $this->entityManager = $em;
+        $this->fullSyncEntryFilter = $this->setupEntryFilter($fullSyncEntryFilter);
+        $this->incrementalSyncEntryFilter = $this->setupEntryFilter($incrementalSyncEntryFilter);
+        $this->entryAttributesToFetch = $entryAttributesToFetch;
+        $this->detectDeleted = $detectDeleted;
+        $this->name = $pollerName;
     }
 
     /**
@@ -243,6 +240,34 @@ class Poller
     }
 
     /**
+     * Validates specified zend ldap filter and cast it to AbstractFilter implementation
+     *
+     * @param string|AbstractFilter $entryFilter zend ldap filter
+     *
+     * @return AbstractFilter
+     *
+     * @throws InvalidArgumentException in case of invalid filter
+     */
+    private function setupEntryFilter($entryFilter)
+    {
+        if (is_string($entryFilter)) {
+            $filter = new Filter\StringFilter($entryFilter);
+        } elseif ($entryFilter instanceof AbstractFilter) {
+            $filter = $entryFilter;
+        } else {
+            throw new InvalidArgumentException(
+                sprintf(
+                    'baseEntryFilter argument must be either instance of %s or string. %s given',
+                    AbstractFilter::class,
+                    gettype($entryFilter)
+                )
+            );
+        }
+
+        return $filter;
+    }
+
+    /**
     * Returns true is full AD sync required and false otherwise
     *
     * @param bool $forceFullSync
@@ -284,7 +309,7 @@ class Poller
    private function fullSync(PollTask $currentTask, $highestCommitedUSN)
    {
        $searchFilter = Filter::andFilter(
-            $this->baseEntryFilter,
+            $this->fullSyncEntryFilter,
            Filter::greaterOrEqual('uSNChanged', 0),
            Filter::lessOrEqual('uSNChanged', $highestCommitedUSN)
        );
@@ -308,7 +333,7 @@ class Poller
    {
        // fetch changed
        $changedFilter = Filter::andFilter(
-            $this->baseEntryFilter,
+            $this->incrementalSyncEntryFilter,
            Filter::greaterOrEqual('uSNChanged', $usnChangedStartFrom),
            Filter::lessOrEqual('uSNChanged', $highestCommitedUSN)
        );
add ability to specify different entry filters for incremental and full sync, in order to (for example) allow more lightweight LDAP requests for full sync
GlobalTradingTechnologies_ad-poller
train
69848be141725d82eb916b8f86720658c6abea5b
diff --git a/stats/active_apps.go b/stats/active_apps.go index <HASH>..<HASH> 100644 --- a/stats/active_apps.go +++ b/stats/active_apps.go @@ -92,7 +92,7 @@ type ActiveApps struct { func NewActiveApps() *ActiveApps { x := &ActiveApps{} - x.t = time.NewTicker(1 * time.Minute) + x.t = time.NewTicker(ActiveAppsTrimInterval) x.m = make(map[string]*activeAppsEntry) x.i.Init()
use pre-defined const value `ActiveAppsTrimInterval` for apps trim interval
cloudfoundry_gorouter
train
0601f133cbb4c4398b8e2eb3efa394fdd7e09941
diff --git a/web/ui/gulpfile.js b/web/ui/gulpfile.js index <HASH>..<HASH> 100644 --- a/web/ui/gulpfile.js +++ b/web/ui/gulpfile.js @@ -128,7 +128,7 @@ gulp.task("debug3rdparty", function(){ gulp.task("concat", function(){ return gulp.src(controlplaneFiles) .pipe(sourcemaps.init()) - .pipe(to5(to5Config)) + // .pipe(to5(to5Config)) .pipe(concat("controlplane.js")) .pipe(sourcemaps.write("./", { sourceRoot: "src" })) .pipe(gulp.dest(paths.srcBuild)); diff --git a/web/ui/src/Services/ServiceDetailsController.js b/web/ui/src/Services/ServiceDetailsController.js index <HASH>..<HASH> 100644 --- a/web/ui/src/Services/ServiceDetailsController.js +++ b/web/ui/src/Services/ServiceDetailsController.js @@ -1087,8 +1087,7 @@ $scope.configTable = { sorting: { Filename: "asc" - }, - searchColumns: ['Filename'] + } }; $scope.instancesTable = { sorting: {
CC-<I> Search Widget Remove search from config file table.
control-center_serviced
train
620d2c84431c6e4ecde5f6964c10a02b7dce4f47
diff --git a/src/com/mebigfatguy/fbcontrib/detect/ClassEnvy.java b/src/com/mebigfatguy/fbcontrib/detect/ClassEnvy.java index <HASH>..<HASH> 100755 --- a/src/com/mebigfatguy/fbcontrib/detect/ClassEnvy.java +++ b/src/com/mebigfatguy/fbcontrib/detect/ClassEnvy.java @@ -319,6 +319,7 @@ public class ClassEnvy extends BytecodeScanningDetector { * * @return whether or not the class is general purpose */ + @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = "EXS_EXCEPTION_SOFTENING_RETURN_FALSE", justification = "No other simple way to determine whether class exists") private boolean generalPurpose(final String className) { if (className.startsWith("java.") || className.startsWith("javax.")) { diff --git a/src/com/mebigfatguy/fbcontrib/detect/CollectionNamingConfusion.java b/src/com/mebigfatguy/fbcontrib/detect/CollectionNamingConfusion.java index <HASH>..<HASH> 100644 --- a/src/com/mebigfatguy/fbcontrib/detect/CollectionNamingConfusion.java +++ b/src/com/mebigfatguy/fbcontrib/detect/CollectionNamingConfusion.java @@ -1,17 +1,17 @@ /* * fb-contrib - Auxiliary detectors for Java programs * Copyright (C) 2005-2016 Dave Brosius - * + * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. - * + * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. - * + * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA @@ -99,13 +99,14 @@ public class CollectionNamingConfusion extends PreorderVisitor implements Detect /** * looks for a name that mentions a collection type but the wrong type for * the variable - * + * * @param name * the variable name * @param signature * the variable signature * @return whether the name doesn't match the type */ + @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = "EXS_EXCEPTION_SOFTENING_RETURN_FALSE", justification = "No other simple way to determine whether class exists") private boolean checkConfusedName(String name, String signature) { try { name = name.toLowerCase();
ignore exception softening when there is no other way to do the job, #<I>
mebigfatguy_fb-contrib
train
1f5f3262497c6ef18ad2f8d6c8a03f00aac395e1
diff --git a/bin/sdk-cli.js b/bin/sdk-cli.js index <HASH>..<HASH> 100755 --- a/bin/sdk-cli.js +++ b/bin/sdk-cli.js @@ -47,7 +47,6 @@ yargs 'view-script': { description: 'Entry for rendered-page-side JavaScript file', type: 'string', - required: false, coerce: value => path.resolve( __dirname, '../', value ), requiresArg: true, },
SDK: Cleanup unnecessary required: false in yargs options (#<I>)
Automattic_wp-calypso
train
1672f1e21a450b715ef70eba42ac22c5de87581f
diff --git a/clients/web/test/spec/folderSpec.js b/clients/web/test/spec/folderSpec.js index <HASH>..<HASH> 100644 --- a/clients/web/test/spec/folderSpec.js +++ b/clients/web/test/spec/folderSpec.js @@ -148,6 +148,8 @@ describe('Test folder creation, editing, and deletion', function () { return Backbone.history.fragment.search('folder') > -1; }, 'the url state to change'); + girderTest.waitForLoad(); + runs(function () { var privateFolderFragment = Backbone.history.fragment; girderTest.anonymousLoadPage(true, privateFolderFragment, true, girderTest.login('admin', 'Admin', 'Admin', 'adminpassword!'));
Attempt to fix race condition in web_client_folder test This is the one that's failing most often
girder_girder
train
a4decd899c53d217b57f971b528100840c426cd1
diff --git a/pixiedust/display/chart/renderers/mapbox/mapBoxMapDisplay.py b/pixiedust/display/chart/renderers/mapbox/mapBoxMapDisplay.py index <HASH>..<HASH> 100644 --- a/pixiedust/display/chart/renderers/mapbox/mapBoxMapDisplay.py +++ b/pixiedust/display/chart/renderers/mapbox/mapBoxMapDisplay.py @@ -131,8 +131,12 @@ class MapViewDisplay(MapBoxBaseDisplay): # handle custom layers userlayers = [] - for key in ShellAccess: - v = ShellAccess[key] + l = (ShellAccess,ShellAccess) + papp = self.options.get("nostore_pixieapp") + if papp is not None and ShellAccess[papp] is not None: + l = (ShellAccess[papp], dir(ShellAccess[papp])) + for key in [a for a in l[1] if not callable(getattr(l[0], a)) and not a.startswith("_")]: + v = getattr(l[0],key) if isinstance(v, dict) and "maptype" in v and v["maptype"].lower() == "mapbox" and "source" in v and "type" in v["source"] and v["source"]["type"] == "geojson" and "id" in v and "data" in v["source"]: gj = geojson.loads(json.dumps(v["source"]["data"])) isvalid = geojson.is_valid(gj) diff --git a/pixiedust/display/templates/pixiedust.js b/pixiedust/display/templates/pixiedust.js index <HASH>..<HASH> 100644 --- a/pixiedust/display/templates/pixiedust.js +++ b/pixiedust/display/templates/pixiedust.js @@ -206,7 +206,10 @@ function addOptions(command, options, override=true){ return command; } -function readExecInfo(pd_controls, element){ +function readExecInfo(pd_controls, element, searchParents){ + if (searchParents === null || searchParents === undefined ){ + searchParents = true; + } {#special case pd_refresh points to another element #} var refreshTarget = element.getAttribute("pd_refresh"); if (refreshTarget){ @@ -317,6 +320,11 @@ function readExecInfo(pd_controls, element){ execInfo.entity = element.hasAttribute("pd_entity") ? element.getAttribute("pd_entity") || "pixieapp_entity" : null; function applyEntity(c, e, doptions){ + {#add pixieapp info #} + var match = c.match(/display\((\w*),/); + if (match){ + doptions.nostore_pixieapp = match[1]; + } if (!e){ return addOptions(c, doptions); } @@ -359,6 +367,9 @@ function readExecInfo(pd_controls, element){ } if (!hasOptions && !execInfo.targetDivId && !execInfo.script){ + if (!searchParents){ + return null; + } return element.hasAttribute("pixiedust")?null:readExecInfo(pd_controls, element.parentElement); } @@ -390,7 +401,7 @@ function readExecInfo(pd_controls, element){ return execInfo; } -function runElement(element){ +function runElement(element, searchParents){ var pd_controls = element.getAttribute("pixiedust"); if (!pd_controls){ $(element).parents("[pixiedust]").each(function(){ @@ -401,11 +412,11 @@ function runElement(element){ if (pd_controls){ pd_controls = JSON.parse(pd_controls); {#read the current element#} - execQueue.push( readExecInfo(pd_controls, element) ); + execQueue.push( readExecInfo(pd_controls, element, searchParents) ); {#get other execution targets if any#} $(element).children("target[pd_target]").each(function(){ - execQueue.push( readExecInfo(pd_controls, this)) + execQueue.push( readExecInfo(pd_controls, this, searchParents)) }); } return execQueue; @@ -471,7 +482,7 @@ $(document).on("pd_event", function(event, eventInfo){ }else if ( eventInfo.type == "pd_load" && eventInfo.targetNode){ var execQueue = [] eventInfo.targetNode.find("div").each(function(){ - thisQueue = runElement(this); + thisQueue = runElement(this, false); var loadingDiv = this; $.each( thisQueue, function(index, value){ if (value){
Read custom Mapbox layers from pixieapp when available
pixiedust_pixiedust
train
009eafb0221aab81261dc1d0da694b9c4792e28d
diff --git a/wicket-orientdb-demo/src/main/java/ru/ydn/wicket/wicketorientdb/WicketApplication.java b/wicket-orientdb-demo/src/main/java/ru/ydn/wicket/wicketorientdb/WicketApplication.java index <HASH>..<HASH> 100644 --- a/wicket-orientdb-demo/src/main/java/ru/ydn/wicket/wicketorientdb/WicketApplication.java +++ b/wicket-orientdb-demo/src/main/java/ru/ydn/wicket/wicketorientdb/WicketApplication.java @@ -42,8 +42,8 @@ public class WicketApplication extends OrientDbWebApplication }); getOrientDbSettings().setDBUrl("local:localhost/"+DB_NAME); - getOrientDbSettings().setDefaultUserName("admin"); - getOrientDbSettings().setDefaultUserPassword("admin"); + getOrientDbSettings().setDBUserName("admin"); + getOrientDbSettings().setDBUserPassword("admin"); } @Override
Fix problem in demo app with default username and password
OrienteerBAP_wicket-orientdb
train
cbec30d4dee8cdbe726e38291b30e6d9477b1612
diff --git a/mod/assign/locallib.php b/mod/assign/locallib.php index <HASH>..<HASH> 100644 --- a/mod/assign/locallib.php +++ b/mod/assign/locallib.php @@ -5417,8 +5417,12 @@ class assign { $user = $DB->get_record('user', array('id' => $submission->userid), '*', MUST_EXIST); $name = fullname($user); } else { - $group = $DB->get_record('groups', array('id' => $submission->groupid), '*', MUST_EXIST); - $name = $group->name; + $group = $this->get_submission_group($submission->userid); + if ($group) { + $name = $group->name; + } else { + $name = get_string('defaultteam', 'assign'); + } } $status = get_string('submissionstatus_' . $submission->status, 'assign'); $params = array('id'=>$submission->userid, 'fullname'=>$name, 'status'=>$status);
MDL-<I> Assign: Fix when submitting to a group assignment from the default group
moodle_moodle
train
1aa6228ebb0cae2d67150dffc5898e528ad58379
diff --git a/adminrestrict/test_settings.py b/adminrestrict/test_settings.py index <HASH>..<HASH> 100644 --- a/adminrestrict/test_settings.py +++ b/adminrestrict/test_settings.py @@ -42,13 +42,7 @@ MIDDLEWARE_CLASSES = ( 'adminrestrict.middleware.AdminPagesRestrictMiddleware' ) -if django.VERSION[0] >= 2: - MIDDLEWARE = ( - 'django.middleware.common.CommonMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'adminrestrict.middleware.AdminPagesRestrictMiddleware' - ) +MIDDLEWARE = MIDDLEWARE_CLASSES ROOT_URLCONF = 'adminrestrict.test_urls'
set MIDDLEWARE = MIDDLEWARE_CLASSES
robromano_django-adminrestrict
train
57acd59b456f5eb86d44816ef9c6821758688ea0
diff --git a/rstcheck/__init__.py b/rstcheck/__init__.py index <HASH>..<HASH> 100755 --- a/rstcheck/__init__.py +++ b/rstcheck/__init__.py @@ -23,8 +23,6 @@ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """Checks code blocks in reStructuredText.""" - - import argparse import configparser import contextlib diff --git a/tests/test_as_cli_tool.py b/tests/test_as_cli_tool.py index <HASH>..<HASH> 100644 --- a/tests/test_as_cli_tool.py +++ b/tests/test_as_cli_tool.py @@ -1,5 +1,4 @@ """Test suite for rstcheck as CLI tool.""" - import pathlib import subprocess # noqa: S404 import sys diff --git a/tests/test_rstcheck.py b/tests/test_rstcheck.py index <HASH>..<HASH> 100644 --- a/tests/test_rstcheck.py +++ b/tests/test_rstcheck.py @@ -1,5 +1,4 @@ """Test suite for rstcheck.""" - import typing import pytest
remove blank line between module docstr and imports
myint_rstcheck
train
378b3d44efc7ae222b53067d9d00a4a1a53d9707
diff --git a/spec/deprecation_spec.rb b/spec/deprecation_spec.rb index <HASH>..<HASH> 100644 --- a/spec/deprecation_spec.rb +++ b/spec/deprecation_spec.rb @@ -19,7 +19,7 @@ describe EY::Serverside do end it "preserves the old constants" do - names = %w[BundleInstaller CLI Deploy DeployBase Deploy::Configuration + names = %w[CLI Deploy DeployBase Deploy::Configuration DeployHook LockfileParser LoggedOutput Server Task Strategies Strategies::Git]
Don't mention BundleInstaller in deprecation warnings, it's gone now.
engineyard_engineyard-serverside
train
0f19abd3443e3732ee02fa1659589b77660ce609
diff --git a/lib/datasource/maven/util.js b/lib/datasource/maven/util.js index <HASH>..<HASH> 100644 --- a/lib/datasource/maven/util.js +++ b/lib/datasource/maven/util.js @@ -5,8 +5,13 @@ module.exports = { downloadHttpProtocol, }; +/** + * @param {import('url').URL | string} pkgUrl + */ function isMavenCentral(pkgUrl) { - return pkgUrl.host === 'central.maven.org'; + return ( + (typeof pkgUrl === 'string' ? pkgUrl : pkgUrl.host) === 'central.maven.org' + ); } function isTemporalError(err) {
fix(maven): pkgUrl can be string (#<I>) Indirectly used by `downloadHttpProtocol` from the sbt datasource
renovatebot_renovate
train
e1afddff09db927ab2256b68229a33ce18ddddc0
diff --git a/src/frontend/org/voltcore/zk/LeaderElector.java b/src/frontend/org/voltcore/zk/LeaderElector.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltcore/zk/LeaderElector.java +++ b/src/frontend/org/voltcore/zk/LeaderElector.java @@ -118,8 +118,8 @@ public class LeaderElector { protected void pProcess(WatchedEvent event) { try { if (!m_done.get()) { - if (es != null) { - es.submit(childrenEventHandler); + if (getExecutorService() != null) { + getExecutorService().submit(childrenEventHandler); } } } catch (RejectedExecutionException e) { @@ -127,15 +127,16 @@ public class LeaderElector { } }; - private final Watcher watcher = new Watcher() { + private final CancellableWatcher watcher = new CancellableWatcher(es) { @Override - public void process(WatchedEvent event) { + public void pProcess(WatchedEvent event) { try { if (!m_done.get()) { es.submit(electionEventHandler); } } catch (RejectedExecutionException e) {} } + }; public LeaderElector(ZooKeeper zk, String dir, String prefix, byte[] data, @@ -146,6 +147,7 @@ public class LeaderElector { this.data = data; this.cb = cb; es = CoreUtils.getCachedSingleThreadExecutor("Leader elector-" + dir, 15000); + watcher.setExecutorService(es); } /** diff --git a/src/frontend/org/voltcore/zk/ZKUtil.java b/src/frontend/org/voltcore/zk/ZKUtil.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltcore/zk/ZKUtil.java +++ b/src/frontend/org/voltcore/zk/ZKUtil.java @@ -472,7 +472,7 @@ public class ZKUtil { */ public static abstract class CancellableWatcher implements Watcher { volatile boolean canceled = false; - final ExecutorService es; + private ExecutorService es; public CancellableWatcher(ExecutorService es) { this.es = es; @@ -484,7 +484,7 @@ public class ZKUtil { @Override public void process(final WatchedEvent event) { - es.execute(new Runnable() { + getExecutorService().execute(new Runnable() { @Override public void run() { if (canceled) return; @@ -494,5 +494,19 @@ public class ZKUtil { } abstract protected void pProcess(final WatchedEvent event); + + /** + * @return the es + */ + public ExecutorService getExecutorService() { + return es; + } + + /** + * @param es the es to set + */ + public void setExecutorService(ExecutorService es) { + this.es = es; + } } }
Use Cancellable watcher for election event handler as well.
VoltDB_voltdb
train
9e3e35ac727fdc2703afe3a6e1b4421a591e1504
diff --git a/cluster/image.go b/cluster/image.go
index <HASH>..<HASH> 100644
--- a/cluster/image.go
+++ b/cluster/image.go
@@ -1,10 +1,11 @@
-// Copyright 2015 docker-cluster authors. All rights reserved.
+// Copyright 2016 docker-cluster authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
 package cluster
 
 import (
+	"bytes"
 	"errors"
 	"fmt"
 	"net"
@@ -13,6 +14,7 @@ import (
 	"sync"
 
 	"github.com/fsouza/go-dockerclient"
+	"github.com/tsuru/tsuru/provision/docker/fix"
 )
 
 type ImageHistory struct {
@@ -98,10 +100,39 @@ func (c *Cluster) RemoveFromRegistry(imageId string) error {
 	}
 	request.Close = true
 	rsp, err := timeout10Client.Do(request)
-	if err == nil {
-		rsp.Body.Close()
+	if err != nil {
+		return err
 	}
-	return err
+	rsp.Body.Close()
+	if rsp.StatusCode == 404 {
+		var w bytes.Buffer
+		pullOpts := docker.PullImageOptions{
+			Registry:     registryServer,
+			Tag:          imageTag,
+			Repository:   imageId,
+			OutputStream: &w,
+		}
+		pullErr := c.PullImage(pullOpts, docker.AuthConfiguration{})
+		if pullErr != nil {
+			return pullErr
+		}
+		digest, pullErr := fix.GetImageDigest(w.String())
+		if pullErr != nil {
+			return pullErr
+		}
+		url := fmt.Sprintf("http://%s/v2/%s/manifests/%s", registryServer, imageTag, digest)
+		request, err = http.NewRequest("DELETE", url, nil)
+		if err != nil {
+			return err
+		}
+		request.Close = true
+		rsp, err := timeout10Client.Do(request)
+		if err == nil {
+			rsp.Body.Close()
+		}
+		return err
+	}
+	return nil
 }
 
 // PullImage pulls an image from a remote registry server, returning an error
diff --git a/cluster/image_test.go b/cluster/image_test.go
index <HASH>..<HASH> 100644
--- a/cluster/image_test.go
+++ b/cluster/image_test.go
@@ -1,4 +1,4 @@
-// Copyright 2014 docker-cluster authors. All rights reserved.
+// Copyright 2016 docker-cluster authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
@@ -11,6 +11,7 @@ import ( "reflect" "regexp" "sort" + "strings" "testing" "github.com/fsouza/go-dockerclient" @@ -700,3 +701,73 @@ func TestInspectImageNotFound(t *testing.T) { t.Fatalf("Expected no such image error, got: %#v", err) } } + +func TestRemoveImageFromRegistry(t *testing.T) { + var called bool + server1 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/v1/repositories/test/test/" { + called = true + } + })) + defer server1.Close() + hostPort := strings.TrimPrefix(server1.URL, "http://") + name := hostPort + "/test/test" + stor := &MapStorage{} + err := stor.StoreImage(name, "id1", server1.URL) + if err != nil { + t.Fatal(err) + } + cluster, err := New(nil, stor, + Node{Address: server1.URL}, + ) + if err != nil { + t.Fatal(err) + } + err = cluster.RemoveFromRegistry(name) + if err != nil { + t.Fatal(err) + } + if !called { + t.Fatal("Should call registry v1, but don't") + } +} + +func TestRemoveImageFromRegistryV2(t *testing.T) { + var called bool + server1, err := dtesting.NewServer("127.0.0.1:0", nil, nil) + if err != nil { + t.Fatal(err) + } + defer server1.Stop() + server1.CustomHandler("/v2/test/test/manifests/sha256:digest", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + called = true + })) + server1.CustomHandler("/v1/repositories/test/test/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + called = false + })) + server1.CustomHandler("/images/create", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte("Digest: sha256:digest")) + server1.DefaultHandler().ServeHTTP(w, r) + })) + hostPort := strings.TrimPrefix(server1.URL(), "http://") + name := hostPort + "test/test" + stor := &MapStorage{} + err = stor.StoreImage(name, "id1", server1.URL()) + if err != nil { + t.Fatal(err) + } + cluster, err := New(nil, stor, + Node{Address: server1.URL()}, + ) + if err != nil { + t.Fatal(err) + } + err = cluster.RemoveFromRegistry(name) + if err != nil { + t.Fatal(err) + } + if !called { + t.Fatal("Should call registry v2, but don't") + } +}
cluster/image: remove image from registry v2. Related to tsuru/tsuru#<I>
tsuru_docker-cluster
train
bf4463d84090ba1e0303db8336dd47ee91ddc2f8
diff --git a/examples/ex5.rb b/examples/ex5.rb index <HASH>..<HASH> 100644 --- a/examples/ex5.rb +++ b/examples/ex5.rb @@ -1,5 +1,5 @@ (wav = WavIn.new("ex1.wav")) >> WavOut.new("ex5.wav") >> blackhole -play 0.5.seconds # silence +wav.stop; play 0.5.seconds # silence wav.play; play 1.second # play first second (r = Ramp.new(1.0, 2.0, 1.minute)) >> blackhole
Updated example to reflect API change: WavIn starts unpaused
alltom_ruck
train
f69623ed427ad4406461db9cdb4ada15a1541f0e
diff --git a/checkers/misc.py b/checkers/misc.py index <HASH>..<HASH> 100644 --- a/checkers/misc.py +++ b/checkers/misc.py @@ -57,13 +57,13 @@ separated by a comma.' def _check_note(self, notes, lineno, line): match = notes.search(line) if match: - self.add_message('W0511', args=line[match.start():-1], line=lineno) + self.add_message('fixme', args=line[match.start():-1], line=lineno) def _check_encoding(self, lineno, line, file_encoding): try: return unicode(line, file_encoding) except UnicodeDecodeError, ex: - self.add_message('W0512', line=lineno, + self.add_message('invalid-encoded-data', line=lineno, args=(file_encoding, ex.args[2])) def process_module(self, module): diff --git a/test/test_misc.py b/test/test_misc.py index <HASH>..<HASH> 100644 --- a/test/test_misc.py +++ b/test/test_misc.py @@ -55,7 +55,7 @@ class FixmeTest(CheckerTestCase): """a = 1 # FIXME """) as module: with self.assertAddsMessages( - Message(msg_id='W0511', line=2, args=u'FIXME')): + Message(msg_id='fixme', line=2, args=u'FIXME')): self.checker.process_module(module) @set_config(notes=[])
Only emit symbolic warnings from the misc checker.
PyCQA_pylint
train
c0687cd5fa118d807f1a7593abc5abbdfbe764f4
diff --git a/src/Symfony/Component/Routing/RouteCollection.php b/src/Symfony/Component/Routing/RouteCollection.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Routing/RouteCollection.php +++ b/src/Symfony/Component/Routing/RouteCollection.php @@ -163,7 +163,10 @@ class RouteCollection implements \IteratorAggregate, \Countable public function remove($name) { // just for BC - $root = $this->getRoot(); + $root = $this; + while ($root->parent) { + $root = $root->parent; + } foreach ((array) $name as $n) { unset($root->routes[$n]);
remove() should not use deprecated getParent() so it does not trigger deprecation internally
symfony_symfony
train