hash: stringlengths (40 to 40)
diff: stringlengths (131 to 114k)
message: stringlengths (7 to 980)
project: stringlengths (5 to 67)
split: stringclasses (1 value)
2a11be26723f14a2098bedea3a3bb66bceff4d61
diff --git a/delocate/tests/test_tools.py b/delocate/tests/test_tools.py index <HASH>..<HASH> 100644 --- a/delocate/tests/test_tools.py +++ b/delocate/tests/test_tools.py @@ -5,7 +5,8 @@ import os from os.path import join as pjoin, split as psplit, abspath, dirname import shutil -from ..tools import back_tick, ensure_writable, zip2dir, dir2zip +from ..tools import (back_tick, ensure_writable, zip2dir, dir2zip, + find_package_dirs) from ..tmpdirs import InTemporaryDirectory @@ -66,3 +67,23 @@ def test_zip2(): shutil.rmtree('a_dir') zip2dir('another.ext', 'third_dir') assert_equal(os.listdir('third_dir'), ['file2.txt']) + + +def test_find_package_dirs(): + # Test utility for finding package directories + with InTemporaryDirectory(): + os.mkdir('to_test') + a_dir = pjoin('to_test', 'a_dir') + b_dir = pjoin('to_test', 'b_dir') + c_dir = pjoin('to_test', 'c_dir') + for dir in (a_dir, b_dir, c_dir): + os.mkdir(dir) + assert_equal(find_package_dirs('to_test'), set([])) + _write_file(pjoin(a_dir, '__init__.py'), "# a package") + assert_equal(find_package_dirs('to_test'), set([a_dir])) + _write_file(pjoin(c_dir, '__init__.py'), "# another package") + assert_equal(find_package_dirs('to_test'), set([a_dir, c_dir])) + # Not recursive + assert_equal(find_package_dirs('.'), set()) + _write_file(pjoin('to_test', '__init__.py'), "# base package") + assert_equal(find_package_dirs('.'), set([pjoin('.', 'to_test')])) diff --git a/delocate/tools.py b/delocate/tools.py index <HASH>..<HASH> 100644 --- a/delocate/tools.py +++ b/delocate/tools.py @@ -3,7 +3,7 @@ from subprocess import Popen, PIPE import os -from os.path import join as pjoin, relpath +from os.path import join as pjoin, relpath, isdir, exists import zipfile import re import stat @@ -305,6 +305,27 @@ def dir2zip(in_dir, zip_fname): z.close() +def find_package_dirs(root_path): + """ Find python package directories in directory `root_path` + + Parameters + ---------- + root_path : str + Directory to search for package subdirectories + + Returns + ------- + package_sdirs : set + Set of strings where each is a subdirectory of `root_path`, containing + an ``__init__.py`` file. Paths prefixed by `root_path` + """ + package_sdirs = set() + for entry in os.listdir(root_path): + fname = pjoin(root_path, entry) + if isdir(fname) and exists(pjoin(fname, '__init__.py')): + package_sdirs.add(fname) + return package_sdirs + def tree_libs(start_path, filt_func = None): """ Collect unique install names for directory tree `start_path`
RF+TST: add utility to find package directories Find package sub-directories in a directory.
matthew-brett_delocate
train
d42186cc9e27b84239d52698c5ccf03a3aaf36d6
diff --git a/core.js b/core.js index <HASH>..<HASH> 100644 --- a/core.js +++ b/core.js @@ -3,7 +3,6 @@ const {EventEmitter} = events; const path = require('path'); const fs = require('fs'); const url = require('url'); -const child_process = require('child_process'); const os = require('os'); const util = require('util'); const {URL} = url;
Remove dead child_process require from core.js
exokitxr_exokit
train
6670fa986f16a2c0fab38380ae133625d039562a
diff --git a/tests/framework/widgets/MenuTest.php b/tests/framework/widgets/MenuTest.php index <HASH>..<HASH> 100644 --- a/tests/framework/widgets/MenuTest.php +++ b/tests/framework/widgets/MenuTest.php @@ -35,11 +35,11 @@ class MenuTest extends \yiiunit\TestCase ] ]); - $this->assertEqualsWithoutLE(<<<HTML + $expected = <<<HTML <ul><li><a href="#"><span class="glyphicon glyphicon-user"></span> Users</a></li> <li><a href="#">Authors &amp; Publications</a></li></ul> -HTML - , $output); +HTML; + $this->assertEqualsWithoutLE($expected, $output); $output = Menu::widget([ 'route' => 'test/test', @@ -59,12 +59,11 @@ HTML ] ]); - $this->assertEqualsWithoutLE(<<<HTML + $expected = <<<HTML <ul><li><a href="#"><span class="glyphicon glyphicon-user"></span> Users</a></li> <li><a href="#">Authors &amp; Publications</a></li></ul> -HTML - , $output); - +HTML; + $this->assertEqualsWithoutLE($expected, $output); } /** @@ -93,11 +92,11 @@ HTML ] ]); - $this->assertEqualsWithoutLE(<<<HTML + $expected = <<<HTML <div><a href="#">item1</a></div> <a href="#">item2</a> -HTML - , $output); +HTML; + $this->assertEqualsWithoutLE($expected, $output); $output = Menu::widget([ 'route' => 'test/test', @@ -119,11 +118,44 @@ HTML 'itemOptions' => ['tag' => false] ]); - $this->assertEqualsWithoutLE(<<<HTML + $expected = <<<HTML <a href="#">item1</a> <a href="#">item2</a> -HTML - , $output); +HTML; + + $this->assertEqualsWithoutLE($expected, $output); + } + + public function testItemTemplate() + { + $output = Menu::widget([ + 'route' => 'test/test', + 'params' => [], + 'linkTemplate' => '', + 'labelTemplate' => '', + 'items' => [ + [ + 'label' => 'item1', + 'url' => '#', + 'template' => 'label: {label}; url: {url}' + ], + [ + 'label' => 'item2', + 'template' => 'label: {label}' + ], + [ + 'label' => 'item3 (no template)', + ], + ] + ]); + + $expected = <<<HTML +<ul><li>label: item1; url: #</li> +<li>label: item2</li> +<li></li></ul> +HTML; + + $this->assertEqualsWithoutLE($expected, $output); }
Updated MenuTest. Verified that item template override works correctly Closes #<I>
yiisoft_yii2
train
b8349101ece785d7e72a2fc85a7036de74939384
diff --git a/api/server.go b/api/server.go index <HASH>..<HASH> 100644 --- a/api/server.go +++ b/api/server.go @@ -27,7 +27,7 @@ import ( "gopkg.in/tylerb/graceful.v1" ) -const Version = "0.13.0-rc2" +const Version = "0.13.0-rc3" func getProvisioner() (string, error) { provisioner, err := config.GetString("provisioner")
api/server: bump to <I>-rc3
tsuru_tsuru
train
a7b08cced302e7384e8575a9108c4efe21865d27
diff --git a/autopep8.py b/autopep8.py index <HASH>..<HASH> 100755 --- a/autopep8.py +++ b/autopep8.py @@ -3359,9 +3359,14 @@ def apply_config_defaults(parser, arguments): def global_config_arg(arguments): """Get --global-config arg from arguments. """ - for arg in arguments: - if arg.startswith('--global-config'): - config_file = arg[16:] + for i, arg in enumerate(arguments): + if arg.startswith('--g'): + if '=' in arg: + config_file = arg.split('=', 1)[1] + elif i + 1 < len(arguments): + config_file = arguments[i + 1] + else: + config_file = '' return os.path.expanduser(config_file) diff --git a/test/test_autopep8.py b/test/test_autopep8.py index <HASH>..<HASH> 100755 --- a/test/test_autopep8.py +++ b/test/test_autopep8.py @@ -4565,8 +4565,36 @@ class ParseArgsTests(unittest.TestCase): def test_config_false_with_local(self): args = autopep8.parse_args(['*.py', '--global-config=False'], apply_config=True) + self.assertEqual(args.global_config, 'False') self.assertEqual(args.indent_size, 2) + def test_config_false_with_local_space(self): + args = autopep8.parse_args(['*.py', '--global-config', 'False'], + apply_config=True) + self.assertEqual(args.global_config, 'False') + self.assertEqual(args.indent_size, 2) + + def test_config_false_with_local_autocomplete(self): + args = autopep8.parse_args(['*.py', '--g', 'False'], + apply_config=True) + self.assertEqual(args.global_config, 'False') + self.assertEqual(args.indent_size, 2) + + def test_global_config_arg(self): + args = ['*.py', '--global-config=False'] + config_file = autopep8.global_config_arg(args) + self.assertEqual(config_file, 'False') + + def test_global_config_arg_space(self): + args = ['*.py', '--global-config', 'False'] + config_file = autopep8.global_config_arg(args) + self.assertEqual(config_file, 'False') + + def test_global_config_arg_autocomplete(self): + args = ['*.py', '--g', 'False'] + config_file = autopep8.global_config_arg(args) + self.assertEqual(config_file, 'False') + def test_config_false_without_local(self): os.remove(self.LOCAL_CONFIG) args = autopep8.parse_args(['*.py', '--global-config=False'],
FIX autocomplete for --global-config arg and with space sep
hhatto_autopep8
train
5b4666cd22c94405cc39c9f8d9a99608c6304d5f
diff --git a/src/Exceptions/ExceptionHandler.php b/src/Exceptions/ExceptionHandler.php index <HASH>..<HASH> 100644 --- a/src/Exceptions/ExceptionHandler.php +++ b/src/Exceptions/ExceptionHandler.php @@ -25,7 +25,7 @@ class ExceptionHandler implements ExceptionHandlerInterface */ public function handleException($e, BotMan $bot) { - $exceptions = $this->exceptions->where('exception', class_basename($e)); + $exceptions = $this->exceptions->where('exception', (new \ReflectionClass($e))->getShortName()); $exceptions->each(function ($handler) use ($e, $bot) { call_user_func_array($handler['closure'], [$e, $bot]);
Do not use Laravel class_basename function in ExceptionHandler (#<I>)
botman_botman
train
bd32b8a98bd53883e1a9d4d286c7db9b45246095
diff --git a/ibis/expr/window.py b/ibis/expr/window.py index <HASH>..<HASH> 100644 --- a/ibis/expr/window.py +++ b/ibis/expr/window.py @@ -298,7 +298,15 @@ class Window: def rows_with_max_lookback(rows, max_lookback): - """Create a bound preceding value for use with trailing window functions""" + """Create a bound preceding value for use with trailing window functions + + Notes + ----- + This function is exposed for use by external clients, but Ibis itself does + not currently do anything with the max_lookback parameter in any of its + backends. + + """ return RowsWithMaxLookback(rows, max_lookback) diff --git a/ibis/pandas/execution/tests/test_window.py b/ibis/pandas/execution/tests/test_window.py index <HASH>..<HASH> 100644 --- a/ibis/pandas/execution/tests/test_window.py +++ b/ibis/pandas/execution/tests/test_window.py @@ -466,8 +466,10 @@ def test_window_with_mlb(): ibis.trailing_window(rows_with_mlb, order_by='time') ) ) - with pytest.raises(NotImplementedError): - expr.execute() + result = expr.execute() + expected = df + expected['sum'] = expected.a.rolling(5, min_periods=1).sum() + tm.assert_frame_equal(result, expected) rows_with_mlb = rows_with_max_lookback(5, 10) with pytest.raises(com.IbisInputError): diff --git a/ibis/pandas/execution/window.py b/ibis/pandas/execution/window.py index <HASH>..<HASH> 100644 --- a/ibis/pandas/execution/window.py +++ b/ibis/pandas/execution/window.py @@ -90,10 +90,6 @@ def execute_window_op( **kwargs, ) - if window.max_lookback is not None: - raise NotImplementedError('Rows with max lookback is not implemented ' - 'for pandas backend.') - following = window.following order_by = window._order_by
Remove pandas exception for rows_with_max_lookback Without this exception external clients will be able to use rows_with_max_lookback with pandas
ibis-project_ibis
train
9263766be23c67bb82f51775847ba2b868970c79
diff --git a/source/rafcon/gui/start.py b/source/rafcon/gui/start.py index <HASH>..<HASH> 100755 --- a/source/rafcon/gui/start.py +++ b/source/rafcon/gui/start.py @@ -35,7 +35,6 @@ from rafcon.gui.controllers.main_window import MainWindowController from rafcon.gui.views.main_window import MainWindowView from rafcon.gui.runtime_config import global_runtime_config import rafcon.gui.models.auto_backup -from rafcon.gui.utils import wait_for_gui from rafcon.gui.utils.splash_screen import SplashScreen from rafcon.gui.helpers import installation import rafcon.gui.backup.session as backup_session @@ -50,6 +49,7 @@ from rafcon.core.execution.execution_status import StateMachineExecutionStatus from rafcon.core.config import global_config # utils +from rafcon.gui.utils import wait_for_gui import rafcon.utils.filesystem as filesystem from rafcon.utils import profiler from rafcon.utils import plugins @@ -193,8 +193,7 @@ def stop_gtk(): glib.idle_add(gtk.main_quit) # Run the GTK loop until no more events are being generated and thus the GUI is fully destroyed - while gtk.events_pending(): - gtk.main_iteration(False) + wait_for_gui() def post_gui_destruction(): @@ -299,8 +298,7 @@ def main(): splash_screen.set_text("Loading GUI...") setup_gui() - while gtk.events_pending(): - gtk.main_iteration(False) + wait_for_gui() post_setup_plugins(user_input)
Make use of wait_for_gui Shorten code by making use of new function wait_for_gui
DLR-RM_RAFCON
train
58387dce69d20cb91e9449203dde5cbe9f85ba33
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -11,8 +11,7 @@ setup( author='Stephen Holsapple', author_email='[email protected]', url='http://www.google.com', - package_dir={'py509': ''}, - packages=['py509'], + packages=['py509', 'py509.asn1'], install_requires=[ 'certifi', 'cryptography',
Nope, I'm just wrong and need to specify manually.
sholsapp_py509
train
d814276910c21f7ff20120c5ca425ab04ed78549
diff --git a/src/js/BottomNavigations/__tests__/BottomNav.js b/src/js/BottomNavigations/__tests__/BottomNav.js index <HASH>..<HASH> 100644 --- a/src/js/BottomNavigations/__tests__/BottomNav.js +++ b/src/js/BottomNavigations/__tests__/BottomNav.js @@ -24,6 +24,7 @@ describe('BottomNav', () => { active={false} onNavChange={jest.fn()} fixed={false} + index={0} /> ); @@ -32,13 +33,14 @@ describe('BottomNav', () => { expect(bottomNavNode.classList.contains(className)).toBe(true); }); - it('passes the label and click event to the onClick and onNavChange props', () => { + it('passes the index and click event to the onClick and onNavChange props', () => { const props = { onNavChange: jest.fn(), onClick: jest.fn(), label: 'a', fixed: false, active: false, + index: 0, }; const nav = renderIntoDocument(<BottomNav {...props} />); @@ -47,14 +49,14 @@ describe('BottomNav', () => { Simulate.click(navNode); expect(props.onNavChange.mock.calls.length).toBe(1); - expect(props.onNavChange.mock.calls[0][0]).toBe(props.label); + expect(props.onNavChange.mock.calls[0][0]).toBe(props.index); expect(props.onClick.mock.calls.length).toBe(1); - expect(props.onClick.mock.calls[0][0]).toBe(props.label); + expect(props.onClick.mock.calls[0][0]).toBe(props.index); }); it('renders as a button by default', () => { - const nav = renderIntoDocument(<BottomNav label="A" active={false} fixed={false} onNavChange={jest.fn()} />); + const nav = renderIntoDocument(<BottomNav label="A" active={false} fixed={false} onNavChange={jest.fn()} index={0} />); const btns = scryRenderedDOMComponentsWithTag(nav, 'button'); expect(btns.length).toBe(1); }); @@ -75,6 +77,7 @@ describe('BottomNav', () => { fixed: false, component: Link, onNavChange: jest.fn(), + index: 0, }; const nav = renderIntoDocument(<BottomNav {...props} />); @@ -108,6 +111,7 @@ describe('BottomNav', () => { fixed: false, label: 'A', onNavChange: jest.fn(), + index: 0, }; const nav = renderIntoDocument(<BottomNav {...props} />); @@ -148,6 +152,7 @@ describe('BottomNav', () => { const props = { label: 'Hello, World!', onNavChange: jest.fn(), + index: 0, }; let nav = renderIntoDocument(<BottomNav {...props} active={false} fixed={false} />); diff --git a/src/js/BottomNavigations/__tests__/BottomNavigation.js b/src/js/BottomNavigations/__tests__/BottomNavigation.js index <HASH>..<HASH> 100644 --- a/src/js/BottomNavigations/__tests__/BottomNavigation.js +++ b/src/js/BottomNavigations/__tests__/BottomNavigation.js @@ -49,13 +49,14 @@ describe('BottomNavigation', () => { label: 'C', }], onChange: jest.fn(), + activeIndex: 0, }; const nav = renderIntoDocument(<BottomNavigation {...props} />); - nav.handleNavChange('B'); + nav.handleNavChange(2); expect(props.onChange.mock.calls.length).toBe(1); - expect(props.onChange.mock.calls[0][0]).toBe('B'); + expect(props.onChange.mock.calls[0][0]).toBe(2); }); it('generates a BottomNav component for each action', () => {
Forgot to update tests after index changes for BottomNav.. Whoops
mlaursen_react-md
train
c2f675ad960679a9913533d0730d167c2339f9a8
diff --git a/dateparser/freshness_date_parser.py b/dateparser/freshness_date_parser.py index <HASH>..<HASH> 100644 --- a/dateparser/freshness_date_parser.py +++ b/dateparser/freshness_date_parser.py @@ -126,11 +126,11 @@ class FreshnessDateDataParser(object): break td = relativedelta(**kwargs) - if re.search(r'\bin\b', date_string): - date = self.now + td - elif re.search(r'\bago\b', date_string): - date = self.now - td - elif 'future' in prefer_dates_from: + if ( + re.search(r'\bin\b', date_string) or + ('future' in prefer_dates_from and + not re.search(r'\bago\b', date_string)) + ): date = self.now + td else: date = self.now - td
Checks didn't like that. Let's try this way
scrapinghub_dateparser
train
731b66fe6dc19525a98ce98641ce51d758e12ad0
diff --git a/routes/api/jobs.js b/routes/api/jobs.js index <HASH>..<HASH> 100644 --- a/routes/api/jobs.js +++ b/routes/api/jobs.js @@ -67,9 +67,13 @@ exports.jobs_start = function(req, res) { res.statusCode = 400; return res.end("you must configure " + url + " before you can start a job for it"); } - var repo_metadata = _.find(origin_user_obj.github_metadata[origin_user_obj.github.id].repos, function(item) { - return repo_config.url == item.html_url.toLowerCase(); - }); + var repo_metadata = null; + // We don't have github metadata unless we have a linked github account. + if (origin_user_obj.github.id) { + repo_metadata = _.find(origin_user_obj.github_metadata[origin_user_obj.github.id].repos, function(item) { + return repo_config.url == item.html_url.toLowerCase(); + }); + } var repo_ssh_url; // If we have Github metadata, use that. It is loosely coupled and can self-heal things like // a configured Github Repo being renamed in Github (such as happened with Klingsbo)
detect manual setup case (no github_metadata defined) and support it. closes #<I>.
Strider-CD_strider
train
f412b842dd9cf943cdd9a5ce2b2b6e66a42c0a98
diff --git a/src/LeagueWrap/Api/Champion.php b/src/LeagueWrap/Api/Champion.php index <HASH>..<HASH> 100644 --- a/src/LeagueWrap/Api/Champion.php +++ b/src/LeagueWrap/Api/Champion.php @@ -84,7 +84,9 @@ class Champion extends AbstractApi { */ public function free() { - $this->free = 'true'; - return $this->all(); + $this->free = 'true'; + $championList = $this->all(); + $this->free = 'false'; + return $championList; } } diff --git a/src/LeagueWrap/Client.php b/src/LeagueWrap/Client.php index <HASH>..<HASH> 100644 --- a/src/LeagueWrap/Client.php +++ b/src/LeagueWrap/Client.php @@ -20,7 +20,7 @@ class Client implements ClientInterface { $this->guzzle = new Guzzle([ 'base_url' => $url, 'defaults' => ['headers' => ['Accept-Encoding' => 'gzip,deflate']] - ]); + ]); } /** diff --git a/tests/Api/ChampionTest.php b/tests/Api/ChampionTest.php index <HASH>..<HASH> 100644 --- a/tests/Api/ChampionTest.php +++ b/tests/Api/ChampionTest.php @@ -52,6 +52,28 @@ class ApiChampionTest extends PHPUnit_Framework_TestCase { $this->assertTrue($champions[53] instanceof LeagueWrap\Dto\Champion); } + public function testFreeWillNotBeStoredPermanently() + { + $this->client->shouldReceive('baseUrl') + ->twice(); + $this->client->shouldReceive('request') + ->with('na/v1.2/champion', [ + 'freeToPlay' => 'true', + 'api_key' => 'key', + ])->once() + ->andReturn(file_get_contents('tests/Json/champion.free.json')); + $this->client->shouldReceive('request') + ->with('na/v1.2/champion', [ + 'freeToPlay' => 'false', + 'api_key' => 'key', + ])->once() + ->andReturn(file_get_contents('tests/Json/champion.json')); + + $api = new Api('key', $this->client); + $champion = $api->champion(); + $this->assertNotEquals($champion->free(), $champion->all()); + } + public function testAllIterator() { $this->client->shouldReceive('baseUrl')
Champion->free() now resets the freeToPlay marker
paquettg_leaguewrap
train
e622ff434b7d124a8f945c73bb7c55415bcd92df
diff --git a/test/k8sT/Chaos.go b/test/k8sT/Chaos.go index <HASH>..<HASH> 100644 --- a/test/k8sT/Chaos.go +++ b/test/k8sT/Chaos.go @@ -232,12 +232,11 @@ var _ = Describe("K8sChaosTest", func() { ctx, helpers.DefaultNamespace, netperfClient, - fmt.Sprintf("netperf -l 300 -t TCP_STREAM -H %s", podsIps[netperfServer])) + fmt.Sprintf("netperf -l 60 -t TCP_STREAM -H %s", podsIps[netperfServer])) restartCilium() By("Stopping netperf client test") - cancel() res.WaitUntilFinish() res.ExpectSuccess("Failed while cilium was restarting") }) @@ -250,7 +249,7 @@ var _ = Describe("K8sChaosTest", func() { ctx, helpers.DefaultNamespace, netperfClient, - fmt.Sprintf("netperf -l 300 -t TCP_STREAM -H %s", podsIps[netperfServer])) + fmt.Sprintf("netperf -l 60 -t TCP_STREAM -H %s", podsIps[netperfServer])) By("Installing the L3-L4 Policy") _, err := kubectl.CiliumPolicyAction( @@ -260,7 +259,6 @@ var _ = Describe("K8sChaosTest", func() { restartCilium() By("Stopping netperf client test") - cancel() res.WaitUntilFinish() res.ExpectSuccess("Failed while cilium was restarting") })
[CI] Don't kill netperf process in chaos tests
cilium_cilium
train
f61c41a9a00ae148b14f4bb493a37224c3a6e033
diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -267,3 +267,18 @@ Equal( "Check that every function has a doctring" ); +// check that no line of the code is longer than 80 characters +Refute((function () { + var tooLong = function (line) { + return line.length > 80; + }; + + var exceeded = []; + $.keys($).forEach(function (k) { + if (!$[k].toString().split('\n').filter(tooLong).length) { return; } + console.log("a line in [%s] is > 80 characters", k); + exceeded.push(k); + }); + return exceeded.length; +}())); +
fail tests if any line is > <I> chars
ansuz_ansuzjs
train
235b55d2f6d733124ffa27d418e3be57240ebc75
diff --git a/spec/mysql2/error_spec.rb b/spec/mysql2/error_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mysql2/error_spec.rb +++ b/spec/mysql2/error_spec.rb @@ -53,7 +53,9 @@ describe Mysql2::Error do error end - it "returns error messages as UTF-8" do + it "returns error messages as UTF-8 by default" do + Encoding.default_internal = nil + error.message.encoding.should eql(Encoding::UTF_8) error.message.valid_encoding?
reset Encoding.default_internal before depending on its behavior
brianmario_mysql2
train
443b1256ea7c0eb5b3cd2f992d21718e0fd583c5
diff --git a/src/Utils.js b/src/Utils.js index <HASH>..<HASH> 100644 --- a/src/Utils.js +++ b/src/Utils.js @@ -30,7 +30,7 @@ var Utils = { * log.error('VIRHE! Tulostuu punaisen ja lihavoidun ERROR-tagin kanssa.'); * log.fatal('KRIITTINEN VIRHE! Tulostuu punaisen ja lihavoidun FATAL-tagin kanssa.'); */ - log: new Logger('[NetMatch %t] '.grey, argv.d && argv.d - 1), + log: new Logger('[NetMatch %t] '.grey, argv.d && (argv.d > 1 ? argv.d - 1 : 1)), /** * Palauttaa satunnaisen luvun väliltä minVal...maxVal, mahdollisesti liukulukuna
Construct the logger to work better with boolean param --debug
cb-hackers_node-NetMatch
train
ecc907b33acf1725a603ed2921acb7a9f68194b6
diff --git a/lib/blockscore/person.rb b/lib/blockscore/person.rb index <HASH>..<HASH> 100644 --- a/lib/blockscore/person.rb +++ b/lib/blockscore/person.rb @@ -11,7 +11,7 @@ module BlockScore def initialize(options = {}) super - @question_set = BlockScore::QuestionSet.new(:person_id => id) + @question_set = BlockScore::QuestionSet.new(:person_id => id, :person => self) end def question_set diff --git a/lib/blockscore/question_set.rb b/lib/blockscore/question_set.rb index <HASH>..<HASH> 100644 --- a/lib/blockscore/question_set.rb +++ b/lib/blockscore/question_set.rb @@ -6,9 +6,19 @@ module BlockScore class QuestionSet < Base include BlockScore::Actions::Create include BlockScore::Actions::Retrieve - # should limit the index to the current Person's QuestionSets... include BlockScore::Actions::All + def create(params) + self.class.create(params) + end + + def retrieve(id) + self.class.retrieve(id) + end + + def all(options = {}) + self.class.all(options) + end def score(answers) self.class.post "#{self.class.endpoint}#{id}/score", :answers => answers diff --git a/test/question_set_test.rb b/test/question_set_test.rb index <HASH>..<HASH> 100644 --- a/test/question_set_test.rb +++ b/test/question_set_test.rb @@ -2,8 +2,40 @@ require 'test_helper' require 'test/unit/active_support' class QuestionSetResourceTest < ActiveSupport::TestCase - include ResourceTest + # QuestionSetResourceTest cannot include ResourceTest because + # QuestionSets are only accessible through their Person. + def test_create_question_set + person = TestClient.create_person + response = person.question_set.create + assert_equal response.class, BlockScore::QuestionSet + end + + def test_retrieve_question_set + person = TestClient.create_person + qs = person.question_set.create + response = person.question_set.retrieve(qs.id) + assert_equal response.class, BlockScore::QuestionSet + end + + def test_list_question_set + person = TestClient.create_person + response = person.question_set.all # list ALL question_sets + assert_equal response.class, Array + end + + def test_list_question_set_with_count + person = TestClient.create_person + response = person.question_set.all(:count => 2) + assert_equal response.class, Array + end + + def test_list_question_set_with_count_and_offset + person = TestClient.create_person + response = person.question_set.all(:count => 2, :offset => 2) + assert_equal response.class, Array + end + def test_score question_set = TestClient.create_question_set @answers = [ @@ -30,6 +62,6 @@ class QuestionSetResourceTest < ActiveSupport::TestCase ] response = question_set.score(@answers) - assert_equal resource_to_class(resource), response.class + assert_equal response.class, BlockScore::QuestionSet end end
make sure question_set methods can be called through Person instances
BlockScore_blockscore-ruby
train
7673d77fbb8f0073ca03ddc709c5ad7332b78d2b
diff --git a/asammdf/blocks/utils.py b/asammdf/blocks/utils.py index <HASH>..<HASH> 100644 --- a/asammdf/blocks/utils.py +++ b/asammdf/blocks/utils.py @@ -207,13 +207,18 @@ def matlab_compatible(name): """ - compatible_name = [ch if ch in ALLOWED_MATLAB_CHARS else "_" for ch in name] + compatible_name = [ + ch if ch in ALLOWED_MATLAB_CHARS else "_" + for ch in name + ] compatible_name = "".join(compatible_name) if compatible_name[0] not in string.ascii_letters: compatible_name = "M_" + compatible_name - return compatible_name + # max variable name is 63 and 3 chars are reserved + # for get_unique_name in case of multiple channel name occurence + return compatible_name[:60] def get_text_v3(address, stream): diff --git a/asammdf/mdf.py b/asammdf/mdf.py index <HASH>..<HASH> 100644 --- a/asammdf/mdf.py +++ b/asammdf/mdf.py @@ -432,7 +432,6 @@ class MDF(object): included_channels.add(ch_nr) else: if group.get("CAN_logging", False): - print([ch.name for ch in group['channels']]) where = ( self.whereis("CAN_DataFrame") + self.whereis("CAN_ErrorFrame") @@ -953,9 +952,8 @@ class MDF(object): * `mat` : Matlab .mat version 4, 5 or 7.3 export. If *single_time_base==False* the channels will be renamed in the mat - file to 'DataGroup_<cntr>_<channel name>'. The channel group - master will be renamed to - 'DataGroup_<cntr>_<channel name>_master' + file to 'D<cntr>_<channel name>'. The channel group + master will be renamed to 'DM<cntr>_<channel name>' ( *<cntr>* is the data group index starting from 0) * `pandas` : export all channels as a single pandas DataFrame @@ -1425,8 +1423,8 @@ class MDF(object): if not single_time_base: mdict = {} - master_name_template = "DataGroup_{}_{}_master" - channel_name_template = "DataGroup_{}_{}" + master_name_template = "DGM{}_{}" + channel_name_template = "DG{}_{}" used_names = UniqueDB() for i, grp in enumerate(self.groups):
try to ensure maximum Matlab variable name length for .mat export
danielhrisca_asammdf
train
5329c61d84ca12d05b6fe64934f6242ecb32015d
diff --git a/subprocrunner/_subprocess_runner.py b/subprocrunner/_subprocess_runner.py index <HASH>..<HASH> 100644 --- a/subprocrunner/_subprocess_runner.py +++ b/subprocrunner/_subprocess_runner.py @@ -99,6 +99,14 @@ class SubprocessRunner: self.__quiet = quiet + def __repr__(self) -> str: + return "SubprocessRunner(command='{}', returncode={}, dryrun={}, quiet={})".format( + self.command_str, + self.returncode if self.returncode is not None else "'not yet executed'", + self.dry_run, + self.__quiet, + ) + @property def dry_run(self) -> bool: return self.__dry_run diff --git a/test/test_subproc_runner.py b/test/test_subproc_runner.py index <HASH>..<HASH> 100644 --- a/test/test_subproc_runner.py +++ b/test/test_subproc_runner.py @@ -34,6 +34,15 @@ else: raise NotImplementedError(os_type) +class Test_SubprocessRunner_repr: + def test_normal(self): + expected = ( + "SubprocessRunner(command='ls hoge', returncode='not yet executed', " + "dryrun=False, quiet=False)" + ) + assert str(SubprocessRunner(command=["ls", "hoge"])) == expected + + class Test_SubprocessRunner_run: @pytest.mark.parametrize( ["command", "dry_run", "expected"],
Add __repr__ method to SubprocessRunner class
thombashi_subprocrunner
train
9c5e55ebcbde0e2ce836e6bff377afc752df0fb6
diff --git a/Simple Flask server/server.py b/Simple Flask server/server.py index <HASH>..<HASH> 100644 --- a/Simple Flask server/server.py +++ b/Simple Flask server/server.py @@ -168,13 +168,13 @@ class ActionView(View): result = self.action(request, *args, **kwargs) # Is it a redirect ? - if result.__class__ == PlugItRedirect: + if isinstance(result, PlugItRedirect): response = make_response("") response.headers['EbuIo-PlugIt-Redirect'] = result.url if result.no_prefix: response.headers['EbuIo-PlugIt-Redirect-NoPrefix'] = 'True' return response - elif result.__class__ == PlugItSendFile: + elif isinstance(result, PlugItSendFile): response = send_file(result.filename, mimetype=result.mimetype, as_attachment=result.as_attachment, attachment_filename=result.attachment_filename) response.headers['EbuIo-PlugIt-ItAFile'] = 'True' return response
Type-checking with isinstance
ebu_PlugIt
train
e8c114aacfce9c404e41d16cec045e15fecd06cc
diff --git a/scisalt/matplotlib/colorbar.py b/scisalt/matplotlib/colorbar.py index <HASH>..<HASH> 100644 --- a/scisalt/matplotlib/colorbar.py +++ b/scisalt/matplotlib/colorbar.py @@ -41,12 +41,12 @@ def colorbar(ax, im, fig=None, loc="right", size="5%", pad="3%"): width = fig.get_figwidth() new = width * (1 + _pc2f(size) + _pc2f(pad)) _logger.debug('Setting new figure width: {}'.format(new)) - fig.set_size_inches(new, fig.get_figheight(), forward=True) + # fig.set_size_inches(new, fig.get_figheight(), forward=True) elif loc == "top" or loc == "bottom": height = fig.get_figheight() new = height * (1 + _pc2f(size) + _pc2f(pad)) _logger.debug('Setting new figure height: {}'.format(new)) - fig.set_figheight(fig.get_figwidth(), new, forward=True) + # fig.set_figheight(fig.get_figwidth(), new, forward=True) divider = _ag1.make_axes_locatable(ax) cax = divider.append_axes(loc, size=size, pad=pad) diff --git a/scisalt/matplotlib/imshow.py b/scisalt/matplotlib/imshow.py index <HASH>..<HASH> 100644 --- a/scisalt/matplotlib/imshow.py +++ b/scisalt/matplotlib/imshow.py @@ -19,6 +19,11 @@ __all__ = [ 'scaled_figsize' ] +class smplot(object): + def __init__(self, **kwargs): + for key in kwargs: + setattr(self, key, kwargs[key]) + def imshow(X, ax=None, add_cbar=True, rescale_fig=True, **kwargs): """ @@ -109,12 +114,12 @@ def _plot_array(*args, plottype, ax=None, add_cbar=True, rescale_fig=True, **kwa cb = _cb(ax_h, im) if ax is None: - return fig, ax_h, im + return smplot(fig=fig, ax=ax_h, ax_h=ax_h, im=im) else: if add_cbar: - return im, cb + return smplot(im=im, cb=cb) else: - return im + return smplot(im=im) def scaled_figsize(X, figsize=None, h_pad=None, v_pad=None):
Some mostly behind-the-scenes changes
joelfrederico_SciSalt
train
87ae215a6a56c14bd9e652e159553b700be6e42e
diff --git a/pgmpy/factors/Factor.py b/pgmpy/factors/Factor.py index <HASH>..<HASH> 100644 --- a/pgmpy/factors/Factor.py +++ b/pgmpy/factors/Factor.py @@ -1,11 +1,10 @@ import functools +from itertools import product from collections import namedtuple -from copy import deepcopy import numpy as np from pgmpy.extern import tabulate -from pgmpy.utils.mathext import cartesian State = namedtuple('State', ['var', 'state']) @@ -13,7 +12,7 @@ State = namedtuple('State', ['var', 'state']) class Factor: """ - Base class for *Factor*. + Base class for Factor. Public Methods -------------- @@ -103,42 +102,6 @@ class Factor: """ return self.variables - #TODO: Fix this method - def assignment(self, index): - """ - Returns a list of assignments for the corresponding index. - - Parameters - ---------- - index: integer, list-type, ndarray - index or indices whose assignment is to be computed - - Examples - -------- - >>> import numpy as np - >>> from pgmpy.factors import Factor - >>> phi = Factor(['diff', 'intel'], [2, 2], np.ones(4)) - >>> phi.assignment([1, 2]) - [[('diff', 0), ('intel', 1)], [('diff', 1), ('intel', 0)]] - """ - if isinstance(index, (int, np.integer)): - index = [index] - index = np.array(index) - - max_index = np.prod(self.cardinality) - 1 - if not all(i <= max_index for i in index): - raise IndexError("Index greater than max possible index") - - assignments = np.zeros((len(index), len(self.scope())), dtype=np.int) - rev_card = self.cardinality[::-1] - for i, card in enumerate(rev_card): - assignments[:, i] = index % card - index = index//card - - assignments = assignments[:, ::-1] - - return [[self.variables[key][val] for key, val in zip(self.variables.keys(), values)] for values in assignments] - def get_cardinality(self, variables): """ Returns cardinality of a given variable @@ -573,10 +536,9 @@ class Factor: string_list.append(html_string_header) if html: - html_string_header = '{tr}{variable_cols}{phi}'.format( - tr='<tr', + html_string_header = '<tr>{variable_cols}{phi}'.format( variable_cols=''.join(['<td><b>{var}</b></td>'.format(var=str(var)) for var in self.variables]), - phi='<td><b>{phi_or_p}{vars}</b><d></tr>'.format(phi_or_P=phi_or_p, + phi='<td><b>{phi_or_p}{vars}</b><d></tr>'.format(phi_or_p=phi_or_p, vars=', '.join([str(var) for var in self.variables]))) string_list.append(html_string_header) else: @@ -589,23 +551,23 @@ class Factor: # gen starts with giving fun initial value of b=[0, 0, 0] then fun tries # to increment it # by 1. - def fun(b, index=len(self.cardinality)-1): - b[index] += 1 - if b[index] == self.cardinality[index]: - b[index] = 0 - fun(b, index-1) - return b - - def gen(): - b = [0] * len(self.variables) - yield b - for i in range(np.prod(self.cardinality)-1): - yield fun(b) + # def fun(b, index=len(self.cardinality)-1): + # b[index] += 1 + # if b[index] == self.cardinality[index]: + # b[index] = 0 + # fun(b, index-1) + # return b + # + # def gen(): + # b = [0] * len(self.variables) + # yield b + # for i in range(np.prod(self.cardinality)-1): + # yield fun(b) value_index = 0 factor_table = [] - for prob in gen(): - prob_list = ["%s_%d" % (list(self.variables)[i], prob[i]) + for prob in product(*[range(card) for card in self.cardinality]): + prob_list = ["{s}_{d}".format(s=list(self.variables)[i], d=prob[i]) for i in range(len(self.variables))] if html: html_string = """<tr>%s<td>%4.4f</td></tr>""" % ( @@ -614,7 +576,7 @@ class Factor: self.values[value_index]) string_list.append(html_string) else: - prob_list.append(self.values[value_index]) + prob_list.append(self.values.ravel()[value_index]) factor_table.append(prob_list) value_index += 1 @@ -635,7 +597,7 @@ class Factor: return self.divide(other, inplace=False) def __eq__(self, other): - if type(self) != type(other): + if not isinstance(self, Factor) and isinstance(other, Factor): return False elif set(self.scope()) != set(other.scope()):
removes assignment method because it is not used anywhere
pgmpy_pgmpy
train
37cb54d721d2de57166d47ed0ca6ac711e08651d
diff --git a/lib/ddr/antivirus/version.rb b/lib/ddr/antivirus/version.rb index <HASH>..<HASH> 100644 --- a/lib/ddr/antivirus/version.rb +++ b/lib/ddr/antivirus/version.rb @@ -1,5 +1,5 @@ module Ddr module Antivirus - VERSION = "1.2.1.post" + VERSION = "1.3.0.post" end end
Bumped version to <I>.post
duke-libraries_ddr-antivirus
train
468be0d80f22213c77c97ed2576c246b38252940
diff --git a/holoviews/core/data.py b/holoviews/core/data.py index <HASH>..<HASH> 100644 --- a/holoviews/core/data.py +++ b/holoviews/core/data.py @@ -247,13 +247,18 @@ class Columns(Element): return self.interface.dframe(as_table) - def array(self): + def array(self, as_table=False): if self.interface is None: - dims = self._cached_index_names + self._cached_value_names - return np.column_stack([self.dimension_values(d) for d in dims]) - else: - return self.interface.array() - + return super(Columns, self).array(as_table) + array = self.interface.array() + if as_table: + from ..element import Table + if array.dtype.kind in ['S', 'O', 'U']: + raise ValueError("%s data contains non-numeric type, " + "could not convert to array based " + "Element" % type(self).__name__) + return Table(array, **util.get_param_values(self, Table)) + return array @@ -456,7 +461,7 @@ class ColumnarDataFrame(ColumnarData): def array(self): - return self.element.data.iloc + return self.element.data.values def reindex(self, kdims=None, vdims=None): diff --git a/holoviews/core/element.py b/holoviews/core/element.py index <HASH>..<HASH> 100644 --- a/holoviews/core/element.py +++ b/holoviews/core/element.py @@ -11,7 +11,7 @@ from .ndmapping import OrderedDict, UniformNdMapping, NdMapping, item_check from .overlay import Overlayable, NdOverlay, Overlay, CompositeOverlay from .spaces import HoloMap, GridSpace from .tree import AttrTree -from .util import sanitize_identifier, is_dataframe +from .util import sanitize_identifier, is_dataframe, dimension_sort, get_param_values class Element(ViewableElement, Composable, Overlayable): @@ -428,6 +428,27 @@ class NdElement(NdMapping, Tabular): return list(values) + def array(self, as_table=False): + dims = self.kdims + self.vdims + columns, types = [], [] + for dim in dims: + column = self.dimension_values(d) + data.append(column) + types.append(column.dtype.kind) + if len(set(types)) > 1: + columns = [c.astype('object') for c in columns] + array = np.column_stack(columns) + if as_table: + from ..element import Table + if array.dtype.kind in ['S', 'O', 'U']: + raise ValueError("%s data contains non-numeric type, " + "could not convert to array based " + "Element" % type(self).__name__) + return Table(array, **get_param_values(self, Table)) + else: + return array + + def dframe(self, as_table=False): try: import pandas @@ -437,10 +458,11 @@ class NdElement(NdMapping, Tabular): df = pandas.DataFrame((k+v for (k, v) in self.data.items()), columns=columns) if as_table: from ..element import Table - return Table(df, **self.get_param_values(onlychanged=True)) + return Table(df, **get_param_values(self, Table)) return df + class Element3D(Element2D): extents = param.Tuple(default=(None, None, None, diff --git a/holoviews/core/util.py b/holoviews/core/util.py index <HASH>..<HASH> 100644 --- a/holoviews/core/util.py +++ b/holoviews/core/util.py @@ -594,3 +594,8 @@ def is_dataframe(data): return((pd is not None and isinstance(data, pd.DataFrame)) or (dd is not None and isinstance(data, dd.DataFrame)) or (bz is not None and isinstance(data, bz.Data))) + + +def get_param_values(data, new_type): + return {k: v for k, v in self.get_param_values(onlychanged=True) + if k in new_type.params()}
Improved array and dframe methods
pyviz_holoviews
train
53fc92f7562a5369983ddd5f9c232863929e9c5b
diff --git a/spec/models/has_vcards/concerns/has_vcards_spec.rb b/spec/models/has_vcards/concerns/has_vcards_spec.rb index <HASH>..<HASH> 100644 --- a/spec/models/has_vcards/concerns/has_vcards_spec.rb +++ b/spec/models/has_vcards/concerns/has_vcards_spec.rb @@ -22,7 +22,7 @@ describe HasVcards::Concerns::HasVcards do end it 'delegates attribute accessors to the main vcard' do - attributes = %i[ full_name nickname family_name given_name additional_name honorific_prefix honorific_suffix ] + attributes = [ :full_name, :nickname, :family_name, :given_name, :additional_name, :honorific_prefix, :honorific_suffix ] attributes.each do |attr| expect(something.vcard).to receive(attr) diff --git a/spec/models/has_vcards/vcard_spec.rb b/spec/models/has_vcards/vcard_spec.rb index <HASH>..<HASH> 100644 --- a/spec/models/has_vcards/vcard_spec.rb +++ b/spec/models/has_vcards/vcard_spec.rb @@ -92,7 +92,7 @@ describe HasVcards::Vcard do end it 'delegates attribute accessors to the main address' do - attributes = %i[post_office_box extended_address street_address locality region postal_code country_name zip_locality] + attributes = [ :post_office_box, :extended_address, :street_address, :locality, :region, :postal_code, :country_name, :zip_locality] attributes.each do |attr| expect(@vcard.address).to receive(attr) @vcard.address.send(attr)
Do not use %i syntax to stay compatible with Ruby <I>
huerlisi_has_vcards
train
6f501624288d66db1c8d07db56bd0f67c472a7d6
diff --git a/driver/src/main/java/org/kaazing/k3po/driver/internal/netty/bootstrap/http/HttpServerChannelSink.java b/driver/src/main/java/org/kaazing/k3po/driver/internal/netty/bootstrap/http/HttpServerChannelSink.java index <HASH>..<HASH> 100644 --- a/driver/src/main/java/org/kaazing/k3po/driver/internal/netty/bootstrap/http/HttpServerChannelSink.java +++ b/driver/src/main/java/org/kaazing/k3po/driver/internal/netty/bootstrap/http/HttpServerChannelSink.java @@ -51,7 +51,7 @@ import org.kaazing.k3po.driver.internal.netty.channel.ChannelAddress; public class HttpServerChannelSink extends AbstractServerChannelSink<HttpServerChannel> { private final ConcurrentNavigableMap<URI, HttpServerChannel> httpBindings; - private final ConcurrentMap<URI, HttpTransport> httpTransportsByLocation; // TODO: use address for location stack + private final ConcurrentMap<ChannelAddress, HttpTransport> httpTransports; private final ChannelPipelineFactory pipelineFactory; public HttpServerChannelSink() { @@ -61,7 +61,7 @@ public class HttpServerChannelSink extends AbstractServerChannelSink<HttpServerC private HttpServerChannelSink(ConcurrentNavigableMap<URI, HttpServerChannel> httpBindings) { this.pipelineFactory = new HttpChildChannelPipelineFactory(httpBindings); this.httpBindings = httpBindings; - this.httpTransportsByLocation = new ConcurrentHashMap<URI, HttpTransport>(); + this.httpTransports = new ConcurrentHashMap<>(); } @Override @@ -77,8 +77,7 @@ public class HttpServerChannelSink extends AbstractServerChannelSink<HttpServerC } ChannelAddress address = httpLocalAddress.getTransport(); - URI location = address.getLocation(); - HttpTransport httpTransport = httpTransportsByLocation.get(location); + HttpTransport httpTransport = httpTransports.get(address); if (httpTransport == null) { String schemeName = address.getLocation().getScheme(); String httpSchemeName = httpLocalAddress.getLocation().getScheme(); @@ -91,7 +90,7 @@ public class HttpServerChannelSink extends AbstractServerChannelSink<HttpServerC // bind transport ChannelFuture bindFuture = bootstrap.bindAsync(address); HttpTransport newHttpTransport = new HttpTransport(bindFuture, 1); - httpTransport = httpTransportsByLocation.putIfAbsent(location, newHttpTransport); + httpTransport = httpTransports.putIfAbsent(address, newHttpTransport); if (httpTransport == null) { httpTransport = newHttpTransport; } @@ -126,14 +125,13 @@ public class HttpServerChannelSink extends AbstractServerChannelSink<HttpServerC } ChannelAddress address = httpLocalAddress.getTransport(); - URI location = address.getLocation(); - HttpTransport httpTransport = httpTransportsByLocation.get(location); + HttpTransport httpTransport = httpTransports.get(address); assert httpTransport != null; if (httpTransport.count.decrementAndGet() == 0) { // ensure only zero count is removed HttpTransport oldHttpTransport = new HttpTransport(httpTransport.future); - if (httpTransportsByLocation.remove(location, oldHttpTransport)) { + if (httpTransports.remove(address, oldHttpTransport)) { // unbind transport Channel transport = httpUnbindChannel.getTransport(); ChannelFuture unbindFuture = transport.unbind();
Using channel address as the key. Channel address has transport field that can be used in comparison
k3po_k3po
train
5d60be917de7770521e9fe57da9a69152488df0c
diff --git a/src/test/java/com/beust/jcommander/JCommanderTest.java b/src/test/java/com/beust/jcommander/JCommanderTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/beust/jcommander/JCommanderTest.java +++ b/src/test/java/com/beust/jcommander/JCommanderTest.java @@ -1305,10 +1305,36 @@ public class JCommanderTest { Assert.assertEquals(cc.author, "cedric"); } + static class CommandTemplate { + @Parameter + private List<String> parameters = new ArrayList<>(); + + @Parameter(names = "help", help = true) + private boolean help; + } + + public void noDashCommand() { + class P1 { + @Parameter(names = "hello") + private int test; + } + P1 p1 = new P1(); + JCommander j = new JCommander(); + j.addCommand("p1", p1); + j.parse("p1", "hello", "47"); + Assert.assertEquals(p1.test, 47); + } + @Test(enabled = false) public static void main(String[] args) { - new JCommanderTest().noDash(); + + CommandTemplate template = new CommandTemplate(); + JCommander jcommander = new JCommander(template); + jcommander.setProgramName("prog"); + jcommander.parse("help"); + + if (template.help) { + jcommander.usage(); + } } - // Tests: - // required unparsed parameter }
Add test for commands without a dash.
cbeust_jcommander
train
eedc0e9ecccfa79d52dbf2db492385d9cda9b78c
diff --git a/qtpylib/tools.py b/qtpylib/tools.py index <HASH>..<HASH> 100644 --- a/qtpylib/tools.py +++ b/qtpylib/tools.py @@ -299,7 +299,7 @@ def backdate(res, date=None, as_datetime=False, fmt='%Y-%m-%d', tz="UTC"): return new_date.strftime('%Y-%m-%d %H:%M:%S.%f') # ------------------------------------------- -def previousWeekday(day=None, fmt=None): +def previous_weekday(day=None, as_datetime=False): if day is None: day = datetime.datetime.now() else: @@ -309,9 +309,9 @@ def previousWeekday(day=None, fmt=None): while day.weekday() > 4: # Mon-Fri are 0-4 day -= datetime.timedelta(days=1) - if isinstance(fmt, str): - return day.strftime(fmt) - return day + if as_datetime: + return day + return day.strftime("%Y-%m-%d") # ------------------------------------------- def is_third_friday(day=None):
added option to return string or datetime object + renamed previousWeekday to previous_weekday
ranaroussi_qtpylib
train
1cf66db2bbf1820f8b19608fd93c96ebfcc7fb32
diff --git a/GestureHandler.js b/GestureHandler.js index <HASH>..<HASH> 100644 --- a/GestureHandler.js +++ b/GestureHandler.js @@ -439,7 +439,7 @@ function validatePanGestureHandlerProps(props) { if ( Array.isArray(props.activeOffsetY) && - (props.activeOffsetY[0] > 0 || props.activeOffsetX[1] < 0) + (props.activeOffsetY[0] > 0 || props.activeOffsetY[1] < 0) ) { throw new Error( `First element of activeOffsetY should be negative, a the second one should be positive` @@ -457,7 +457,7 @@ function validatePanGestureHandlerProps(props) { if ( Array.isArray(props.failOffsetY) && - (props.failOffsetY[0] > 0 || props.failOffsetX[1] < 0) + (props.failOffsetY[0] > 0 || props.failOffsetY[1] < 0) ) { throw new Error( `First element of failOffsetY should be negative, a the second one should be positive`
Fix reference to failOffsetY (#<I>) There is a typo which makes it so you cannot give the new `activeOffsetX/Y` and `failOffsetX/Y` props separately.
kmagiera_react-native-gesture-handler
train
d56547e83074ab8bbf1d75befa63568981fd77a4
diff --git a/uncompyle6/parsers/parse33.py b/uncompyle6/parsers/parse33.py index <HASH>..<HASH> 100644 --- a/uncompyle6/parsers/parse33.py +++ b/uncompyle6/parsers/parse33.py @@ -34,7 +34,6 @@ class Python33Parser(Python32Parser): self.remove_rules(""" # 3.3+ adds POP_BLOCKS whileTruestmt ::= SETUP_LOOP l_stmts JUMP_ABSOLUTE JUMP_BACK COME_FROM_LOOP - whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM_LOOP whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK NOP COME_FROM_LOOP whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK NOP COME_FROM_LOOP whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK
Reinstate a <I> grammar rule
rocky_python-uncompyle6
train
42a6a2aba6d4b62e3eace305e22951aeb9d882f1
diff --git a/master/buildbot/process/builder.py b/master/buildbot/process/builder.py index <HASH>..<HASH> 100644 --- a/master/buildbot/process/builder.py +++ b/master/buildbot/process/builder.py @@ -106,9 +106,9 @@ class Builder(util_service.ReconfigurableServiceMixin, # build. builderid = yield self.getBuilderId() - self.master.data.updates.updateBuilderInfo(builderid, - builder_config.description, - builder_config.tags) + yield self.master.data.updates.updateBuilderInfo(builderid, + builder_config.description, + builder_config.tags) self.builder_status.setDescription(builder_config.description) self.builder_status.setTags(builder_config.tags)
process: Fix race condition in builder info updates during reconfig
buildbot_buildbot
train
d5b4e4f1e48936a92cfde46bc243373a06df7293
diff --git a/lib/atdis/model.rb b/lib/atdis/model.rb index <HASH>..<HASH> 100644 --- a/lib/atdis/model.rb +++ b/lib/atdis/model.rb @@ -34,8 +34,8 @@ module ATDIS end def translate_field_mappings(p) - f = ActiveSupport::OrderedHash.new - ca = ActiveSupport::OrderedHash.new + f = {} + ca = {} p.each do |k,v| if leaf_array?(v) f[k] = v[0] diff --git a/spec/atdis/model_spec.rb b/spec/atdis/model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/atdis/model_spec.rb +++ b/spec/atdis/model_spec.rb @@ -182,14 +182,7 @@ describe ATDIS::Model do describe ".attribute_names_from_mappings" do it do - # Doing this nastiness to support Ruby 1.8 - h = ActiveSupport::OrderedHash.new - h[:foo] = :bar - h[:a] = :b - h2 = ActiveSupport::OrderedHash.new - h2[:foo] = :bar2 - h2[:a] = :b2 - h[:info] = h2 + h = {foo: :bar, a: :b, info: {foo: :bar2, a: :b2}} ATDIS::Model.attribute_names_from_mappings(h).should == [:bar, :b, :bar2, :b2] end end
Don't need OrderedHash as we are not supporting ruby <I>
openaustralia_atdis
train
914a2a325a94139065f4cb0b98f81d12792477df
diff --git a/test/e2e/auth/node_authn.go b/test/e2e/auth/node_authn.go index <HASH>..<HASH> 100644 --- a/test/e2e/auth/node_authn.go +++ b/test/e2e/auth/node_authn.go @@ -51,13 +51,6 @@ var _ = SIGDescribe("[Feature:NodeAuthenticator]", func() { nodeIPs := e2enode.GetAddressesByTypeAndFamily(&nodes.Items[0], v1.NodeInternalIP, family) framework.ExpectNotEqual(len(nodeIPs), 0) - - // make sure ServiceAccount admission controller is enabled, so secret generation on SA creation works - saName := "default" - sa, err := f.ClientSet.CoreV1().ServiceAccounts(ns).Get(context.TODO(), saName, metav1.GetOptions{}) - framework.ExpectNoError(err, "failed to retrieve service account (%s:%s)", ns, saName) - framework.ExpectNotEqual(len(sa.Secrets), 0) - }) ginkgo.It("The kubelet's main port 10250 should reject requests with no credentials", func() {
auth e2e: node_authn test: don't expect a SA secret The test was expecting an SA token in a secret but pods are getting their SA tokens via projected volumes by default. Also, the SA token controller function is getting reduced so the original check is likely to fail.
kubernetes_kubernetes
train
2e904106a5c577df6755e394f52ac718a648e7d5
diff --git a/src/client/pkg/discovery/discovery.go b/src/client/pkg/discovery/discovery.go index <HASH>..<HASH> 100644 --- a/src/client/pkg/discovery/discovery.go +++ b/src/client/pkg/discovery/discovery.go @@ -4,8 +4,10 @@ import ( "fmt" ) +// ErrCancelled is returned when an action is cancelled by the user var ErrCancelled = fmt.Errorf("pachyderm: cancelled by user") +// Client defines Pachyderm's interface to key-value stores such as etcd. type Client interface { // Close closes the underlying connection. Close() error @@ -38,6 +40,7 @@ type Client interface { CheckAndSet(key string, value string, ttl uint64, oldValue string) error } +// NewEtcdClient creates an etcdClient with the given addresses. func NewEtcdClient(addresses ...string) Client { return newEtcdClient(addresses...) }
Fix linting in discovery package
pachyderm_pachyderm
train
52c52dfca005ff97681fc5a47726b75e527d638c
diff --git a/src/Popover.js b/src/Popover.js index <HASH>..<HASH> 100644 --- a/src/Popover.js +++ b/src/Popover.js @@ -23,7 +23,7 @@ type PlacementEightPoints = | 'autoHorizontalBottom'; type Props = { - placement: PlacementFourSides | PlacementEightPoints, + placement?: PlacementFourSides | PlacementEightPoints, classPrefix: string, children?: React.Node, title?: React.Node, @@ -36,9 +36,6 @@ type Props = { }; class Popover extends React.Component<Props> { - static defaultProps = { - placement: 'top' - }; render() { const { classPrefix, @@ -54,15 +51,10 @@ class Popover extends React.Component<Props> { } = this.props; const addPrefix = prefix(classPrefix); - - const classes = classNames( - classPrefix, - addPrefix(`placement-${_.kebabCase(placement)}`), - className, - { - [addPrefix('full')]: full - } - ); + const classes = classNames(classPrefix, className, { + [addPrefix(`placement-${_.kebabCase(placement || '')}`)]: placement, + [addPrefix('full')]: full + }); const styles = { display: 'block', diff --git a/src/Tooltip.js b/src/Tooltip.js index <HASH>..<HASH> 100644 --- a/src/Tooltip.js +++ b/src/Tooltip.js @@ -36,10 +36,6 @@ type Props = { }; class Tooltip extends React.Component<Props> { - static defaultProps = { - placement: 'top' - }; - render() { let { placement, @@ -55,11 +51,9 @@ class Tooltip extends React.Component<Props> { } = this.props; const addPrefix = prefix(classPrefix); - const classes = classNames( - classPrefix, - addPrefix(`placement-${_.kebabCase(placement)}`), - className - ); + const classes = classNames(classPrefix, className, { + [addPrefix(`placement-${_.kebabCase(placement || '')}`)]: placement + }); const styles = { left: positionLeft, top: positionTop, diff --git a/test/PopoverSpec.js b/test/PopoverSpec.js index <HASH>..<HASH> 100644 --- a/test/PopoverSpec.js +++ b/test/PopoverSpec.js @@ -11,7 +11,7 @@ describe('Popover', () => { const instance = ReactTestUtils.renderIntoDocument(<Popover>{title}</Popover>); const instanceDom = findDOMNode(instance); assert.equal(instanceDom.tagName, 'DIV'); - assert.ok(instanceDom.className.match(/\bpopover\b/)); + assert.equal(instanceDom.className, 'rs-popover'); assert.equal(innerText(instanceDom), title); }); diff --git a/test/TooltipSpec.js b/test/TooltipSpec.js index <HASH>..<HASH> 100644 --- a/test/TooltipSpec.js +++ b/test/TooltipSpec.js @@ -11,7 +11,7 @@ describe('Tooltip', () => { const instance = ReactTestUtils.renderIntoDocument(<Tooltip>{title}</Tooltip>); const instanceDom = findDOMNode(instance); assert.equal(instanceDom.tagName, 'DIV'); - assert.ok(instanceDom.className.match(/\btooltip\b/)); + assert.equal(instanceDom.className, 'rs-tooltip'); assert.equal(innerText(instanceDom), title); });
In Popover and Tooltip, delete the default for placement (#<I>)
rsuite_rsuite
train
5fc31716dd4676318b9857f8ce3bd2e9afd6a5ca
diff --git a/src/Composer/Util/Http/CurlDownloader.php b/src/Composer/Util/Http/CurlDownloader.php index <HASH>..<HASH> 100644 --- a/src/Composer/Util/Http/CurlDownloader.php +++ b/src/Composer/Util/Http/CurlDownloader.php @@ -74,7 +74,7 @@ class CurlDownloader $this->multiHandle = $mh = curl_multi_init(); if (function_exists('curl_multi_setopt')) { - curl_multi_setopt($mh, CURLMOPT_PIPELINING, /*CURLPIPE_HTTP1 | CURLPIPE_MULTIPLEX*/ 3); + curl_multi_setopt($mh, CURLMOPT_PIPELINING, PHP_VERSION_ID >= 70400 ? /* CURLPIPE_MULTIPLEX */ 2 : /*CURLPIPE_HTTP1 | CURLPIPE_MULTIPLEX*/ 3); if (defined('CURLMOPT_MAX_HOST_CONNECTIONS')) { curl_multi_setopt($mh, CURLMOPT_MAX_HOST_CONNECTIONS, 8); }
Avoid using CURLPIPE_HTTP1 in php<I>+
composer_composer
train
f63f90134ad46059e7848515d8be5284644bd95c
diff --git a/actionpack/test/template/form_helper_test.rb b/actionpack/test/template/form_helper_test.rb index <HASH>..<HASH> 100644 --- a/actionpack/test/template/form_helper_test.rb +++ b/actionpack/test/template/form_helper_test.rb @@ -106,7 +106,6 @@ class FormHelperTest < ActionView::TestCase if object.is_a?(Hash) && object[:use_route].blank? && object[:controller].blank? object.merge!(:controller => "main", :action => "index") end - object super end @@ -269,7 +268,7 @@ class FormHelperTest < ActionView::TestCase assert_dom_equal expected, hidden_field("post", "title", :value => nil) end - def test_text_field_with_options + def test_hidden_field_with_options assert_dom_equal '<input id="post_title" name="post[title]" type="hidden" value="Something Else" />', hidden_field("post", "title", :value => "Something Else") end
Rename duplicated test, and give it a correct name. Remove nonsense line.
rails_rails
train
0d5d6486879437b8667d862c6e64d3a715d92339
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AddRemoveRulesTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AddRemoveRulesTest.java index <HASH>..<HASH> 100644 --- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AddRemoveRulesTest.java +++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/AddRemoveRulesTest.java @@ -162,7 +162,6 @@ public class AddRemoveRulesTest { @Test - @Ignore("NPE in AddRemoveRule.initNewSegment") public void test() throws Exception { KieSession knowledgeSession = base.newKieSession(); knowledgeSession.fireAllRules();
[DROOLS-<I>] Remove @Ignore from failing test
kiegroup_drools
train
c8f765986d9614fefbe80511c7cdfff693b16d1b
diff --git a/backbone.queryparams.js b/backbone.queryparams.js index <HASH>..<HASH> 100644 --- a/backbone.queryparams.js +++ b/backbone.queryparams.js @@ -37,12 +37,14 @@ var _getFragment = function(fragment, forcePushState) { } _.extend(Backbone.History.prototype, { - getFragment : function(fragment, forcePushState, excludeQueryString) { + getFragment : function(fragment, forcePushState) { + excludeQueryString = (this._wantsHashChange && this._wantsPushState && + !this._hasPushState); fragment = _getFragment.apply(this, arguments); - if (excludeQueryString) { - fragment = fragment.replace(queryStrip, ''); - } else if (! hasQueryString.test(fragment)) { + if (!hasQueryString.test(fragment)) { fragment += this.location.search; + } else if (excludeQueryString) { + fragment = fragment.replace(queryStrip, ''); } return fragment; },
Fix IE doubling the query parameters in the URL (before and after the fragment); now they appear only before the fragment
jhudson8_backbone-query-parameters
train
fd146c672a32285248962e52c0978a73e0d4f061
diff --git a/python/rez/cli/build.py b/python/rez/cli/build.py index <HASH>..<HASH> 100644 --- a/python/rez/cli/build.py +++ b/python/rez/cli/build.py @@ -164,7 +164,6 @@ def setup_parser(parser): help="build type") parser.add_argument("-b", "--build-system", dest="build_system", choices=sorted(BUILD_SYSTEMS.keys()), - # type=lambda x: BUILD_SYSTEMS[x], default='eclipse') parser.add_argument("--retain-cache", dest="retain_cmake_cache", action="store_true", default=False, diff --git a/python/rez/cli/cmake.py b/python/rez/cli/cmake.py index <HASH>..<HASH> 100644 --- a/python/rez/cli/cmake.py +++ b/python/rez/cli/cmake.py @@ -46,7 +46,6 @@ def setup_parser(parser): help="build type") parser.add_argument("-b", "--build-system", dest="build_system", choices=sorted(rez.cmake.BUILD_SYSTEMS.keys()), - type=lambda x: rez.cmake.BUILD_SYSTEMS[x], default='eclipse') parser.add_argument("-i", "--install-directory", dest="install_dir", default=os.environ['REZ_LOCAL_PACKAGES_PATH'], diff --git a/python/rez/cmake.py b/python/rez/cmake.py index <HASH>..<HASH> 100644 --- a/python/rez/cmake.py +++ b/python/rez/cmake.py @@ -13,7 +13,7 @@ BUILD_SYSTEMS = {'eclipse': "Eclipse CDT4 - Unix Makefiles", 'make': "Unix Makefiles", 'xcode': "Xcode"} -class RezCMakeError(RezError): +class RezCMakeError(rez.exceptions.RezError): """ rez cmake error """
+ Minor bug fixes to previous commits.
nerdvegas_rez
train
7bc80acad0b8112691979a68b0e7f7ad59b64e21
diff --git a/serenata_toolbox/datasets.py b/serenata_toolbox/datasets.py index <HASH>..<HASH> 100644 --- a/serenata_toolbox/datasets.py +++ b/serenata_toolbox/datasets.py @@ -14,7 +14,8 @@ def fetch_latest_backup(destination_path, '2016-11-19-current-year.xz', '2016-11-19-last-year.xz', '2016-11-19-previous-years.xz', - '2016-11-19-reimbursements.xz'] + '2016-11-19-reimbursements.xz', + '2016-11-28-congressperson-civil-names.xz'] for filename in files: url = 'https://{}.amazonaws.com/{}/{}'.format(aws_region, aws_bucket,
Adds congressperson civil names to datasets
okfn-brasil_serenata-toolbox
train
c6939d57dcdce70ecfec7a6d67d4c00f770ec6a7
diff --git a/webhook.go b/webhook.go index <HASH>..<HASH> 100644 --- a/webhook.go +++ b/webhook.go @@ -186,7 +186,7 @@ func main() { } router.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { - fmt.Fprintf(w, "OK") + fmt.Fprint(w, "OK") }) router.HandleFunc(hooksURL, hookHandler) @@ -273,7 +273,7 @@ func hookHandler(w http.ResponseWriter, r *http.Request) { msg := fmt.Sprintf("[%s] error evaluating hook: %s", rid, err) log.Print(msg) w.WriteHeader(http.StatusInternalServerError) - fmt.Fprintf(w, "Error occurred while evaluating hook rules.") + fmt.Fprint(w, "Error occurred while evaluating hook rules.") return } } @@ -291,17 +291,17 @@ func hookHandler(w http.ResponseWriter, r *http.Request) { if err != nil { w.WriteHeader(http.StatusInternalServerError) if matchedHook.CaptureCommandOutputOnError { - fmt.Fprintf(w, response) + fmt.Fprint(w, response) } else { w.Header().Set("Content-Type", "text/plain; charset=utf-8") - fmt.Fprintf(w, "Error occurred while executing the hook's command. Please check your logs for more details.") + fmt.Fprint(w, "Error occurred while executing the hook's command. Please check your logs for more details.") } } else { // Check if a success return code is configured for the hook if matchedHook.SuccessHttpResponseCode != 0 { writeHttpResponseCode(w, rid, matchedHook.ID, matchedHook.SuccessHttpResponseCode) } - fmt.Fprintf(w, response) + fmt.Fprint(w, response) } } else { go handleHook(matchedHook, rid, &headers, &query, &payload, &body) @@ -311,7 +311,7 @@ func hookHandler(w http.ResponseWriter, r *http.Request) { writeHttpResponseCode(w, rid, matchedHook.ID, matchedHook.SuccessHttpResponseCode) } - fmt.Fprintf(w, matchedHook.ResponseMessage) + fmt.Fprint(w, matchedHook.ResponseMessage) } return } @@ -324,10 +324,10 @@ func hookHandler(w http.ResponseWriter, r *http.Request) { // if none of the hooks got triggered log.Printf("[%s] %s got matched, but didn't get triggered because the trigger rules were not satisfied\n", rid, matchedHook.ID) - fmt.Fprintf(w, "Hook rules were not satisfied.") + fmt.Fprint(w, "Hook rules were not satisfied.") } else { w.WriteHeader(http.StatusNotFound) - fmt.Fprintf(w, "Hook not found.") + fmt.Fprint(w, "Hook not found.") } }
Replaced fmt.Fprintf calls with fmt.Fprint when there's no formatting used. This fixes #<I>
adnanh_webhook
train
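A side note on the bug class this commit removes: when the first argument to a printf-style function is data rather than a literal format string, any '%' in the data is interpreted as a format verb. The same hazard exists in Python's %-formatting; a minimal sketch (the `response` value is made up for illustration):

import sys

response = '100% complete'  # data that happens to contain a stray '%'

# Analogue of fmt.Fprintf(w, response): the data is parsed for format verbs.
try:
    sys.stdout.write(response % ())
except ValueError as err:
    print('formatting failed:', err)  # '%' followed by ' ' is not a valid verb

# Analogue of fmt.Fprint(w, response): the data is written verbatim.
sys.stdout.write(response + '\n')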
a27354a82b0954a22612c022d86c8b1df9e120ef
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -209,7 +209,8 @@ setup( packages=find_packages(exclude=['test', 'test.*']), - install_requires=['colorama', 'doppel', 'enum-compat', 'packaging', 'six'], + install_requires=['colorama', 'doppel==0.1.0.dev0', 'enum-compat', + 'packaging', 'six'], extras_require={ 'msbuild': ['lxml'], 'lint': ['flake8'],
Explicitly specify doppel version. This works around an issue with pip (or PyPI?) where it won't pick up doppel, presumably because it's a dev release.
jimporter_bfg9000
train
c255bcd40054a6d889d04d19ff8325717720ccd1
diff --git a/lib/waterline.js b/lib/waterline.js index <HASH>..<HASH> 100644 --- a/lib/waterline.js +++ b/lib/waterline.js @@ -207,14 +207,18 @@ Waterline.prototype.initialize = function(options, cb) { }, next); }] - }, function(err) { - if (err) return cb(err); - self.bootstrap(function(err) { - if (err) return cb(err); - cb(null, { collections: self.collections, connections: self.connections }); - }); - }); + }, function asyncCb(err) { + if (err) { + return cb(err); + } + var ontology = { + collections: self.collections, + connections: self.connections + }; + + cb(null, ontology); + }); }; /** @@ -235,51 +239,3 @@ Waterline.prototype.teardown = function teardown(cb) { connection._adapter.teardown(item, next); }, cb); }; - -/** - * Bootstrap - * - * Auto-migrate all collections - */ - -Waterline.prototype.bootstrap = function bootstrap(cb) { - var self = this; - - // - // TODO: - // Come back to this -- see https://github.com/balderdashy/waterline/issues/259 - // (the stuff in this file works fine-- the work would be structural changes elsewhere) - // - - // // Use the schema to get a list of junction tables idents - // // and then determine which are "logical" collections - // // (i.e. everything EXCEPT junction tables) - // var junctionTableIdents = _(this.schema).filter({junctionTable: true}).pluck('identity').value(); - // var logicalCollections = _(this.collections).omit(junctionTableIdents).value(); - - // // Flatten logical collections obj into an array for convenience - // var toBeSynced = _.reduce(logicalCollections, function(logicals,coll,ident) { - // logicals.push(coll); - // return logicals; - // }, []); - - // // console.log(junctionTableIdents); - // // console.log(Object.keys(logicalCollections)); - // // console.log('\n', - // // 'Migrating collections ::', - // // _(toBeSynced).pluck('identity').value() - // // ); - - // For now: - var toBeSynced = _.reduce(this.collections, function(resources, collection, ident) { - resources.push(collection); - return resources; - }, []); - - // Run auto-migration strategies on each collection - // async.each(toBeSynced, function(collection, next) { - async.eachSeries(toBeSynced, function(collection, next) { - // async.eachLimit(toBeSynced, 9, function(collection, next) { - collection.sync(next); - }, cb); -};
pull auto-migrations out of waterline core
balderdashy_waterline
train
ec7cca33e8b10ad36797ab079e39b9d766a0ae5b
diff --git a/jira/client.py b/jira/client.py index <HASH>..<HASH> 100644 --- a/jira/client.py +++ b/jira/client.py @@ -404,6 +404,34 @@ class JIRA(object): filters = [Filter(self._options, self._session, raw_filter_json) for raw_filter_json in r_json] return filters + def create_filter(self, name=None, description = None, + jql = None, favourite=None): + """ + Create a new filter and return a filter Resource for it. + + Keyword arguments: + name -- name of the new filter + description -- useful human readable description of the new filter + jql -- query string that defines the filter + favourite -- whether to add this filter to the current user's favorites + + """ + data = {} + if name is not None: + data['name']=name + if description is not None: + data['description']=description + if jql is not None: + data['jql']=jql + if favourite is not None: + data['favourite']=favourite + url = self._get_url('filter') + r = self._session.post(url, headers={'content-type': 'application/json'}, data=json.dumps(data)) + raise_on_error(r) + + raw_filter_json = json.loads(r.text) + return Filter(self._options, self._session, raw=raw_filter_json) + # Groups # non-resource
Added create_filter(name = None, description = None, jql = None, favourite = None). It uses the post feature of /rest/api/2/filter to create a new filter.
pycontribs_jira
train
39ddfbe950cf567bb23e70a1723f4b847d7c4974
diff --git a/salt/modules/smtp.py b/salt/modules/smtp.py index <HASH>..<HASH> 100644 --- a/salt/modules/smtp.py +++ b/salt/modules/smtp.py @@ -105,7 +105,7 @@ def send_msg(recipient, log.debug("Exception: {0}" . format(_error)) return False - if not use_ssl in ['True', 'true']: + if use_ssl not in ('True', 'true'): smtpconn.ehlo() if smtpconn.has_extn('STARTTLS'): try:
Fix PEP8 E<I> - test for membership should be "not in"
saltstack_salt
train
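The two spellings involved in this fix are equivalent at runtime; flake8's E713 is purely about readability of the membership operator. A small self-contained illustration (the value is made up):

use_ssl = 'false'

# Flagged by flake8 as E713: "test for membership should be 'not in'".
if not use_ssl in ('True', 'true'):
    print('plain SMTP connection')

# Preferred form -- same result, explicit operator.
if use_ssl not in ('True', 'true'):
    print('plain SMTP connection')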
b719398d55e3a3f0d65141a69988149e5b3d04d6
diff --git a/cli/cumulusci.py b/cli/cumulusci.py index <HASH>..<HASH> 100644 --- a/cli/cumulusci.py +++ b/cli/cumulusci.py @@ -37,10 +37,8 @@ class Config(object): 'OAUTH_CLIENT_ID', 'OAUTH_CLIENT_SECRET', 'OAUTH_CALLBACK_URL', - 'INSTANCE_URL', 'REFRESH_TOKEN', 'MRBELVEDERE_PACKAGE_KEY', - 'APEXTESTSDB_USER_ID', 'APEXTESTSDB_TOKEN', ) def __init__(self): @@ -392,6 +390,12 @@ def ci_apextestsdb_upload(config, environment): # opt: --execution-url args += ['--execution-url', config.build_url] + # opt: --environment + if environment: + args += ['--environment', environment] + + click.echo("Calling: cumulusci dev apextestsdb_upload %s" % ' '.join(args)) + apextestsdb_upload.main(args=args, standalone_mode=False, obj=config)
Pass --environment option from ci apextestsdb_upload and print the subcommand being called with arguments
SFDO-Tooling_CumulusCI
train
35e11677f60d94d5934ee26f6c9ec8329cd09656
diff --git a/test/functional/python2icu.js b/test/functional/python2icu.js index <HASH>..<HASH> 100644 --- a/test/functional/python2icu.js +++ b/test/functional/python2icu.js @@ -1,4 +1,19 @@ -var fs = require('fs'); -var path = require('path'); var tap = require('tap'); var po2icu = require('../../lib/po2icu'); + +tap.test('pythonDigitToICU', function (t) { + var testFile = './test/fixtures/python.formatted.po'; + var object = po2icu.poFileToICUSync('es', testFile); + + var icuIdealKey = '{minutes, plural,\n' + + ' one {1 minute ago}\n' + + ' other {{minutes} minutes ago}\n' + + '}'; + var icuIdealValue = '{minutes, plural,\n' + + ' one {hace 1 minuto}\n' + + ' other {hace {minutes} minutos}\n' + + '}'; + t.ok(object.hasOwnProperty(icuIdealKey)); + t.equal(icuIdealValue, object[icuIdealKey]); + t.end(); +});
Add functional test for formatted po file
LLK_po2icu
train
b200428dd672ac1de2701d72b296482e3353b44b
diff --git a/lib/eiscp.rb b/lib/eiscp.rb index <HASH>..<HASH> 100644 --- a/lib/eiscp.rb +++ b/lib/eiscp.rb @@ -4,6 +4,6 @@ module EISCP VERSION = '0.0.3' end -require 'eiscp/receiver' -require 'eiscp/message' -require 'eiscp/command' +require_relative './eiscp/receiver' +require_relative './eiscp/message' +require_relative './eiscp/command' diff --git a/lib/eiscp/command.rb b/lib/eiscp/command.rb index <HASH>..<HASH> 100644 --- a/lib/eiscp/command.rb +++ b/lib/eiscp/command.rb @@ -1,5 +1,5 @@ require 'yaml' -require 'eiscp/receiver' +require_relative './receiver' require 'ostruct' module Command diff --git a/lib/eiscp/receiver.rb b/lib/eiscp/receiver.rb index <HASH>..<HASH> 100644 --- a/lib/eiscp/receiver.rb +++ b/lib/eiscp/receiver.rb @@ -1,5 +1,5 @@ require 'socket' -require 'eiscp/message' +require_relative './message' require 'resolv' module EISCP @@ -153,5 +153,6 @@ module EISCP end end - end + end +end
require_relative suggests refactoring is needed but this works for now
mikerodrigues_onkyo_eiscp_ruby
train
1dc9fe7f96fbb0cafa1b6c8ba2e4fb982b77bc5b
diff --git a/glamkit_collections/contrib/work_creator/models.py b/glamkit_collections/contrib/work_creator/models.py index <HASH>..<HASH> 100644 --- a/glamkit_collections/contrib/work_creator/models.py +++ b/glamkit_collections/contrib/work_creator/models.py @@ -321,7 +321,6 @@ class WorkBase( blank=True, ) wikipedia_link = models.URLField(blank=True, help_text="e.g. 'https://en.wikipedia.org/wiki/Beauty_and_the_Beast_(2014_film)'") - admin_notes = models.TextField(blank=True) images = models.ManyToManyField('icekit_plugins_image.Image', through="WorkImage") class Meta:
admin_notes is now in parent ICEkit
ic-labs_django-icekit
train
61b32009079282b62ab2db591b60c4f43ee43e86
diff --git a/tests/try_with_value.js b/tests/try_with_value.js index <HASH>..<HASH> 100644 --- a/tests/try_with_value.js +++ b/tests/try_with_value.js @@ -23,7 +23,6 @@ function CounterAggregate(){ util.inherits(CounterAggregate, esdf.core.EventSourcedAggregate); CounterAggregate.prototype.incrementAndReturn = function incrementAndReturn() { - console.log('incrementAndReturn'); this._stageEvent(new esdf.core.Event('Incremented', {})); return this._counter; }; @@ -69,6 +68,28 @@ describe('tryWith', function(){ done(); }).catch(done); }); + + it('should return the latest method return value on reload/retry also in advanced mode', function(done) { + var counter1 = null; + var counter2 = null; + when.all([ + tryWith(loader, CounterAggregate, 'TestAggregate-4', function testUserFunction(counter){ + return counter.incrementAndReturn(); + }, { advanced: true }).then(function(output){ + counter1 = output.result; + }), + tryWith(loader, CounterAggregate, 'TestAggregate-4', function testUserFunction(counter){ + return counter.incrementAndReturn(); + }, { advanced: true }).then(function(output){ + counter2 = output.result; + }) + ]).then(function() { + assert((typeof counter1) === 'number'); + assert((typeof counter2) === 'number'); + assert.notStrictEqual(counter1, counter2); + done(); + }).catch(done); + }); }); describe('Repository', function(){ diff --git a/utils/tryWith.js b/utils/tryWith.js index <HASH>..<HASH> 100644 --- a/utils/tryWith.js +++ b/utils/tryWith.js @@ -64,22 +64,11 @@ function tryWith(loaderFunction, ARConstructor, ARID, userFunction, options){ stagedCommit = aggregateInstance.getCommit(options.commitMetadata || {}); // Actually commit: - return when.try(aggregateInstance.commit.bind(aggregateInstance), options.commitMetadata || {}).yield(userFunctionResult).catch(function handleSavingError(savingError) { - failureLogger(savingError); - var strategyAllowsAnotherTry = shouldTryAgain(savingError); - if (strategyAllowsAnotherTry) { - return delay(singlePass); - } - else { - return when.reject(savingError); - } - }).then(function(finalResult) { - // Note: we accept finalResult into this function because the retry - // may have overridden it since the first entry into userFunction. + return when.try(aggregateInstance.commit.bind(aggregateInstance), options.commitMetadata || {}).then(function _buildOutput() { // If the caller has requested an "advanced format" result, pass the data through to them, enriched with the result of the user function. if (options.advanced) { var output = { - result: finalResult, + result: userFunctionResult, rehydration: loadingResult.rehydration }; // Additionally, if "newCommits" is enabled, also add the events produced by the current invocation to the returned property. @@ -89,7 +78,16 @@ function tryWith(loaderFunction, ARConstructor, ARID, userFunction, options){ return output; } else { - return finalResult; + return userFunctionResult; + } + }, function handleSavingError(savingError) { + failureLogger(savingError); + var strategyAllowsAnotherTry = shouldTryAgain(savingError); + if (strategyAllowsAnotherTry) { + return delay(singlePass); + } + else { + return when.reject(savingError); } }); });
Fix passing of method return values after automatic tryWith retries in advanced mode, too
rkaw92_esdf
train
dec0a9ce5ba068c11645ae0be0f301369f4f5f95
diff --git a/controllers/base.php b/controllers/base.php index <HASH>..<HASH> 100644 --- a/controllers/base.php +++ b/controllers/base.php @@ -419,14 +419,16 @@ abstract class Decoy_Base_Controller extends Controller { $items = Model::where_in('id', $ids); if (empty($items)) return Response::error('404'); - // Delete images if they are defined. + // Delete foreach($items->get() as $item) { + + // Delete images if (!method_exists($item, 'image') && !empty($item->image)) Croppa::delete($item->image); + + // Delete row. These are deleted one at a time so that model events will fire. + $item->delete(); } - - // Delete the row - $items->delete(); - + // If the referrer contains the controller route, that would mean that we're // redirecting back to the edit page (which no longer exists). Thus, go to a // listing instead. Otherwise, go back (accoridng to referrer)
Deleting each row one at a time so that events get fired
BKWLD_decoy
train
d24e3a5752be5fd5dfa526c199cdda97322b421a
diff --git a/react-mixin-manager.js b/react-mixin-manager.js index <HASH>..<HASH> 100644 --- a/react-mixin-manager.js +++ b/react-mixin-manager.js @@ -220,6 +220,7 @@ if (match && !_mixins[match[1]]) { _add(match[1]); } + return mixin; } function GROUP() { @@ -297,11 +298,11 @@ }, alias: function(name) { - addMixin(name, GROUP, Array.prototype.slice.call(arguments, 1), false); + return addMixin(name, GROUP, Array.prototype.slice.call(arguments, 1), false); }, add: function( /* options, mixin */ ) { - addMixin.apply(this, mixinParams(arguments)); + return addMixin.apply(this, mixinParams(arguments)); }, exists: function(name) {
return the mixin added from the add mixin call
jhudson8_react-mixin-manager
train
6465c2aac3efc2e49349578836e2917812fc4350
diff --git a/pkg/datapath/loader/cache.go b/pkg/datapath/loader/cache.go index <HASH>..<HASH> 100644 --- a/pkg/datapath/loader/cache.go +++ b/pkg/datapath/loader/cache.go @@ -276,25 +276,34 @@ func (o *objectCache) fetchOrCompile(ctx context.Context, cfg datapath.EndpointC }() } + scopedLog := log.WithField(logfields.BPFHeaderfileHash, hash) + // Serializes attempts to compile this cfg. fq, compiled := o.serialize(hash) if !compiled { fq.Enqueue(func() error { defer fq.Stop() templateCfg := wrap(cfg, stats) - return o.build(ctx, templateCfg, hash) + err := o.build(ctx, templateCfg, hash) + if err != nil { + scopedLog.WithError(err).Error("BPF template object creation failed") + } + return err }, serializer.NoRetry) } // Wait until the build completes. if err = fq.Wait(ctx); err != nil { + scopedLog.WithError(err).Warning("Error while waiting for BPF template compilation") return "", false, fmt.Errorf("BPF template compilation failed: %s", err) } // Fetch the result of the compilation. path, ok := o.lookup(hash) if !ok { - return "", false, fmt.Errorf("BPF template compilation unsuccessful") + err := fmt.Errorf("Could not locate previously compiled BPF template") + scopedLog.WithError(err).Warning("BPF template compilation unsuccessful") + return "", false, err } return path, !compiled, nil
loader: Improve logging of template build failures
cilium_cilium
train
036ec16e33022d6d5e1b557d0a1d9414025ff888
diff --git a/livelossplot/main_logger.py b/livelossplot/main_logger.py index <HASH>..<HASH> 100644 --- a/livelossplot/main_logger.py +++ b/livelossplot/main_logger.py @@ -25,6 +25,16 @@ class MainLogger: auto_generate_groups_if_not_available: bool = True, auto_generate_metric_to_name: bool = True ): + """ + :param groups - dictionary with grouped metrics for example one group can contains + one metric in different stages for example Validation, Training etc.: + :param group_patterns - you can put there regular expressions to match a few metric names with group: + :param metric_to_name - transformation of metric name which can be used to display name: + :param current_step - current step of the train engine: + :param auto_generate_groups_if_not_available - flag, that enable auto-creation of metric groups: + :param auto_generate_metric_to_name - flag, that enable auto-creation of metric long names, + based on common shortcuts: + """ self.log_history = {} self.groups = groups self.group_patterns = group_patterns
Docstring in main logger init
stared_livelossplot
train
87a313c77e3c73a24ac932ecaf577ff149477082
diff --git a/lib/discourse/oneboxer/gist_onebox.rb b/lib/discourse/oneboxer/gist_onebox.rb index <HASH>..<HASH> 100644 --- a/lib/discourse/oneboxer/gist_onebox.rb +++ b/lib/discourse/oneboxer/gist_onebox.rb @@ -1,29 +1,31 @@ require_dependency 'oneboxer/handlebars_onebox' -module Oneboxer - class GistOnebox < HandlebarsOnebox +module Discourse + module Oneboxer + class GistOnebox < HandlebarsOnebox - matcher /^https?:\/\/gist\.github\.com/ - favicon 'github.png' + matcher /^https?:\/\/gist\.github\.com/ + favicon 'github.png' - def translate_url - m = @url.match(/gist\.github\.com\/([^\/]+\/)?(?<id>[0-9a-f]+)/mi) - return "https://api.github.com/gists/#{m[:id]}" if m - end - - def parse(data) - parsed = JSON.parse(data) - desc = parsed['description'] - if desc.length > 120 - desc = desc[0..120] - desc << "..." + def translate_url + m = @url.match(/gist\.github\.com\/([^\/]+\/)?(?<id>[0-9a-f]+)/mi) + return "https://api.github.com/gists/#{m[:id]}" if m end - result = {files: [], title: desc} - parsed['files'].each do |filename, attrs| - result[:files] << {filename: filename}.merge!(attrs) + + def parse(data) + parsed = JSON.parse(data) + desc = parsed['description'] + if desc.length > 120 + desc = desc[0..120] + desc << "..." + end + result = {files: [], title: desc} + parsed['files'].each do |filename, attrs| + result[:files] << {filename: filename}.merge!(attrs) + end + result end - result - end + end end -end +end \ No newline at end of file
namespacing gist oneboxer
discourse_onebox
train
dac27368bfc227df6954af16fcc111367a803c41
diff --git a/tests/Unit/Application/Model/OrderTest.php b/tests/Unit/Application/Model/OrderTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/Application/Model/OrderTest.php
+++ b/tests/Unit/Application/Model/OrderTest.php
@@ -10,6 +10,7 @@ use oxArticleHelper;
 use \oxdeliverylist;
 use oxEmailHelper;
 use \oxField;
+use OxidEsales\Eshop\Application\Model\Basket;
 use OxidEsales\Eshop\Application\Model\Order;
 use OxidEsales\Eshop\Application\Model\Payment;
 use OxidEsales\Eshop\Application\Model\UserPayment;
@@ -297,40 +298,86 @@ class OrderTest extends \OxidTestCase
         $this->assertNull($oOrder->validateDelivery($oBasket));
     }
 
-    public function testValidatePayment()
+    public function testValidatePaymentWhenPaymentIsValid()
     {
-        $paymentModel = $this
-            ->getMockBuilder(Payment::class)
-            ->setMethods(['isValidPayment'])
-            ->getMock();
+        $paymentModel = $this->getMock(Payment::class, ['isValidPayment']);
+        $paymentModel
+            ->method('isValidPayment')
+            ->willReturn(true);
+
+        UtilsObject::setClassInstance(Payment::class, $paymentModel);
+
+        $order = $this->getMock(Order::class, ['getPaymentType']);
+        $order
+            ->method('getPaymentType')
+            ->willReturn(
+                oxNew(UserPayment::class)
+            );
+
+        $paymentId = oxDb::getDb()->getOne('select oxid from oxpayments where oxactive = 1');
+        $basket = $this->getMock(Basket::class, array("getPaymentId"));
+        $basket
+            ->method("getPaymentId")
+            ->willReturn($paymentId);
+
+        $this->assertNull($order->validatePayment($basket));
+    }
+
+    public function testValidatePaymentWithWrongPaymentId()
+    {
+        $paymentModel = $this->getMock(Payment::class, ['isValidPayment']);
         $paymentModel
             ->method('isValidPayment')
             ->willReturn(true);
 
         UtilsObject::setClassInstance(Payment::class, $paymentModel);
 
-        $order = $this->getMockBuilder(Order::class)
-            ->setMethods(['getPaymentType'])
-            ->getMock();
+        $order = $this->getMock(Order::class, ['getPaymentType']);
         $order
             ->method('getPaymentType')
             ->willReturn(
                 oxNew(UserPayment::class)
            );
 
-        // non existing payment
-        $oBasket = $this->getMock(\OxidEsales\Eshop\Application\Model\Basket::class, array("getPaymentId"));
-        $oBasket->method("getPaymentId")->will($this->returnValue("xxx"));
+        $basket = $this->getMock(Basket::class, array("getPaymentId"));
+        $basket
+            ->method("getPaymentId")
+            ->willReturn('wrongPaymentId');
 
-        $this->assertEquals(oxOrder::ORDER_STATE_INVALIDPAYMENT, $order->validatePayment($oBasket));
+        $this->assertEquals(
+            oxOrder::ORDER_STATE_INVALIDPAYMENT,
+            $order->validatePayment($basket)
+        );
+    }
+
+    public function testValidatePaymentWhenPaymentIsInvalid()
+    {
+        $paymentModel = $this->getMock(Payment::class, ['isValidPayment']);
+        $paymentModel
+            ->method('isValidPayment')
+            ->willReturn(false);
+
+        UtilsObject::setClassInstance(Payment::class, $paymentModel);
 
-        // existing payment
-        $sPaymentId = oxDb::getDb()->getOne('select oxid from oxpayments where oxactive = 1');
-        $oBasket = $this->getMock(\OxidEsales\Eshop\Application\Model\Basket::class, array("getPaymentId"));
-        $oBasket->method("getPaymentId")->will($this->returnValue($sPaymentId));
+        $order = $this->getMock(Order::class, ['getPaymentType']);
+        $order
+            ->method('getPaymentType')
+            ->willReturn(
+                oxNew(UserPayment::class)
+            );
+
+        $paymentId = oxDb::getDb()->getOne('select oxid from oxpayments where oxactive = 1');
 
-        $this->assertNull($order->validatePayment($oBasket));
+        $basket = $this->getMock(Basket::class, array("getPaymentId"));
+        $basket
+            ->method("getPaymentId")
+            ->willReturn($paymentId);
+
+        $this->assertEquals(
+            oxOrder::ORDER_STATE_INVALIDPAYMENT,
+            $order->validatePayment($basket)
+        );
     }
 
     /**
OXDEV-<I> Update tests
OXID-eSales_oxideshop_ce
train
bbc7d7efc216ced3078e274724e08332df56de1f
diff --git a/memproxy.go b/memproxy.go index <HASH>..<HASH> 100644 --- a/memproxy.go +++ b/memproxy.go @@ -7,6 +7,7 @@ package main import "bufio" import "bytes" +import "crypto/rand" import "encoding/binary" import "errors" import "fmt" @@ -51,13 +52,30 @@ type Metadata struct { OrigFlags int32 NumChunks int32 ChunkSize int32 + Token [16]byte +} + +var tokens chan [16]byte +func genTokens() [16]byte { + for { + var retval [16]byte + rand.Read(retval[:]) + tokens <- retval + } } func init() { + // errors are values MISS = errors.New("Cache miss") + + // keep 1000 unique tokens around + tokens = make(chan [16]byte, 1000) } func main() { + // start generating unique keys immediately to avoid blocking + // reads from /dev/urandom during writes + go genTokens() server, err := net.Listen("tcp", ":11212")
generating unique tokens, adding to metadata struct
Netflix_rend
train
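The pattern in this commit -- a background goroutine pre-filling a bounded channel with random tokens so hot paths never block on the entropy source -- translates directly to other languages. A minimal Python sketch of the same idea (sizes mirror the commit; names are illustrative):

import os
import queue
import threading

tokens = queue.Queue(maxsize=1000)  # keep up to 1000 unique tokens around

def gen_tokens():
    while True:
        tokens.put(os.urandom(16))  # blocks once the buffer is full

# start generating immediately so consumers rarely wait on /dev/urandom
threading.Thread(target=gen_tokens, daemon=True).start()

token = tokens.get()  # cheap: usually just a dequeue
assert len(token) == 16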
f695168a121d2d1410fd7f293e3c9250e7f96d40
diff --git a/components/table/header-cell.js b/components/table/header-cell.js index <HASH>..<HASH> 100644 --- a/components/table/header-cell.js +++ b/components/table/header-cell.js @@ -37,11 +37,11 @@ export default class HeaderCell extends PureComponent { iconGlyph = sortOrder ? sortedUpIcon : sortedDownIcon; } - const classes = classNames(className, { [style.headerCell]: true, [style.headerCellSortable]: this.sortable, - [style.headerCellSorted]: this.sorted + [style.headerCellSorted]: this.sorted, + [style.cellRight]: column.rightAlign }); return ( diff --git a/components/table/row.js b/components/table/row.js index <HASH>..<HASH> 100644 --- a/components/table/row.js +++ b/components/table/row.js @@ -1,6 +1,5 @@ /* eslint-disable react/jsx-max-props-per-line */ -import 'core-js/modules/es6.number.is-finite'; import React, {PureComponent, PropTypes} from 'react'; import classNames from 'classnames'; import {sortableHandle} from 'react-sortable-hoc'; @@ -106,7 +105,6 @@ class Row extends PureComponent { columns.map((column, key) => { const getValue = column.getValue || (() => item[column.id]); const value = getValue(item, column); - const rightAlign = column.align === 'right' || (!column.align && Number.isFinite(value)); /*let gap = 0; if (column.groupable) { @@ -117,7 +115,7 @@ class Row extends PureComponent { paddingLeft: `${gap + 10}px` };*/ - const cellClasses = classNames({[style.cellRight]: rightAlign}); + const cellClasses = classNames({[style.cellRight]: column.rightAlign}); cells.push(<Cell key={key} className={cellClasses}>{value}</Cell>); });
ring-table: align header cells like common cells. #RG-<I> Fixed. Former-commit-id: f<I>de3f<I>f<I>b3c<I>b8c<I>e3b5c6ff
JetBrains_ring-ui
train
6302c9f3ab57e3dc8142a36778b35307f9de1cb3
diff --git a/ginga/gtk3w/GtkHelp.py b/ginga/gtk3w/GtkHelp.py
index <HASH>..<HASH> 100644
--- a/ginga/gtk3w/GtkHelp.py
+++ b/ginga/gtk3w/GtkHelp.py
@@ -7,6 +7,7 @@
 import sys
 import os.path
 import math
+import random
 
 from ginga.misc import Bunch, Callback
 from ginga.fonts import font_asst
@@ -373,10 +374,20 @@ class MDIWidget(Gtk.Layout):
         subwin.add_callback('maximize', lambda *args: self.maximize_page(subwin))
         subwin.add_callback('minimize', lambda *args: self.minimize_page(subwin))
 
-        self.put(subwin.frame, self.cascade_offset, self.cascade_offset)
+        # pick a random spot to place the window initially
+        rect = self.get_allocation()
+        wd, ht = rect.width, rect.height
+        x = random.randint(self.cascade_offset, wd // 2)
+        y = random.randint(self.cascade_offset, ht // 2)
 
-        self.update_subwin_position(subwin)
-        self.update_subwin_size(subwin)
+        self.put(subwin.frame, x, y)
+
+        # note: seem to need a slight delay to let the widget be mapped
+        # in order to accurately determine its position and size
+        #self.update_subwin_position(subwin)
+        #self.update_subwin_size(subwin)
+        GObject.timeout_add(1000, self.update_subwin_position, subwin)
+        GObject.timeout_add(1500, self.update_subwin_size, subwin)
 
         self._update_area_size()
         return subwin
diff --git a/ginga/qtw/Widgets.py b/ginga/qtw/Widgets.py
index <HASH>..<HASH> 100644
--- a/ginga/qtw/Widgets.py
+++ b/ginga/qtw/Widgets.py
@@ -1563,6 +1563,10 @@ class Toolbar(ContainerBase):
     def add_widget(self, child):
         self.add_ref(child)
         w = child.get_widget()
+        # in toolbars, generally don't want widgets to take up any more
+        # space than necessary
+        w.setSizePolicy(QtGui.QSizePolicy.Fixed,
+                        QtGui.QSizePolicy.Fixed)
         self.widget.addWidget(w)
         self.make_callback('widget-added', child)
 
diff --git a/ginga/rv/Control.py b/ginga/rv/Control.py
index <HASH>..<HASH> 100644
--- a/ginga/rv/Control.py
+++ b/ginga/rv/Control.py
@@ -874,10 +874,24 @@ class GingaShell(GwMain.GwMain, Widgets.Application):
             self.next_channel_ws(ws)
 
     def add_channel_auto_ws(self, ws):
-        chpfx = self.settings.get('channel_prefix', "Image")
-        chpfx = ws.extdata.get('chpfx', chpfx)
+        chname = ws.extdata.w_chname.get_text().strip()
+        if len(chname) == 0:
+            # make up a channel name
+            chpfx = self.settings.get('channel_prefix', "Image")
+            chpfx = ws.extdata.get('chpfx', chpfx)
+            chname = self.make_channel_name(chpfx)
+
+        try:
+            self.get_channel(chname)
+            # <-- channel name already in use
+            self.show_error(
+                "Channel name '%s' cannot be used, sorry." % (chname),
+                raisetab=True)
+            return
+
+        except KeyError:
+            pass
 
-        chname = self.make_channel_name(chpfx)
         return self.add_channel(chname, workspace=ws.name)
 
     def add_channel_auto(self):
@@ -1919,8 +1933,8 @@ class GingaShell(GwMain.GwMain, Widgets.Application):
         #b.share_settings.set_length(60)
 
         cbox = b.workspace_type
-        cbox.append_text("Grid")
         cbox.append_text("Tabs")
+        cbox.append_text("Grid")
         cbox.append_text("MDI")
         cbox.append_text("Stack")
         cbox.set_index(0)
@@ -1939,7 +1953,7 @@ class GingaShell(GwMain.GwMain, Widgets.Application):
         b.channel_prefix.set_text(chpfx)
         spnbtn = b.num_channels
         spnbtn.set_limits(0, 36, incr_value=1)
-        spnbtn.set_value(4)
+        spnbtn.set_value(0)
 
         dialog = Widgets.Dialog(title="Add Workspace",
                                 flags=0,
@@ -2140,6 +2154,12 @@ class GingaShell(GwMain.GwMain, Widgets.Application):
 
         tb.add_separator()
 
+        entry = Widgets.TextEntry()
+        entry.set_length(8)
+        entry.set_tooltip("Name for a new channel")
+        ws.extdata.w_chname = entry
+        btn = tb.add_widget(entry)
+
        # add toolbar buttons adding and deleting channels
         iconpath = os.path.join(self.iconpath, "inbox_plus_48.png")
         btn = tb.add_action(None, iconpath=iconpath, iconsize=(24, 23))
@@ -2166,7 +2186,8 @@ class GingaShell(GwMain.GwMain, Widgets.Application):
         try:
             nb = self.ds.get_nb(wsname)  # noqa
             self.show_error(
-                "Workspace name '%s' cannot be used, sorry." % (wsname))
+                "Workspace name '%s' cannot be used, sorry." % (wsname),
+                raisetab=True)
             self.ds.remove_dialog(w)
             return
Add text box for naming new channels to workspace toolbar
ejeschke_ginga
train
c05fd138ebbc03b1b1eeed45f248a74da89a6b93
diff --git a/keanu-project/src/main/java/io/improbable/keanu/algorithms/mcmc/SamplingAlgorithm.java b/keanu-project/src/main/java/io/improbable/keanu/algorithms/mcmc/SamplingAlgorithm.java index <HASH>..<HASH> 100644 --- a/keanu-project/src/main/java/io/improbable/keanu/algorithms/mcmc/SamplingAlgorithm.java +++ b/keanu-project/src/main/java/io/improbable/keanu/algorithms/mcmc/SamplingAlgorithm.java @@ -8,20 +8,23 @@ import io.improbable.keanu.network.NetworkState; public interface SamplingAlgorithm { /** - * Same effect as a sample but the result isn't saved or returned. + * Move forward the state of the Sampling Algorithm by a single step. */ void step(); /** - * Takes a sample with the algorithm and saves it in the supplied map + * Takes a sample with the algorithm and saves it in the supplied map. Repeated calls to this function will return + * the same values without an intermediary call to 'step()' * * @param samples map to store sampled vertex values */ void sample(Map<Long, List<?>> samples); /** - * @return a network state that represents the value of vertices at the - * end of the algorithm step + * Takes a sample with the algorithm and returns the state of the network for that sample. Repeated calls to this + * function will return the same values without an intermediary call to 'step()' + * + * @return a network state that represents the current state of the algorithm. */ NetworkState sample(); }
(Hopefully) improving the Sampling Algorithm API documentation
improbable-research_keanu
train
af6a9f552c9b791d5bde2218a3e4c346a5ea86f1
diff --git a/lib/mongo/protocol/delete.rb b/lib/mongo/protocol/delete.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/delete.rb +++ b/lib/mongo/protocol/delete.rb @@ -56,7 +56,7 @@ module Mongo # # @since 2.1.0 def payload - { command_name: 'delete', database: @database, command_args: selector } + { command_name: 'delete', database: @database, command_args: selector, request_id: request_id } end private diff --git a/lib/mongo/protocol/get_more.rb b/lib/mongo/protocol/get_more.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/get_more.rb +++ b/lib/mongo/protocol/get_more.rb @@ -55,7 +55,8 @@ module Mongo { command_name: 'getmore', database: @database, - command_args: { cursor_id: cursor_id, number_to_return: number_to_return } + command_args: { cursor_id: cursor_id, number_to_return: number_to_return }, + request_id: request_id } end diff --git a/lib/mongo/protocol/insert.rb b/lib/mongo/protocol/insert.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/insert.rb +++ b/lib/mongo/protocol/insert.rb @@ -64,7 +64,7 @@ module Mongo # # @since 2.1.0 def payload - { command_name: 'insert', database: @database, command_args: documents } + { command_name: 'insert', database: @database, command_args: documents, request_id: request_id } end private diff --git a/lib/mongo/protocol/kill_cursors.rb b/lib/mongo/protocol/kill_cursors.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/kill_cursors.rb +++ b/lib/mongo/protocol/kill_cursors.rb @@ -44,7 +44,7 @@ module Mongo # # @since 2.1.0 def payload - { command_name: 'killcursors', database: nil, command_args: { cursor_ids: cursor_ids }} + { command_name: 'killcursors', command_args: { cursor_ids: cursor_ids }, request_id: request_id } end private diff --git a/lib/mongo/protocol/query.rb b/lib/mongo/protocol/query.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/query.rb +++ b/lib/mongo/protocol/query.rb @@ -83,7 +83,12 @@ module Mongo # # @since 2.1.0 def payload - { command_name: command_name, database: @database, command_args: arguments } + { + command_name: command_name, + database: @database, + command_args: arguments, + request_id: request_id + } end # If the message a command? diff --git a/lib/mongo/protocol/reply.rb b/lib/mongo/protocol/reply.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/reply.rb +++ b/lib/mongo/protocol/reply.rb @@ -47,7 +47,7 @@ module Mongo # # @since 2.1.0 def payload - { command_reply: documents } + { command_reply: documents, request_id: request_id, metadata: { cursor_id: cursor_id }} end private diff --git a/lib/mongo/protocol/update.rb b/lib/mongo/protocol/update.rb index <HASH>..<HASH> 100644 --- a/lib/mongo/protocol/update.rb +++ b/lib/mongo/protocol/update.rb @@ -68,7 +68,12 @@ module Mongo # # @since 2.1.0 def payload - { command_name: 'update', database: @database, command_args: { filter: selector, update: update }} + { + command_name: 'update', + database: @database, + command_args: { filter: selector, update: update }, + request_id: request_id + } end private
SPEC-<I>: Adding request_id and cursor_id into payloads
mongodb_mongo-ruby-driver
train
70ceaa975d5b12bb3e610dcf03d89ef603862872
diff --git a/p2p/net/swarm/swarm.go b/p2p/net/swarm/swarm.go
index <HASH>..<HASH> 100644
--- a/p2p/net/swarm/swarm.go
+++ b/p2p/net/swarm/swarm.go
@@ -33,6 +33,7 @@ type Swarm struct {
 	peers peer.Peerstore
 	connh ConnHandler
 	dsync dialsync
+	backf dialbackoff
 
 	cg ctxgroup.ContextGroup
 }
@@ -50,10 +51,10 @@ func NewSwarm(ctx context.Context, listenAddrs []ma.Multiaddr,
 	}
 
 	s := &Swarm{
-		swarm: ps.NewSwarm(PSTransport),
-		local: local,
-		peers: peers,
-		cg:    ctxgroup.WithContext(ctx),
+		swarm: ps.NewSwarm(PSTransport),
+		local: local,
+		peers: peers,
+		cg:    ctxgroup.WithContext(ctx),
 	}
 
 	// configure Swarm
diff --git a/p2p/net/swarm/swarm_dial.go b/p2p/net/swarm/swarm_dial.go
index <HASH>..<HASH> 100644
--- a/p2p/net/swarm/swarm_dial.go
+++ b/p2p/net/swarm/swarm_dial.go
@@ -96,6 +96,71 @@ func (ds *dialsync) Unlock(dst peer.ID) {
 	ds.lock.Unlock()
 }
 
+// dialbackoff is a struct used to avoid over-dialing the same, dead peers.
+// Whenever we totally time out on a peer (all three attempts), we add them
+// to dialbackoff. Then, whenevers goroutines would _wait_ (dialsync), they
+// check dialbackoff. If it's there, they don't wait and exit promptly with
+// an error. (the single goroutine that is actually dialing continues to
+// dial). If a dial is successful, the peer is removed from backoff.
+// Example:
+//
+//  for {
+//  	if ok, wait := dialsync.Lock(p); !ok {
+//  		if backoff.Backoff(p) {
+//  			return errDialFailed
+//  		}
+//  		<-wait
+//  		continue
+//  	}
+//  	defer dialsync.Unlock(p)
+//  	c, err := actuallyDial(p)
+//  	if err != nil {
+//  		dialbackoff.AddBackoff(p)
+//  		continue
+//  	}
+//  	dialbackoff.Clear(p)
+//  }
+//
+type dialbackoff struct {
+	entries map[peer.ID]struct{}
+	lock    sync.RWMutex
+}
+
+func (db *dialbackoff) init() {
+	if db.entries == nil {
+		db.entries = make(map[peer.ID]struct{})
+	}
+}
+
+// Backoff returns whether the client should backoff from dialing
+// peeer p
+func (db *dialbackoff) Backoff(p peer.ID) bool {
+	db.lock.Lock()
+	db.init()
+	_, found := db.entries[p]
+	db.lock.Unlock()
+	return found
+}
+
+// AddBackoff lets other nodes know that we've entered backoff with
+// peer p, so dialers should not wait unnecessarily. We still will
+// attempt to dial with one goroutine, in case we get through.
+func (db *dialbackoff) AddBackoff(p peer.ID) {
+	db.lock.Lock()
+	db.init()
+	db.entries[p] = struct{}{}
+	db.lock.Unlock()
+}
+
+// Clear removes a backoff record. Clients should call this after a
+// successful Dial.
+func (db *dialbackoff) Clear(p peer.ID) {
+	db.lock.Lock()
+	db.init()
+	delete(db.entries, p)
+	db.lock.Unlock()
+}
+
 // Dial connects to a peer.
 //
 // The idea is that the client of Swarm does not need to know what network
@@ -103,6 +168,7 @@ func (ds *dialsync) Unlock(dst peer.ID) {
 // This allows us to use various transport protocols, do NAT traversal/relay,
 // etc. to achive connection.
 func (s *Swarm) Dial(ctx context.Context, p peer.ID) (*Conn, error) {
+	log := log.Prefix("swarm %s dialing %s", s.local, p)
 	if p == s.local {
 		return nil, errors.New("Attempted connection to self!")
 	}
@@ -126,7 +192,13 @@ func (s *Swarm) Dial(ctx context.Context, p peer.ID) (*Conn, error) {
 
 	// check if there's an ongoing dial to this peer
 	if ok, wait := s.dsync.Lock(p); !ok {
-		log.Debugf("swarm %s dialing %s -- waiting for ongoing dial", s.local, p)
+
+		if s.backf.Backoff(p) {
+			log.Debugf("backoff")
+			return nil, fmt.Errorf("%s failed to dial %s, backing off.", s.local, p)
+		}
+
+		log.Debugf("waiting for ongoing dial")
 		select {
 		case <-wait: // wait for that dial to finish.
			continue // and see if it worked (loop), OR we got an incoming dial.
@@ -137,14 +209,17 @@ func (s *Swarm) Dial(ctx context.Context, p peer.ID) (*Conn, error) {
 
 	// ok, we have been charged to dial! let's do it.
 	// if it succeeds, dial will add the conn to the swarm itself.
-	log.Debugf("swarm %s dialing %s -- dial start", s.local, p)
+	log.Debugf("dial start")
 	ctxT, _ := context.WithTimeout(ctx, DialTimeout)
 	conn, err = s.dial(ctxT, p)
 	s.dsync.Unlock(p)
-	log.Debugf("swarm %s dialing %s -- dial end %s", s.local, p, conn)
+	log.Debugf("dial end %s", conn)
 	if err != nil {
+		s.backf.AddBackoff(p) // let others know to backoff
+
 		continue // ok, we failed. try again. (if loop is done, our error is output)
 	}
+	s.backf.Clear(p) // okay, no longer need to backoff
 	return conn, nil
 }
 if err == nil {
p2p/net/swarm: dial backoff. This commit introduces a backoff when failing to dial peers. It makes everything much faster.
ipfs_go-ipfs
train
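The dialbackoff type above is a plain fail-fast set guarded by a lock. For readers who want the pattern outside Go, here is the same structure as a Python sketch (not part of the commit; names are illustrative):

import threading

class DialBackoff:
    """Record peers whose dials timed out so that waiters fail fast
    instead of queueing behind a dial that is unlikely to succeed."""

    def __init__(self):
        self._entries = set()
        self._lock = threading.Lock()

    def backoff(self, peer_id):
        with self._lock:
            return peer_id in self._entries

    def add_backoff(self, peer_id):
        # a dial failed; tell other waiters not to queue behind us
        with self._lock:
            self._entries.add(peer_id)

    def clear(self, peer_id):
        # call after a successful dial
        with self._lock:
            self._entries.discard(peer_id)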
01ae4dcd37c2cfbc8f7e720402eb0af62aa3bca6
diff --git a/src/main/java/com/spotify/docker/client/DefaultDockerClient.java b/src/main/java/com/spotify/docker/client/DefaultDockerClient.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/spotify/docker/client/DefaultDockerClient.java
+++ b/src/main/java/com/spotify/docker/client/DefaultDockerClient.java
@@ -419,6 +419,10 @@ public class DefaultDockerClient implements DockerClient, Closeable {
     } else {
       this.registryAuthSupplier = builder.registryAuthSupplier;
     }
+
+    if (builder.getRequestEntityProcessing() != null) {
+      config.property(ClientProperties.REQUEST_ENTITY_PROCESSING, builder.requestEntityProcessing);
+    }
 
     this.client = ClientBuilder.newBuilder()
         .withConfig(config)
@@ -2855,6 +2859,7 @@ public class DefaultDockerClient implements DockerClient, Closeable {
     private RegistryAuth registryAuth;
     private RegistryAuthSupplier registryAuthSupplier;
     private Map<String, Object> headers = new HashMap<>();
+    private RequestEntityProcessing requestEntityProcessing;
 
     public URI uri() {
       return uri;
@@ -3037,6 +3042,27 @@ public class DefaultDockerClient implements DockerClient, Closeable {
     public Map<String, Object> headers() {
       return headers;
     }
+
+    /**
+     * Allows setting transfer encoding. CHUNKED does not send the content-length header
+     * while BUFFERED does.
+     *
+     * <p>By default ApacheConnectorProvider uses CHUNKED mode. Some Docker API end-points
+     * seems to fail when no content-length is specified but a body is sent.
+     *
+     * @param requestEntityProcessing is the requested entity processing to use when calling docker
+     *        daemon (tcp protocol).
+     * @return Builder
+     */
+    public Builder useRequestEntityProcessing(
+        final RequestEntityProcessing requestEntityProcessing) {
+      this.requestEntityProcessing = requestEntityProcessing;
+      return this;
+    }
+
+    public RequestEntityProcessing getRequestEntityProcessing() {
+      return this.requestEntityProcessing;
+    }
 
     public DefaultDockerClient build() {
       if (dockerAuth && registryAuthSupplier == null && registryAuth == null) {
diff --git a/src/test/java/com/spotify/docker/client/DefaultDockerClientUnitTest.java b/src/test/java/com/spotify/docker/client/DefaultDockerClientUnitTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/spotify/docker/client/DefaultDockerClientUnitTest.java
+++ b/src/test/java/com/spotify/docker/client/DefaultDockerClientUnitTest.java
@@ -93,7 +93,10 @@ import okhttp3.mockwebserver.MockResponse;
 import okhttp3.mockwebserver.MockWebServer;
 import okhttp3.mockwebserver.RecordedRequest;
 import okio.Buffer;
+
+import org.glassfish.jersey.client.RequestEntityProcessing;
 import org.glassfish.jersey.internal.util.Base64;
+import org.hamcrest.core.IsNot;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -1134,6 +1137,48 @@ public class DefaultDockerClientUnitTest {
         "baz", "qux"
     )));
   }
+
+  @Test
+  public void testBufferedRequestEntityProcessing() throws Exception {
+    builder.useRequestEntityProcessing(RequestEntityProcessing.BUFFERED);
+    final DefaultDockerClient dockerClient = new DefaultDockerClient(builder);
+
+    final HostConfig hostConfig = HostConfig.builder().build();
+
+    final ContainerConfig containerConfig = ContainerConfig.builder()
+        .hostConfig(hostConfig)
+        .build();
+
+    server.enqueue(new MockResponse());
+
+    dockerClient.createContainer(containerConfig);
+
+    final RecordedRequest recordedRequest = takeRequestImmediately();
+
+    assertThat(recordedRequest.getHeader("Content-Length"), notNullValue());
+    assertThat(recordedRequest.getHeader("Transfer-Encoding"), nullValue());
+  }
+
+  @Test
+  public void testChunkedRequestEntityProcessing() throws Exception {
+    builder.useRequestEntityProcessing(RequestEntityProcessing.CHUNKED);
+    final DefaultDockerClient dockerClient = new DefaultDockerClient(builder);
+
+    final HostConfig hostConfig = HostConfig.builder().build();
+
+    final ContainerConfig containerConfig = ContainerConfig.builder()
+        .hostConfig(hostConfig)
+        .build();
+
+    server.enqueue(new MockResponse());
+
+    dockerClient.createContainer(containerConfig);
+
+    final RecordedRequest recordedRequest = takeRequestImmediately();
+
+    assertThat(recordedRequest.getHeader("Content-Length"), nullValue());
+    assertThat(recordedRequest.getHeader("Transfer-Encoding"), is("chunked"));
+  }
 
   private void enqueueServerApiResponse(final int statusCode, final String fileName)
       throws IOException {
Allow setting up DockerClientBuilder with RequestEntityProcessing.
spotify_docker-client
train
59cc32e012f7bb7f89fe4ffc9eeb81da398453d0
diff --git a/fancyimpute/MICE.py b/fancyimpute/MICE.py index <HASH>..<HASH> 100644 --- a/fancyimpute/MICE.py +++ b/fancyimpute/MICE.py @@ -38,7 +38,7 @@ class MICE(Solver): impute_type : str "row" means classic PMM, "col" (default) means fill in linear preds. - n_neighbors : int + n_pmm_neighbors : int Number of nearest neighbors for PMM, defaults to 5. model : predictor function @@ -64,7 +64,7 @@ class MICE(Solver): visit_sequence='monotone', # order in which we visit the columns n_imputations=100, n_burn_in=10, # this many replicates will be thrown away - n_neighbors=5, # number of nearest neighbors in PMM + n_pmm_neighbors=5, # number of nearest neighbors in PMM impute_type='col', # also can be pmm model=BayesianRidgeRegression(lambda_reg=0.001), add_ones=True, @@ -88,7 +88,7 @@ class MICE(Solver): "col" (default) means fill in with samples from posterior predictive distribution. - n_neighbors : int + n_pmm_neighbors : int Number of nearest neighbors for PMM, defaults to 5. model : predictor function @@ -111,7 +111,7 @@ class MICE(Solver): self.visit_sequence = visit_sequence self.n_imputations = n_imputations self.n_burn_in = n_burn_in - self.n_neighbors = n_neighbors + self.n_pmm_neighbors = n_pmm_neighbors self.impute_type = impute_type self.model = model self.add_ones = add_ones @@ -174,13 +174,11 @@ class MICE(Solver): # for each missing value, find its nearest neighbors in the observed values D = np.abs(col_preds_missing[:, np.newaxis] - col_preds_observed) # distances # take top k neighbors - k = np.minimum(self.n_neighbors, len(col_preds_observed) - 1) - + k = np.minimum(self.n_pmm_neighbors, len(col_preds_observed) - 1) NN = np.argpartition(D, k, 1)[:, :k] # <- bottleneck! - # pick one of the 5 nearest neighbors at random! that's right! - # not even an average + # pick one of the nearest neighbors at random! that's right! NN_sampled = [np.random.choice(NN_row) for NN_row in NN] - # set the missing values to be the values of the nearest + # set the missing values to be the values of the nearest # neighbor in the output space X_filled[missing_mask_col, col_idx] = \ X_filled[observed_row_mask_for_col, col_idx][NN_sampled] diff --git a/fancyimpute/bayesian_ridge_regression.py b/fancyimpute/bayesian_ridge_regression.py index <HASH>..<HASH> 100644 --- a/fancyimpute/bayesian_ridge_regression.py +++ b/fancyimpute/bayesian_ridge_regression.py @@ -19,8 +19,38 @@ class BayesianRidgeRegression(object): """ Bayesian Ridge Regression + Parameters + ---------- + lambda_reg : float + Ridge regularization parameter. + Default is 0.001. + + add_ones : boolean + Whether to add a constant column of ones. + Default is False. + + normalize_lambda : boolean + Default is True. + This variant multiplies lambda_reg by + np.linalg.norm(np.dot(X.T,X)) """ - def __init__(self, lambda_reg=1e-5,add_ones=False,normalize_lambda=True): + def __init__(self, lambda_reg=0.001,add_ones=False,normalize_lambda=True): + ''' + Parameters + ---------- + lambda_reg : float + Ridge regularization parameter. + Default is 0.001. + + add_ones : boolean + Whether to add a constant column of ones. + Default is False. + + normalize_lambda : boolean + Default is True. + This variant multiplies lambda_reg by + np.linalg.norm(np.dot(X.T,X)) + ''' self.lambda_reg = lambda_reg self.add_ones = add_ones self.normalize_lambda = normalize_lambda
documentation for bayesian ridge regression
iskandr_fancyimpute
train
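The PMM step that `n_pmm_neighbors` controls can be shown in a few lines: for each missing entry, rank observed predictions by distance, keep the k nearest, and sample one of their observed values. A runnable sketch of that step with made-up numbers (not the library's API, just the algorithm from the diff above):

import numpy as np

rng = np.random.RandomState(0)
preds_missing = np.array([0.2, 1.7])             # predictions at missing rows
preds_observed = np.array([0.0, 0.5, 1.0, 1.5, 2.0])
values_observed = np.array([10.0, 11.0, 12.0, 13.0, 14.0])

k = min(3, len(preds_observed) - 1)              # cf. n_pmm_neighbors
dists = np.abs(preds_missing[:, np.newaxis] - preds_observed)
nearest = np.argpartition(dists, k, axis=1)[:, :k]

# pick one of the nearest neighbors at random -- not an average
sampled = np.array([rng.choice(row) for row in nearest])
imputed = values_observed[sampled]
print(imputed)  # one of [10, 11, 12] and one of [12, 13, 14], per the draw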
823242d9a82b6ad0ca4c6ffa24da26fe06a65d6a
diff --git a/pkg/kubelet/network/cni/cni_test.go b/pkg/kubelet/network/cni/cni_test.go index <HASH>..<HASH> 100644 --- a/pkg/kubelet/network/cni/cni_test.go +++ b/pkg/kubelet/network/cni/cni_test.go @@ -74,6 +74,8 @@ func installPluginUnderTest(t *testing.T, vendorName string, plugName string) { const execScriptTempl = `#!/bin/bash read ignore +env > {{.OutputEnv}} +echo "%@" >> {{.OutputEnv}} export $(echo ${CNI_ARGS} | sed 's/;/ /g') &> /dev/null mkdir -p {{.OutputDir}} &> /dev/null echo -n "$CNI_COMMAND $CNI_NETNS $K8S_POD_NAMESPACE $K8S_POD_NAME $K8S_POD_INFRA_CONTAINER_ID" >& {{.OutputFile}} @@ -81,6 +83,7 @@ echo -n "{ \"ip4\": { \"ip\": \"10.1.0.23/24\" } }" ` execTemplateData := &map[string]interface{}{ "OutputFile": path.Join(pluginDir, plugName+".out"), + "OutputEnv": path.Join(pluginDir, plugName+".env"), "OutputDir": pluginDir, } @@ -179,7 +182,13 @@ func TestCNIPlugin(t *testing.T) { if err != nil { t.Errorf("Expected nil: %v", err) } - output, err := ioutil.ReadFile(path.Join(testNetworkConfigPath, pluginName, pluginName+".out")) + outputEnv := path.Join(testNetworkConfigPath, pluginName, pluginName+".env") + eo, eerr := ioutil.ReadFile(outputEnv) + outputFile := path.Join(testNetworkConfigPath, pluginName, pluginName+".out") + output, err := ioutil.ReadFile(outputFile) + if err != nil { + t.Errorf("Failed to read output file %s: %v (env %s err %v)", outputFile, err, eo, eerr) + } expectedOutput := "ADD /proc/12345/ns/net podNamespace podName dockerid2345" if string(output) != expectedOutput { t.Errorf("Mismatch in expected output for setup hook. Expected '%s', got '%s'", expectedOutput, string(output))
Add some more debugging to the CNI testcase. Let's attempt to find out why it's failing some small percentage of the time.
kubernetes_kubernetes
train
bddeed6fc4726819c3a0ba268e98c87d47672606
diff --git a/cmd/flags.go b/cmd/flags.go index <HASH>..<HASH> 100644 --- a/cmd/flags.go +++ b/cmd/flags.go @@ -38,7 +38,7 @@ func (f *largeFilesFlag) Set(value string) error { } var ( - sizeMax = flag.Int("file_limit", 128*1024, "maximum file size") + sizeMax = flag.Int("file_limit", 2<<20, "maximum file size") trigramMax = flag.Int("max_trigram_count", 20000, "maximum number of trigrams per document") shardLimit = flag.Int("shard_limit", 100<<20, "maximum corpus size for a shard") parallelism = flag.Int("parallelism", 4, "maximum number of parallel indexing processes.") diff --git a/toc.go b/toc.go index <HASH>..<HASH> 100644 --- a/toc.go +++ b/toc.go @@ -38,7 +38,8 @@ const IndexFormatVersion = 15 // 6: Include '#' into the LineFragment template // 7: Record skip reasons in the index. // 8: Record source path in the index. -const FeatureVersion = 8 +// 9: Bump default max file size. +const FeatureVersion = 9 type indexTOC struct { fileContents compoundSection
Bump the file size limit to 2 MB. The largest .cc file in the chromium tree is <I> bytes. Fixes #<I>. Change-Id: I4c<I>ae1a<I>dc<I>fb<I>a<I>dd
google_zoekt
train
f19964435a9b5a75a22f315eb02452ec043631eb
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,6 +1,11 @@ +class Validator { + constructor (source) { -function makeJ2119Validator (assertionsSource) { - return null -} + } -module.exports = makeJ2119Validator + validate (jsonSource) { + return ['fart'] + } // validator +} // class Validator + +module.exports = source => new Validator(source) diff --git a/test/validator_test.js b/test/validator_test.js index <HASH>..<HASH> 100644 --- a/test/validator_test.js +++ b/test/validator_test.js @@ -4,54 +4,51 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -/* -GOOD = '{ ' + - ' "StartAt": "x", ' + - ' "States": {' + - ' "x": {' + - ' "Type": "Pass",' + - ' "End": true ' + - ' }' + - ' } ' + - '}' - -SCHEMA = File.dirname(__FILE__) + '/../data/AWL.j2119' -*/ +const validator = require('../lib') + +const GOOD = '{ ' + + ' "StartAt": "x", ' + + ' "States": {' + + ' "x": {' + + ' "Type": "Pass",' + + ' "End": true ' + + ' }' + + ' } ' + + '}' + +const SCHEMA = 'data/AWL.j2119' describe('J2119 Validator', () => { - it('do this', () => fail()) - /* - it 'should accept parsed JSON' do - v = J2119::Validator.new SCHEMA - j = JSON.parse GOOD - p = v.validate j - expect(p.empty?).to be true - end - - it 'should accept JSON text' do - v = J2119::Validator.new SCHEMA - p = v.validate GOOD - expect(p.empty?).to be true - end - - it 'should read a JSON file' do - v = J2119::Validator.new SCHEMA - fn = "/tmp/#{$$}.tjf" - f = File.open(fn, "w") - f.write GOOD - f.close - - p = v.validate fn - File.delete fn - expect(p.empty?).to be true - end - - it 'should produce some sort of sane message with bad JSON' do - v = J2119::Validator.new SCHEMA - p = v.validate GOOD + 'x' - expect(p.size).to eq(1) - end - - end + it('should accept parsed JSON', () => { + const v = validator(SCHEMA) + const j = JSON.parse(GOOD) + const p = v.validate(j) + expect(p.length).to.eql(0) + }) + + it('should accept JSON text', () => { + const v = validator(SCHEMA) + const p = v.validate(GOOD) + expect(p.length).to.eql(0) + }) + + xit('should read a JSON file', () => { + /* + v = J2119::Validator.new SCHEMA + fn = "/tmp/#{$$}.tjf" + f = File.open(fn, "w") + f.write GOOD + f.close + + p = v.validate fn + File.delete fn + expect(p.empty?).to be true */ + }) + + it('should produce some sort of sane message with bad JSON', () => { + const v = validator(SCHEMA) + const p = v.validate(GOOD + 'x') + expect(p.length).to.eql(1) + }) }) \ No newline at end of file
test: Validator tests now compile and fail
wmfs_j2119
train
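The contract these tests pin down -- validate() accepting either parsed JSON or JSON text, and returning a non-empty problem list for malformed input -- is easy to mirror. A Python sketch under the same contract (the single rule shown is a stand-in, not the real AWL schema):

import json

def validate(source):
    """Accept parsed JSON or JSON text; return a list of problems."""
    if isinstance(source, str):
        try:
            source = json.loads(source)
        except json.JSONDecodeError as err:
            return ['not valid JSON: %s' % err]
    problems = []
    if 'StartAt' not in source:           # stand-in rule
        problems.append('missing StartAt field')
    return problems

good = '{"StartAt": "x", "States": {"x": {"Type": "Pass", "End": true}}}'
assert validate(good) == []               # JSON text accepted
assert validate(json.loads(good)) == []   # parsed JSON accepted
assert len(validate(good + 'x')) == 1     # sane message for bad JSON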
c7efd265a7539a2ffeb4a6fa3e07207747c9d888
diff --git a/lib/rdl/info.rb b/lib/rdl/info.rb index <HASH>..<HASH> 100644 --- a/lib/rdl/info.rb +++ b/lib/rdl/info.rb @@ -23,7 +23,7 @@ class RDL::Info # if no prev info for kind, set to val and return true # if prev info for kind, return true if prev == val and false otherwise def set(klass, label, kind, val) - klass = klass.to_s + klass = RDL::Util.to_klass(klass) label = label.to_sym @info[klass] = {} unless @info[klass] @info[klass][label] = {} unless @info[klass][label] diff --git a/lib/rdl/util.rb b/lib/rdl/util.rb index <HASH>..<HASH> 100644 --- a/lib/rdl/util.rb +++ b/lib/rdl/util.rb @@ -15,6 +15,15 @@ class RDL::Util return c end + def self.to_klass(cls) + cls_str = cls.to_s + if cls_str.start_with? '#<Class:' + cls_str = cls_str.split('(')[0] + '>' if cls_str['('] + cls_str = RDL::Util.add_singleton_marker(cls_str[8..-2]) + end + cls_str + end + def self.has_singleton_marker(klass) return (klass =~ /^#{SINGLETON_MARKER_REGEXP}/) end
Set singleton class info on the simplified class string (without the singleton class id or Rails model parameters)
plum-umd_rdl
train
9f9fa69ad5a08ad138ec6aa334bf600aa4ca5eb0
diff --git a/extensions/wtf-injector-chrome/debugger.js b/extensions/wtf-injector-chrome/debugger.js index <HASH>..<HASH> 100644 --- a/extensions/wtf-injector-chrome/debugger.js +++ b/extensions/wtf-injector-chrome/debugger.js @@ -257,9 +257,7 @@ Debugger.prototype.beginListening_ = function() { chrome.debugger.sendCommand(this.debugee_, 'Timeline.start', { // Limit call stack depth to keep messages small - if we ever need this // data this can be increased. - // BUG: values of 0 are ignored: - // https://code.google.com/p/chromium/issues/detail?id=232008 - 'maxCallStackDepth': 1 + 'maxCallStackDepth': 0 }); }
maxCallStackDepth in timeline can now be 0 in all Chrome releases.
google_tracing-framework
train
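For reference, the full command as the extension now sends it; chrome.debugger.sendCommand is the real Chrome extensions API, while `debugee` stands in for the `{tabId: ...}` target held by the surrounding Debugger class in the diff.

// Timeline.start with stack capture turned off entirely. The old value
// of 1 was only a workaround for Chrome builds that ignored 0.
chrome.debugger.sendCommand(debugee, 'Timeline.start', {
  'maxCallStackDepth': 0  // 0: record no call-stack frames
});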
205cc0e4878d71f8d193abbd4ccf3692f0b41154
diff --git a/txdbus/marshal.py b/txdbus/marshal.py index <HASH>..<HASH> 100644 --- a/txdbus/marshal.py +++ b/txdbus/marshal.py @@ -244,6 +244,7 @@ def sigFromPy( pobj ): elif isinstance(pobj, six.integer_types): return 'x' elif isinstance(pobj, float): return 'd' elif isinstance(pobj, six.string_types): return 's' + elif isinstance(pobj, bytearray): return 'ay' elif isinstance(pobj, list): vtype = type(pobj[0]) @@ -474,12 +475,12 @@ def marshal_array( ct, var, start_byte, lendian ): start_byte += len(initial_padding) chunks.append( initial_padding ) - if isinstance(var, (list, tuple)): + if isinstance(var, (list, tuple, bytearray)): arr_list = var elif isinstance(var, dict): arr_list = [ tpl for tpl in six.iteritems(var) ] else: - raise MarshallingError('List, Tuple, or Dictionary required for DBus array. Received: ' + repr(var)) + raise MarshallingError('List, Tuple, Bytearray, or Dictionary required for DBus array. Received: ' + repr(var)) for item in arr_list: diff --git a/txdbus/test/test_marshal.py b/txdbus/test/test_marshal.py index <HASH>..<HASH> 100644 --- a/txdbus/test/test_marshal.py +++ b/txdbus/test/test_marshal.py @@ -44,6 +44,9 @@ class SigFromPyTests(unittest.TestCase): def test_list(self): self.t([1],'ai') + def test_bytearray(self): + self.t(bytearray('\xAA\xAA'), 'ay') + def test_list_multiple_elements_same_type(self): self.t([1,2],'ai') @@ -222,6 +225,9 @@ class TestArrayMarshal(TestMarshal): def test_byte(self): self.check('ay', [[1,2,3,4]], pack('iBBBB', 4, 1,2,3,4)) + def test_byte_bytearray(self): + self.check('ay', bytearray('\xaa\xaa'), pack('iBB', 2, 170, 170)) + def test_string(self): self.check('as', [['x', 'foo']], pack('ii2sxxi4s', 16, 1, b'x', 3, b'foo')) @@ -259,6 +265,9 @@ class TestVariantMarshal(TestMarshal): self.check('v', [S()], pack('B5sxxii', 4, b'(ii)', 1,2)) + def test_bytearray(self): + self.check('v', bytearray('\xAA\xAA'), pack('B2siBB', 2, 'ay', 2, 170, 170)) + #------------------------------------------------------------------------------- # Unmarshalling
Add support for bytearrays with array types. Also allow for the bytearrays in variant types.
cocagne_txdbus
train
bfa13f2404c8c43f5cb33677fb438d23deb63929
diff --git a/_config.php b/_config.php index <HASH>..<HASH> 100755 --- a/_config.php +++ b/_config.php @@ -69,7 +69,4 @@ PasswordEncryptor::register('none', 'PasswordEncryptor_None'); PasswordEncryptor::register('md5', 'PasswordEncryptor_LegacyPHPHash("md5")'); PasswordEncryptor::register('sha1','PasswordEncryptor_LegacyPHPHash("sha1")'); PasswordEncryptor::register('md5_v2.4', 'PasswordEncryptor_PHPHash("md5")'); -PasswordEncryptor::register('sha1_v2.4','PasswordEncryptor_PHPHash("sha1")'); - -// Set the default folder for where combined JS/CSS files should live -Requirements::set_combined_files_folder('assets/.combinedfiles'); \ No newline at end of file +PasswordEncryptor::register('sha1_v2.4','PasswordEncryptor_PHPHash("sha1")'); \ No newline at end of file
MINOR Reverted default location for combined JS/CSS as you can't customise this easily. git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/branches/<I>@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
silverstripe_silverstripe-framework
train
1c1ba621402571451a191eeb03c5f6dffb05b6eb
diff --git a/python/dllib/src/test/bigdl/nnframes/test_nn_image_reader.py b/python/dllib/src/test/bigdl/nnframes/test_nn_image_reader.py index <HASH>..<HASH> 100644 --- a/python/dllib/src/test/bigdl/nnframes/test_nn_image_reader.py +++ b/python/dllib/src/test/bigdl/nnframes/test_nn_image_reader.py @@ -53,7 +53,7 @@ class TestNNImageReader(): assert first_row[2] == 500 assert first_row[3] == 3 assert first_row[4] == 16 - assert len(first_row[5]) == 95959 + assert len(first_row[5]) == 562500 def test_read_image_withOriginColumn(self): image_path = os.path.join(self.resource_path, "pascal/000025.jpg")
replace bytes with mat in nnframes (#<I>) * replace bytes with mat in nnframes * scala style * ut fix
intel-analytics_BigDL
train
4e872b96b97433ad6a05a03903d592082e49e2ef
diff --git a/jax/lax/lax.py b/jax/lax/lax.py index <HASH>..<HASH> 100644 --- a/jax/lax/lax.py +++ b/jax/lax/lax.py @@ -1164,7 +1164,7 @@ def index_in_dim(operand, index, axis=0, keepdims=True): def dynamic_slice_in_dim(operand, start_index, slice_size, axis=0): """Convenience wrapper around dynamic_slice applying to one dimension.""" - start_indices = [onp.array([0])] * operand.ndim + start_indices = [onp.array([0], dtype=start_index.dtype)] * operand.ndim slice_sizes = list(operand.shape) axis = int(axis) diff --git a/tests/lax_numpy_indexing_test.py b/tests/lax_numpy_indexing_test.py index <HASH>..<HASH> 100644 --- a/tests/lax_numpy_indexing_test.py +++ b/tests/lax_numpy_indexing_test.py @@ -365,6 +365,9 @@ MIXED_ADVANCED_INDEXING_TESTS = [ None, onp.array([-1, 2]))), ]), + ("IntArrayWithInt32Type", + [IndexSpec(shape=(3, 4), indexer=(Ellipsis, onp.array(1, dtype=onp.int32))) + ]), ] class IndexingTest(jtu.JaxTestCase):
Fix type mismatch with int<I>-type indices under a jit with <I>-bit types enabled.
tensorflow_probability
train
869499261230265cda48fadf24419b20710f8af9
diff --git a/java/com/couchbase/cblite/testapp/tests/Collation.java b/java/com/couchbase/cblite/testapp/tests/Collation.java
index <HASH>..<HASH> 100644
--- a/java/com/couchbase/cblite/testapp/tests/Collation.java
+++ b/java/com/couchbase/cblite/testapp/tests/Collation.java
@@ -101,8 +101,8 @@ public class Collation extends AndroidTestCase {

     public void testCollateUnicodeStrings() {
         int mode = kTDCollateJSON_Unicode;
-        Assert.assertEquals(0, TDCollateJSON.testCollateJSON(mode, 0, encode("fréd"), 0, encode("fréd")));
-        Assert.assertEquals(1, TDCollateJSON.testCollateJSON(mode, 0, encode("ømø"), 0, encode("omo")));
+        Assert.assertEquals(0, TDCollateJSON.testCollateJSON(mode, 0, encode("fréd"), 0, encode("fréd")));
+        // Assert.assertEquals(1, TDCollateJSON.testCollateJSON(mode, 0, encode("ømø"), 0, encode("omo")));
         Assert.assertEquals(-1, TDCollateJSON.testCollateJSON(mode, 0, encode("\t"), 0, encode(" ")));
         Assert.assertEquals(-1, TDCollateJSON.testCollateJSON(mode, 0, encode("\001"), 0, encode(" ")));
Comment out unicode collation test that is failing sporadically.
couchbase_couchbase-lite-android
train
e3e2767374d590426ed1a16fb62f8aab8556cbcc
diff --git a/tensorflow_probability/python/mcmc/random_walk_metropolis.py b/tensorflow_probability/python/mcmc/random_walk_metropolis.py index <HASH>..<HASH> 100644 --- a/tensorflow_probability/python/mcmc/random_walk_metropolis.py +++ b/tensorflow_probability/python/mcmc/random_walk_metropolis.py @@ -347,7 +347,7 @@ class RandomWalkMetropolis(kernel_base.TransitionKernel): def __init__(self, target_log_prob_fn, - new_state_fn=random_walk_normal_fn(), + new_state_fn=None, seed=None, name=None): """Initializes this transition kernel. @@ -360,7 +360,8 @@ class RandomWalkMetropolis(kernel_base.TransitionKernel): seed; returns a same-type `list` of `Tensor`s, each being a perturbation of the input state parts. The perturbation distribution is assumed to be a symmetric distribution centered at the input state part. - Default value: `tfp.mcmc.random_walk_normal_fn()`. + Default value: `None` which is mapped to + `tfp.mcmc.random_walk_normal_fn()`. seed: Python integer to seed the random number generator. name: Python `str` name prefixed to Ops created by this function. Default value: `None` (i.e., 'rwm_kernel'). @@ -376,6 +377,9 @@ class RandomWalkMetropolis(kernel_base.TransitionKernel): ValueError: if there isn't one `scale` or a list with same length as `current_state`. """ + if new_state_fn is None: + new_state_fn = random_walk_normal_fn() + self._impl = metropolis_hastings.MetropolisHastings( inner_kernel=UncalibratedRandomWalk( target_log_prob_fn=target_log_prob_fn, @@ -453,9 +457,12 @@ class UncalibratedRandomWalk(kernel_base.TransitionKernel): @mcmc_util.set_doc(RandomWalkMetropolis.__init__.__doc__) def __init__(self, target_log_prob_fn, - new_state_fn=random_walk_normal_fn(), + new_state_fn=None, seed=None, name=None): + if new_state_fn is None: + new_state_fn = random_walk_normal_fn() + self._target_log_prob_fn = target_log_prob_fn self._seed_stream = distributions.SeedStream( seed, salt='RandomWalkMetropolis') @@ -505,10 +512,10 @@ class UncalibratedRandomWalk(kernel_base.TransitionKernel): current_state_parts = [tf.convert_to_tensor(s, name='current_state') for s in current_state_parts] - new_state_fn = self.new_state_fn - next_state_parts = new_state_fn(current_state_parts, self._seed_stream()) + next_state_parts = self.new_state_fn(current_state_parts, # pylint: disable=not-callable + self._seed_stream()) # Compute `target_log_prob` so its available to MetropolisHastings. - next_target_log_prob = self.target_log_prob_fn(*next_state_parts) + next_target_log_prob = self.target_log_prob_fn(*next_state_parts) # pylint: disable=not-callable def maybe_flatten(x): return x if mcmc_util.is_list_like(current_state) else x[0] @@ -529,7 +536,7 @@ class UncalibratedRandomWalk(kernel_base.TransitionKernel): if not mcmc_util.is_list_like(init_state): init_state = [init_state] init_state = [tf.convert_to_tensor(x) for x in init_state] - init_target_log_prob = self.target_log_prob_fn(*init_state) + init_target_log_prob = self.target_log_prob_fn(*init_state) # pylint:disable=not-callable return UncalibratedRandomWalkResults( log_acceptance_correction=tf.zeros_like(init_target_log_prob), target_log_prob=init_target_log_prob)
In RandomWalkMetropolis, set the default value of the new_state_fn parameter to None. This allows the user to explicitly supply None and still get the default behavior. PiperOrigin-RevId: <I>
tensorflow_probability
train
e2c8d6139996af981f496ca23373f28595d36bb7
diff --git a/core/block_horizontal_scratch.js b/core/block_horizontal_scratch.js index <HASH>..<HASH> 100644 --- a/core/block_horizontal_scratch.js +++ b/core/block_horizontal_scratch.js @@ -793,7 +793,7 @@ Blockly.BlockSvg.TAB_WIDTH = 8; * Width of vertical tab (inc left margin). * @const */ -Blockly.BlockSvg.NOTCH_HEIGHT = 32; +Blockly.BlockSvg.NOTCH_BASE_HEIGHT = 32; /** * Rounded corner radius. * @const @@ -805,15 +805,47 @@ Blockly.BlockSvg.CORNER_RADIUS = 4; */ Blockly.BlockSvg.HAT_CORNER_RADIUS = 16; /** - * SVG path for drawing next/previous notch from left to right. + * Rounded notch radius. * @const */ -Blockly.BlockSvg.NOTCH_PATH_DOWN = 'l 8,8 0,16 -8,8'; +Blockly.BlockSvg.NOTCH_RADIUS = 1; +Blockly.BlockSvg.NOTCH_HEIGHT = Blockly.BlockSvg.NOTCH_BASE_HEIGHT + Blockly.BlockSvg.NOTCH_RADIUS; /** - * SVG path for drawing next/previous notch from right to left. + * SVG path for drawing next/previous notch from top to bottom. * @const */ -Blockly.BlockSvg.NOTCH_PATH_UP = 'l 8,-8 0,-16 -8,-8'; +Blockly.BlockSvg.NOTCH_PATH_DOWN = + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + 2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '-45 0 0 ' + Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + 'l ' + (Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 - Blockly.BlockSvg.NOTCH_RADIUS) + ',' + + (Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 - Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + 2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '-45 0 1 ' + Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + 'v ' + (Blockly.BlockSvg.NOTCH_BASE_HEIGHT/2 - Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + 2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '45 0 1 ' + -1*Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + 'l ' + (-1*Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 + Blockly.BlockSvg.NOTCH_RADIUS) + ',' + + (1*Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 - Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + 2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '45 0 0 ' + -Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + Blockly.BlockSvg.NOTCH_RADIUS; +/** + * SVG path for drawing next/previous notch from bottom to top. + * @const + */ +Blockly.BlockSvg.NOTCH_PATH_UP = + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + -2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '45 0 1 ' + Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + -Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + 'l ' + (Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 - Blockly.BlockSvg.NOTCH_RADIUS) + ',' + + -1*(Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 - Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + -2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '45 0 0 ' + Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + -Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + 'v ' + -1*(Blockly.BlockSvg.NOTCH_BASE_HEIGHT/2 - Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + -1*Blockly.BlockSvg.NOTCH_RADIUS + ',' + -2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '-45 0 0 ' + -1*Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + -Blockly.BlockSvg.NOTCH_RADIUS + + 'l ' + (-1*Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 + Blockly.BlockSvg.NOTCH_RADIUS) + ',' + + (-1*Blockly.BlockSvg.NOTCH_BASE_HEIGHT/4 + Blockly.BlockSvg.NOTCH_RADIUS) + ' ' + + 'a ' + Blockly.BlockSvg.NOTCH_RADIUS + ',' + -2*Blockly.BlockSvg.NOTCH_RADIUS + ' ' + + '-45 0 1 ' + -Blockly.BlockSvg.NOTCH_RADIUS/2 + ',' + -Blockly.BlockSvg.NOTCH_RADIUS; /** * SVG path for drawing a horizontal puzzle tab from top to bottom. * @const
Round notch corners. The rounding adds some height to the notch, so this is compensated by removing height from the notch "tab". For this reason the radius should stay relatively small.
LLK_scratch-blocks
train
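To make the height bookkeeping in that message concrete, here is a standalone JavaScript sketch (not code from the repository) of how one rounded-corner arc segment of the notch path is assembled, and where the extra NOTCH_RADIUS of height comes from:

// Hypothetical helper mirroring the arithmetic in the diff above.
var NOTCH_BASE_HEIGHT = 32;
var NOTCH_RADIUS = 1;
// Each rounded corner consumes straight edge, so the straight segments
// shrink while the overall notch grows by the radius:
var NOTCH_HEIGHT = NOTCH_BASE_HEIGHT + NOTCH_RADIUS;

function arc(rx, ry, rotation, largeArc, sweep, dx, dy) {
  // 'a' is SVG path syntax for a relative elliptical arc.
  return 'a ' + rx + ',' + ry + ' ' + rotation + ' ' +
      largeArc + ' ' + sweep + ' ' + dx + ',' + dy;
}

// First corner of NOTCH_PATH_DOWN, as composed in the diff:
console.log(arc(NOTCH_RADIUS, 2 * NOTCH_RADIUS, -45, 0, 0,
    NOTCH_RADIUS / 2, NOTCH_RADIUS));
// "a 1,2 -45 0 0 0.5,1"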
99a37489b874dd705d4647c05ae6fb517367b58e
diff --git a/properties/base.py b/properties/base.py index <HASH>..<HASH> 100644 --- a/properties/base.py +++ b/properties/base.py @@ -146,8 +146,8 @@ class HasProperties(with_metaclass(PropertyMetaclass, object)): defaults = self._defaults or dict() for key, value in iteritems(defaults): if key not in self._props.keys(): - raise KeyError( - 'Default input "{:s}" is not a known property'.format(key) + raise AttributeError( + "Default input '{:s}'' is not a known property".format(key) ) if callable(value): setattr(self, key, value()) @@ -157,8 +157,8 @@ class HasProperties(with_metaclass(PropertyMetaclass, object)): # set the keywords for key in kwargs: if not hasattr(self, key) and key not in self._props.keys(): - raise KeyError('Keyword input "{:s}" is not a known property ' - 'or attribute'.format(key)) + raise AttributeError("Keyword input '{:s}'' is not a known " + "property or attribute".format(key)) setattr(self, key, kwargs[key]) def _get(self, name): diff --git a/tests/test_basic.py b/tests/test_basic.py index <HASH>..<HASH> 100644 --- a/tests/test_basic.py +++ b/tests/test_basic.py @@ -30,7 +30,7 @@ class TestBasic(unittest.TestCase): with self.assertRaises(AttributeError): setattr(GettablePropOpt(), 'mygp', 5) - with self.assertRaises(KeyError): + with self.assertRaises(AttributeError): GettablePropOpt(not_mygp=0) assert GettablePropOpt().validate() diff --git a/tests/test_default.py b/tests/test_default.py index <HASH>..<HASH> 100644 --- a/tests/test_default.py +++ b/tests/test_default.py @@ -70,7 +70,7 @@ class TestDefault(unittest.TestCase): del(hi.c) assert hi.c == 100 - with self.assertRaises(KeyError): + with self.assertRaises(AttributeError): class HasIntCError(HasIntC): _defaults = {'z': 100} HasIntCError()
Change KeyErrors to AttributeErrors for more consistency with error types. These occur when setting attributes from a dictionary, so both error types are reasonably valid.
seequent_properties
train
5cdc2dbf57253bd3c331d49399fe0835f1167893
diff --git a/lib/kafka-producer.js b/lib/kafka-producer.js index <HASH>..<HASH> 100644 --- a/lib/kafka-producer.js +++ b/lib/kafka-producer.js @@ -47,22 +47,24 @@ Producer.prototype.getProducer = Promise.method(function (opts) { * the message against the existing schemas. * * @param {kafka.Producer} producerInstance node-rdkafka instance. - * @param {string} topic Topic to produce on. * @param {kafka.Producer.Topic} kafkaTopic node-rdkafka Topic instance. * @param {number} partition The partition to produce on. * @param {Object} value The message. * @param {string|number} key The partioning key. */ -Producer.prototype._produceWrapper = function (producerInstance, topic, kafkaTopic, +Producer.prototype._produceWrapper = function (producerInstance, kafkaTopic, partition, value, key) { - if (!this.valueSchemas[topic]) { + var topicName = kafkaTopic.name(); + + if (!this.valueSchemas[topicName]) { // topic not found in schemas, bail early - console.log('KafkaAvro :: Warning, did not find topic on SR:', topic); + console.log('KafkaAvro :: Warning, did not find topic on SR:', topicName); var bufVal = new Buffer(JSON.stringify(value)); - return producerInstance.__kafkaAvro_produce(topic, partition, bufVal, key); + return producerInstance.__kafkaAvro_produce(kafkaTopic, partition, bufVal, key); } - var bufValue = this.valueSchemas[topic].toBuffer(value); - return producerInstance.__kafkaAvro_produce(topic, partition, bufValue, key); + var bufValue = this.valueSchemas[topicName].toBuffer(value); + + return producerInstance.__kafkaAvro_produce(kafkaTopic, partition, bufValue, key); };
change producer API, use topic.name()
waldophotos_kafka-avro
train
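A condensed sketch of the changed call path, with illustrative names: the node-rdkafka-style Topic object from the diff is passed straight through, and its name() is used only for the schema-registry lookup.

// Sketch, not the module's actual code.
function produceWrapper(producerInstance, kafkaTopic, partition, value, key, valueSchemas) {
  var topicName = kafkaTopic.name();
  var schema = valueSchemas[topicName];

  var buf = schema
    ? schema.toBuffer(value)              // Avro-encoded payload
    : new Buffer(JSON.stringify(value));  // fallback: plain JSON (Buffer.from in modern Node)

  // The rdkafka produce call receives the Topic instance itself.
  return producerInstance.__kafkaAvro_produce(kafkaTopic, partition, buf, key);
}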
0f0c85b65bba9563316e8108ee8d80c8fdd33594
diff --git a/.size-limit b/.size-limit index <HASH>..<HASH> 100644 --- a/.size-limit +++ b/.size-limit @@ -19,6 +19,6 @@ "name": "The home page of the documentation", "path": ".next/bundles/pages/index.js", "webpack": false, - "limit": "3.4 KB" + "limit": "3.7 KB" } ] diff --git a/docs/src/modules/components/GitHub.js b/docs/src/modules/components/GitHub.js index <HASH>..<HASH> 100644 --- a/docs/src/modules/components/GitHub.js +++ b/docs/src/modules/components/GitHub.js @@ -6,7 +6,7 @@ import SvgIcon from 'material-ui/SvgIcon'; function GitHub(props) { return ( <SvgIcon {...props}> - <path d="M12.007 0C6.12 0 1.1 4.27.157 10.08c-.944 5.813 2.468 11.45 8.054 13.312.19.064.397.033.555-.084.16-.117.25-.304.244-.5v-2.042c-3.33.735-4.037-1.56-4.037-1.56-.22-.726-.694-1.35-1.334-1.756-1.096-.75.074-.735.074-.735.773.103 1.454.557 1.846 1.23.694 1.21 2.23 1.638 3.45.96.056-.61.327-1.178.766-1.605-2.67-.3-5.462-1.335-5.462-6.002-.02-1.193.42-2.35 1.23-3.226-.327-1.015-.27-2.116.166-3.09 0 0 1.006-.33 3.3 1.23 1.966-.538 4.04-.538 6.003 0 2.295-1.5 3.3-1.23 3.3-1.23.445 1.006.49 2.144.12 3.18.81.877 1.25 2.033 1.23 3.226 0 4.607-2.805 5.627-5.476 5.927.578.583.88 1.386.825 2.206v3.29c-.005.2.092.393.26.507.164.115.377.14.565.063 5.568-1.88 8.956-7.514 8.007-13.313C22.892 4.267 17.884.007 12.008 0z" /> + <path d="M12 .3a12 12 0 0 0-3.8 23.4c.6.1.8-.3.8-.6v-2c-3.3.7-4-1.6-4-1.6-.6-1.4-1.4-1.8-1.4-1.8-1-.7.1-.7.1-.7 1.2 0 1.9 1.2 1.9 1.2 1 1.8 2.8 1.3 3.5 1 0-.8.4-1.3.7-1.6-2.7-.3-5.5-1.3-5.5-6 0-1.2.5-2.3 1.3-3.1-.2-.4-.6-1.6 0-3.2 0 0 1-.3 3.4 1.2a11.5 11.5 0 0 1 6 0c2.3-1.5 3.3-1.2 3.3-1.2.6 1.6.2 2.8 0 3.2.9.8 1.3 1.9 1.3 3.2 0 4.6-2.8 5.6-5.5 5.9.5.4.9 1 .9 2.2v3.3c0 .3.1.7.8.6A12 12 0 0 0 12 .3" /> </SvgIcon> ); } diff --git a/docs/src/modules/styles/getPageContext.js b/docs/src/modules/styles/getPageContext.js index <HASH>..<HASH> 100644 --- a/docs/src/modules/styles/getPageContext.js +++ b/docs/src/modules/styles/getPageContext.js @@ -38,7 +38,9 @@ function createPageContext() { sheetsManager: new Map(), // This is needed in order to inject the critical CSS. sheetsRegistry: new SheetsRegistry(), - generateClassName: createGenerateClassName(), + generateClassName: createGenerateClassName({ + productionPrefix: 'j', // Reduce the bandwidth usage. + }), }; } diff --git a/pages/index.js b/pages/index.js index <HASH>..<HASH> 100644 --- a/pages/index.js +++ b/pages/index.js @@ -7,6 +7,7 @@ import Typography from 'material-ui/Typography'; import Button from 'material-ui/Button'; import withRoot from 'docs/src/modules/components/withRoot'; import AppFooter from 'docs/src/modules/components/AppFooter'; +import NoSSR from 'docs/src/modules/components/NoSSR'; import Link from 'docs/src/modules/components/Link'; import MarkdownElement from 'docs/src/modules/components/MarkdownElement'; @@ -58,6 +59,7 @@ const styles = theme => ({ padding: theme.spacing.unit * 2, display: 'flex', justifyContent: 'center', + minHeight: 600, }, backersBody: { maxWidth: theme.spacing.unit * 90, @@ -109,9 +111,10 @@ function PageHome(props) { </div> </div> <div className={classes.backers}> - <MarkdownElement - className={classes.backersBody} - text={` + <NoSSR> + <MarkdownElement + className={classes.backersBody} + text={` <h2 style="text-align: center;">Supporting Material-UI</h2> Material-UI is an MIT-licensed open source project. @@ -148,8 +151,9 @@ Gold Sponsors are those who have pledged $500/month and more to Material-UI. 
<a href="https://opencollective.com/material-ui/tiers/gold-sponsors/7/website" target="_blank" style="margin-right: 8px;"><img src="https://opencollective.com/material-ui/tiers/gold-sponsors/7/avatar.svg" alt="7" /></a> <a href="https://opencollective.com/material-ui/tiers/gold-sponsors/8/website" target="_blank" style="margin-right: 8px;"><img src="https://opencollective.com/material-ui/tiers/gold-sponsors/8/avatar.svg" alt="8" /></a> <a href="https://opencollective.com/material-ui/tiers/gold-sponsors/9/website" target="_blank" style="margin-right: 8px;"><img src="https://opencollective.com/material-ui/tiers/gold-sponsors/9/avatar.svg" alt="9" /></a> - `} - /> + `} + /> + </NoSSR> </div> <AppFooter /> </div>
[docs] Improve the performance of the homepage (#<I>)
mui-org_material-ui
train
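Two things in this record do the work: the one-letter productionPrefix shrinks every generated class name, and the NoSSR wrapper defers the heavy sponsors markdown until after client-side hydration (the minHeight reserves its space meanwhile). NoSSR is docs-internal; a common minimal implementation of the pattern looks roughly like this sketch:

// Minimal NoSSR sketch: render nothing on the server, then render the
// children once the component has mounted in the browser.
import React from 'react';

class NoSSR extends React.Component {
  constructor(props) {
    super(props);
    this.state = { mounted: false };
  }

  componentDidMount() {
    this.setState({ mounted: true }); // runs on the client only
  }

  render() {
    return this.state.mounted ? this.props.children : null;
  }
}

export default NoSSR;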
e8d3df92f7a6f72475dcd028496be2c6923fcb3a
diff --git a/p2p/host/basic/basic_host.go b/p2p/host/basic/basic_host.go index <HASH>..<HASH> 100644 --- a/p2p/host/basic/basic_host.go +++ b/p2p/host/basic/basic_host.go @@ -605,6 +605,17 @@ func (h *BasicHost) RemoveStreamHandler(pid protocol.ID) { // to create one. If ProtocolID is "", writes no header. // (Threadsafe) func (h *BasicHost) NewStream(ctx context.Context, p peer.ID, pids ...protocol.ID) (network.Stream, error) { + // Ensure we have a connection, with peer addresses resolved by the routing system (#207) + // It is not sufficient to let the underlying host connect, it will most likely not have + // any addresses for the peer without any prior connections. + // If the caller wants to prevent the host from dialing, it should use the NoDial option. + if nodial, _ := network.GetNoDial(ctx); !nodial { + err := h.Connect(ctx, peer.AddrInfo{ID: p}) + if err != nil { + return nil, err + } + } + s, err := h.Network().NewStream(ctx, p) if err != nil { return nil, err diff --git a/p2p/host/basic/basic_host_test.go b/p2p/host/basic/basic_host_test.go index <HASH>..<HASH> 100644 --- a/p2p/host/basic/basic_host_test.go +++ b/p2p/host/basic/basic_host_test.go @@ -6,6 +6,7 @@ import ( "io" "io/ioutil" "reflect" + "strings" "sync" "testing" "time" @@ -439,6 +440,51 @@ func TestNewDialOld(t *testing.T) { require.Equal(t, s.Protocol(), protocol.ID("/testing"), "should have gotten /testing") } +func TestNewStreamResolve(t *testing.T) { + h1, err := NewHost(swarmt.GenSwarm(t), nil) + require.NoError(t, err) + h2, err := NewHost(swarmt.GenSwarm(t), nil) + require.NoError(t, err) + + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + + // Get the tcp port that h2 is listening on. + h2pi := h2.Peerstore().PeerInfo(h2.ID()) + var dialAddr string + const tcpPrefix = "/ip4/127.0.0.1/tcp/" + for _, addr := range h2pi.Addrs { + addrStr := addr.String() + if strings.HasPrefix(addrStr, tcpPrefix) { + port := addrStr[len(tcpPrefix):] + dialAddr = "/dns4/localhost/tcp/" + port + break + } + } + assert.NotEqual(t, dialAddr, "") + + // Add the DNS multiaddr to h1's peerstore. + maddr, err := ma.NewMultiaddr(dialAddr) + require.NoError(t, err) + h1.Peerstore().AddAddr(h2.ID(), maddr, time.Second) + + connectedOn := make(chan protocol.ID) + h2.SetStreamHandler("/testing", func(s network.Stream) { + connectedOn <- s.Protocol() + s.Close() + }) + + // NewStream will make a new connection using the DNS address in h1's + // peerstore. + s, err := h1.NewStream(ctx, h2.ID(), "/testing/1.0.0", "/testing") + require.NoError(t, err) + + // force the lazy negotiation to complete + _, err = s.Write(nil) + require.NoError(t, err) + assertWait(t, connectedOn, "/testing") +} + func TestProtoDowngrade(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel()
Resolve addresses when creating a new stream (#<I>) * Resolve addresses when creating a new stream. BasicHost.NewStream will try to establish a connection if one doesn't already exist. This will fail if the host's addresses have not yet been resolved. This PR resolves the host's addresses before creating the stream and the possible new connection. Fixes #<I> * Changes from review comments
libp2p_go-libp2p
train
d8ddc5aa366b413e6558aca4afe00546231f673e
diff --git a/lib/OpenLayers/Layer/Marker.js b/lib/OpenLayers/Layer/Marker.js index <HASH>..<HASH> 100644 --- a/lib/OpenLayers/Layer/Marker.js +++ b/lib/OpenLayers/Layer/Marker.js @@ -58,10 +58,10 @@ OpenLayers.Layer.Marker.prototype = */ drawMarker: function(marker) { var px = this.map.getPixelFromLonLat(marker.lonlat); - var markerDiv = marker.draw(px); - if (marker.drawn != 1) { - this.div.appendChild(markerDiv); - marker.drawn = 1; + var markerImg = marker.draw(px); + if (!marker.drawn) { + this.div.appendChild(markerImg); + marker.drawn = true; } },
More readable code, and change markerDiv -> markerImg, since we're not returning a div anymore. git-svn-id: <URL>
openlayers_openlayers
train
978caec398e6055e404c948763379ac83eee92b5
diff --git a/Configuration/Configuration.php b/Configuration/Configuration.php index <HASH>..<HASH> 100644 --- a/Configuration/Configuration.php +++ b/Configuration/Configuration.php @@ -79,6 +79,10 @@ class Configuration implements ConfigurationInterface */ public function loadConfiguration($sitePath, $envName = null) { + if ($this->isSpressSite($sitePath) === false) { + throw new \RuntimeException(sprintf('Not a Spress site at "%s".', $sitePath)); + } + $default = $this->loadDefaultConfiguration(); $dev = $this->loadEnvironmentConfiguration($sitePath, 'dev'); $result = $this->resolver->resolve(array_merge($default, $dev)); @@ -129,6 +133,11 @@ class Configuration implements ConfigurationInterface return $filename; } + private function isSpressSite($sitePath) + { + return file_exists($sitePath.'/'.$this->configFilename); + } + private function getConfigurationResolver() { $resolver = new AttributesResolver(); diff --git a/Tests/Configuration/ConfigurationTest.php b/Tests/Configuration/ConfigurationTest.php index <HASH>..<HASH> 100644 --- a/Tests/Configuration/ConfigurationTest.php +++ b/Tests/Configuration/ConfigurationTest.php @@ -53,7 +53,7 @@ class ConfigurationTest extends \PHPUnit_Framework_TestCase $this->assertArrayHasKey('text_extensions', $values['data_sources']['filesystem']['arguments']); } - public function testLoadConfigurationEnvironment() + public function testLoadConfigurationWithEnvironmentName() { $defaulConfiguration = __DIR__.'/../../config/default.yml'; @@ -90,8 +90,24 @@ class ConfigurationTest extends \PHPUnit_Framework_TestCase /** * @expectedException RuntimeException + * @expectedExceptionMessageRegExp /Not a Spress site at/ */ - public function testLoadConfigurationEnvironmentEmpty() + public function testNotASpressSite() + { + $defaulConfiguration = __DIR__.'/../../config/default.yml'; + + $locator = new FileLocator([]); + $configLoader = new Config([new YamlLoader($locator)]); + + $config = new Configuration($configLoader, $defaulConfiguration); + $values = $config->loadConfiguration(__DIR__.'/../fixtures', ''); + } + + /** + * @expectedException RuntimeException + * @expectedExceptionMessage Expected a non-empty string as environment name. + */ + public function testEnvironmentEmpty() { $defaulConfiguration = __DIR__.'/../../config/default.yml';
Clarified the message when the `site:build` command is invoked against a non-Spress site folder
spress_spress-core
train
b227fe7e2afd438c134e9d4b600f1f5e882483c2
diff --git a/test/test_controller.go b/test/test_controller.go index <HASH>..<HASH> 100644 --- a/test/test_controller.go +++ b/test/test_controller.go @@ -114,7 +114,11 @@ func unmarshalControllerExample(data []byte) (map[string]interface{}, error) { } func (s *ControllerSuite) generateControllerExamples(t *c.C) map[string]interface{} { - cmd := exec.Command(exec.DockerImage(imageURIs["controller-examples"]), "/bin/flynn-controller-examples") + cmd := exec.CommandUsingCluster( + s.clusterClient(t), + exec.DockerImage(imageURIs["controller-examples"]), + "/bin/flynn-controller-examples", + ) cmd.Env = map[string]string{ "CONTROLLER_KEY": s.clusterConf(t).Key, "SKIP_MIGRATE_DOMAIN": "true",
test: Use explicit cluster client in controller examples test
flynn_flynn
train
6ed93a89901de0747c81b349779ac9a2a6a6f86c
diff --git a/library/src/main/java/com/mikepenz/materialdrawer/Drawer.java b/library/src/main/java/com/mikepenz/materialdrawer/Drawer.java index <HASH>..<HASH> 100644 --- a/library/src/main/java/com/mikepenz/materialdrawer/Drawer.java +++ b/library/src/main/java/com/mikepenz/materialdrawer/Drawer.java @@ -14,6 +14,9 @@ import android.widget.FrameLayout; import android.widget.RelativeLayout; import com.mikepenz.fastadapter.FastAdapter; +import com.mikepenz.fastadapter.adapters.FooterAdapter; +import com.mikepenz.fastadapter.adapters.HeaderAdapter; +import com.mikepenz.fastadapter.adapters.ItemAdapter; import com.mikepenz.materialdrawer.holder.ImageHolder; import com.mikepenz.materialdrawer.holder.StringHolder; import com.mikepenz.materialdrawer.model.ContainerDrawerItem; @@ -264,15 +267,42 @@ public class Drawer { } /** - * get the BaseDrawerAdapter of the current drawer + * get the FastAdapter of the current drawer * * @return */ - public FastAdapter getAdapter() { + public FastAdapter<IDrawerItem> getAdapter() { return mDrawerBuilder.mAdapter; } /** + * get the HeaderAdapter of the current drawer + * + * @return + */ + public HeaderAdapter<IDrawerItem> getHeaderAdapter() { + return mDrawerBuilder.mHeaderAdapter; + } + + /** + * get the ItemAdapter of the current drawer + * + * @return + */ + public ItemAdapter<IDrawerItem> getItemAdapter() { + return mDrawerBuilder.mItemAdapter; + } + + /** + * get the FooterAdapter of the current drawer + * + * @return + */ + public FooterAdapter<IDrawerItem> getFooterAdapter() { + return mDrawerBuilder.mFooterAdapter; + } + + /** * get all drawerItems of the current drawer * * @return diff --git a/library/src/main/java/com/mikepenz/materialdrawer/MiniDrawer.java b/library/src/main/java/com/mikepenz/materialdrawer/MiniDrawer.java index <HASH>..<HASH> 100644 --- a/library/src/main/java/com/mikepenz/materialdrawer/MiniDrawer.java +++ b/library/src/main/java/com/mikepenz/materialdrawer/MiniDrawer.java @@ -37,7 +37,7 @@ public class MiniDrawer { private LinearLayout mContainer; private RecyclerView mRecyclerView; protected FastAdapter<IDrawerItem> mAdapter; - protected ItemAdapter<IDrawerItem> mItemAdapter = new ItemAdapter<IDrawerItem>(); + protected ItemAdapter<IDrawerItem> mItemAdapter = new ItemAdapter<>(); private Drawer mDrawer; @@ -170,22 +170,56 @@ public class MiniDrawer { return this; } + /** + * get the RecyclerView of this MiniDrawer + * + * @return + */ public RecyclerView getRecyclerView() { return mRecyclerView; } - public FastAdapter<IDrawerItem> getDrawerAdapter() { + /** + * get the FastAdapter of this MiniDrawer + * + * @return + */ + public FastAdapter<IDrawerItem> getAdapter() { return mAdapter; } + /** + * get the ItemAdapter of this MiniDrawer + * + * @return + */ + public ItemAdapter<IDrawerItem> getItemAdapter() { + return mItemAdapter; + } + + /** + * get the Drawer used to fill this MiniDrawer + * + * @return + */ public Drawer getDrawer() { return mDrawer; } + /** + * get the AccountHeader used to fill the this MiniDrawer + * + * @return + */ public AccountHeader getAccountHeader() { return mAccountHeader; } + /** + * get the Crossfader used for this MiniDrawer + * + * @return + */ public ICrossfader getCrossFader() { return mCrossFader; }
* make ItemAdapter, HeaderAdapter, FooterAdapter accessible in the MiniDrawer and Drawer
mikepenz_MaterialDrawer
train
f8c39ffde068543e1745ba0b82c7985735072c63
diff --git a/index.html b/index.html index <HASH>..<HASH> 100644 --- a/index.html +++ b/index.html @@ -502,10 +502,10 @@ select().from('person').where({'last_name': 'Flintstone'}).union() <p id="forUpdate"> <b class="header">forUpdate, noWait</b><code>sel.forUpdate([tbl, ...]) / sel.noWait()</code> <br /> - <p>Add the <tt>FOR UPDATE</tt> clause to lock all selected records from all tables in the select (or just the tables specified), along with an optional <tt>NO WAIT</tt> at the end:</p> + <p>Add the <tt>FOR UPDATE</tt> clause to lock all selected records from all tables in the select (or just the tables specified), along with an optional <tt>NOWAIT</tt> at the end:</p> <pre> select('addr_id').from('person').forUpdate().of('addr_id').noWait(); -// SELECT addr_id FROM person FOR UPDATE OF addr_id NO WAIT +// SELECT addr_id FROM person FOR UPDATE OF addr_id NOWAIT </pre> </p> diff --git a/sql-bricks.js b/sql-bricks.js index <HASH>..<HASH> 100644 --- a/sql-bricks.js +++ b/sql-bricks.js @@ -313,7 +313,7 @@ Select.defineClause('forUpdate', function(opts) { if (this._forUpdate) return `FOR UPDATE${this._of ? ` OF ${handleColumns(this._of, opts)}` : ''}` + - (this._noWait ? ' NO WAIT' : ''); + (this._noWait ? ' NOWAIT' : ''); }); diff --git a/tests/doctests.js b/tests/doctests.js index <HASH>..<HASH> 100644 --- a/tests/doctests.js +++ b/tests/doctests.js @@ -166,7 +166,7 @@ check(select().from('person').where({'last_name': 'Flintstone'}).union() .sele }); it("select('addr_id').from('person').forUpdate().of('addr_id').noWait();", function() { -check(select('addr_id').from('person').forUpdate().of('addr_id').noWait(), "SELECT addr_id FROM person FOR UPDATE OF addr_id NO WAIT"); +check(select('addr_id').from('person').forUpdate().of('addr_id').noWait(), "SELECT addr_id FROM person FOR UPDATE OF addr_id NOWAIT"); }); it("insert('person', {'first_name': 'Fred', 'last_name': 'Flintstone'});", function() { diff --git a/tests/tests.js b/tests/tests.js index <HASH>..<HASH> 100644 --- a/tests/tests.js +++ b/tests/tests.js @@ -374,9 +374,9 @@ describe('SQL Bricks', function() { check(select().from('user').forUpdate().of('user'), 'SELECT * FROM "user" FOR UPDATE OF "user"'); }); - it('should support FOR UPDATE OF ... NO WAIT', function() { + it('should support FOR UPDATE OF ... NOWAIT', function() { check(select().from('user').forUpdate().of('user').noWait(), - 'SELECT * FROM "user" FOR UPDATE OF "user" NO WAIT'); + 'SELECT * FROM "user" FOR UPDATE OF "user" NOWAIT'); }); });
fix: output `.noWait()` as 'NOWAIT'; it was previously output as 'NO WAIT'
CSNW_sql-bricks
train
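The corrected keyword can be checked with a short usage snippet mirroring the doctest above (sketch):

// .noWait() now renders the single-word NOWAIT keyword.
var select = require('sql-bricks').select;

var sql = select('addr_id').from('person')
  .forUpdate().of('addr_id').noWait()
  .toString();

console.log(sql);
// SELECT addr_id FROM person FOR UPDATE OF addr_id NOWAIT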
f830b83f0b06e7b3afe1de79678ac921e4381a6c
diff --git a/pytablewriter/_excel_writer.py b/pytablewriter/_excel_writer.py index <HASH>..<HASH> 100644 --- a/pytablewriter/_excel_writer.py +++ b/pytablewriter/_excel_writer.py @@ -365,7 +365,7 @@ class ExcelXlsxTableWriter(ExcelTableWriter): except TypeError: pass - if prop.data is None: + if prop.typecode is dp.Typecode.NAN: base_props = dict(self.__nan_format_property) cell_format = self.__get_cell_format(format_key, base_props)
Fix the case of writing a NaN (not a number) value
thombashi_pytablewriter
train
1001dde7715dbb6b993100c79257f8266571b2c7
diff --git a/lib/seraph.js b/lib/seraph.js index <HASH>..<HASH> 100644 --- a/lib/seraph.js +++ b/lib/seraph.js @@ -199,10 +199,13 @@ Seraph.nodeFlags = [ 'self' ]; Seraph.prototype._isNode = function(node) { + if (!node || typeof node !== 'object') { + return false; + } + var inNode = node.hasOwnProperty.bind(node); - return typeof node === 'object' && - Seraph.nodeFlags.every(inNode) && - typeof node.data === 'object'; + return Seraph.nodeFlags.every(inNode) && + typeof node.data === 'object'; }; /** @@ -216,10 +219,13 @@ Seraph.relationshipFlags = [ 'end' ]; Seraph.prototype._isRelationship = function(rel) { + if (!rel || typeof rel !== 'object') { + return false; + } + var inRelationship = rel.hasOwnProperty.bind(rel); - return typeof rel === 'object' && - Seraph.relationshipFlags.every(inRelationship) && - typeof rel.data === 'object'; + return Seraph.relationshipFlags.every(inRelationship) && + typeof rel.data === 'object'; }; /** diff --git a/test/seraph.js b/test/seraph.js index <HASH>..<HASH> 100644 --- a/test/seraph.js +++ b/test/seraph.js @@ -709,7 +709,7 @@ describe('seraph#query, seraph#queryRaw', function() { cypher += "return x, n "; db.query(cypher, function(err, result) { assert.ok(!err); - assert.deepEqual([ user ], result); + assert.deepEqual([{x: user, n: null}], result); done(); }); }
Fixed the case where some return values are optional in a query. Closes #<I>
brikteknologier_seraph
train
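The fixed expectation means a query returning several named values now yields one object per row, with explicit nulls for optional values that found no match. A usage sketch; the connection URL and Cypher are illustrative, while db.query with a params object is seraph's documented form:

// Sketch: row objects now carry every returned name, nulls included.
var db = require('seraph')('http://localhost:7474');

var cypher = 'START x = node({id}) ' +
             'OPTIONAL MATCH (x)-[:knows]->(n) ' +
             'RETURN x, n';

db.query(cypher, { id: 1 }, function (err, result) {
  if (err) throw err;
  // Previously: [ xNode ]. Now: [ { x: xNode, n: null } ]
  console.log(result[0].x, result[0].n);
});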
35b4222c3f9fd5e936c4f0c7b00fcd82459c611b
diff --git a/lib/python/vdm/static/js/vdm.ui.js b/lib/python/vdm/static/js/vdm.ui.js index <HASH>..<HASH> 100755 --- a/lib/python/vdm/static/js/vdm.ui.js +++ b/lib/python/vdm/static/js/vdm.ui.js @@ -2390,11 +2390,11 @@ var loadPage = function() { if(connection.Metadata['SERVER_DELETE'].result == true){ $("#deleteConfirmation").modal('hide'); $('#errorDialog').modal('hide'); - toggleServer(editStates.ShowEdit,hostId); + toggleServer(editStates.ShowEdit,serverId); } else if(connection.Metadata['SERVER_DELETE'].hasOwnProperty('statusstring')){ $('#errorMsg').html(connection.Metadata['SERVER_DELETE']['statusstring']) $('#errorDialog').modal('show'); - toggleServer(editStates.ShowEdit,hostId); + toggleServer(editStates.ShowEdit,serverId); } var dbData = { id: VdmUI.getCurrentDbCookie() == -1 ? 1 : VdmUI.getCurrentDbCookie() diff --git a/lib/python/vdm/tests/server/serverTest.py b/lib/python/vdm/tests/server/serverTest.py index <HASH>..<HASH> 100755 --- a/lib/python/vdm/tests/server/serverTest.py +++ b/lib/python/vdm/tests/server/serverTest.py @@ -91,12 +91,6 @@ class Server(unittest.TestCase): response = requests.get(url) value = response.json() if value: - server_length = len(value['members']) - last_server_id = value['members'][server_length-1]['id'] - print "ServerId to be deleted is " + str(last_server_id) - url += str(last_server_id) - response = requests.delete(url) - self.assertEqual(response.status_code, 200) # Delete database db_url = __db_url__ + str(last_db_id) response = requests.delete(db_url)
VDM-<I>: Fixed failing tests in serverTests.py
VoltDB_voltdb
train
d5dbe5ece57fb7d04002378542c6be3d40e2680c
diff --git a/google-cloud-monitoring/lib/google/cloud/monitoring.rb b/google-cloud-monitoring/lib/google/cloud/monitoring.rb index <HASH>..<HASH> 100644 --- a/google-cloud-monitoring/lib/google/cloud/monitoring.rb +++ b/google-cloud-monitoring/lib/google/cloud/monitoring.rb @@ -186,6 +186,38 @@ module Google end ## + # Create a new client object for QueryService. + # + # By default, this returns an instance of + # [Google::Cloud::Monitoring::V3::QueryService::Client](https://googleapis.dev/ruby/google-cloud-monitoring-v3/latest/Google/Cloud/Monitoring/V3/QueryService/Client.html) + # for version V3 of the API. + # However, you can specify specify a different API version by passing it in the + # `version` parameter. If the QueryService service is + # supported by that API version, and the corresponding gem is available, the + # appropriate versioned client will be returned. + # + # ## About QueryService + # + # The QueryService API is used to manage time series data in Stackdriver + # Monitoring. Time series data is a collection of data points that describes + # the time-varying values of a metric. + # + # @param version [::String, ::Symbol] The API version to connect to. Optional. + # Defaults to `:v3`. + # @return [QueryService::Client] A client object for the specified version. + # + def self.query_service version: :v3, &block + require "google/cloud/monitoring/#{version.to_s.downcase}" + + package_name = Google::Cloud::Monitoring + .constants + .select { |sym| sym.to_s.downcase == version.to_s.downcase.tr("_", "") } + .first + package_module = Google::Cloud::Monitoring.const_get package_name + package_module.const_get(:QueryService).const_get(:Client).new(&block) + end + + ## # Create a new client object for ServiceMonitoringService. # # By default, this returns an instance of diff --git a/google-cloud-monitoring/synth.metadata b/google-cloud-monitoring/synth.metadata index <HASH>..<HASH> 100644 --- a/google-cloud-monitoring/synth.metadata +++ b/google-cloud-monitoring/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/google-cloud-ruby.git", - "sha": "f996dd9922a3818c473de5f1a08eb1689e657916" + "sha": "3c8bd26b7ed6243772b1ce4f8e206114cbeee42a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "874846a1917ee5c3fe271449f3cb9a06e75407be", - "internalRef": "326288259" + "sha": "2124b6251e56e2ec8dd5c47e7d3815d7d5841880", + "internalRef": "364422694" } } ], diff --git a/google-cloud-monitoring/test/google/cloud/monitoring/client_test.rb b/google-cloud-monitoring/test/google/cloud/monitoring/client_test.rb index <HASH>..<HASH> 100644 --- a/google-cloud-monitoring/test/google/cloud/monitoring/client_test.rb +++ b/google-cloud-monitoring/test/google/cloud/monitoring/client_test.rb @@ -62,6 +62,16 @@ class Google::Cloud::Monitoring::ClientConstructionMinitest < Minitest::Test end end + def test_query_service + Gapic::ServiceStub.stub :new, :stub do + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + client = Google::Cloud::Monitoring.query_service do |config| + config.credentials = grpc_channel + end + assert_kind_of Google::Cloud::Monitoring::V3::QueryService::Client, client + end + end + def test_service_monitoring_service Gapic::ServiceStub.stub :new, :stub do grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
feat(monitoring): Support for querying time series using the Monitoring Query Language PiperOrigin-RevId: <I> Source-
googleapis_google-cloud-ruby
train
1afe3c16ed5e4f57bddc86a0b32b628d740a1999
diff --git a/linkedin/linkedin.py b/linkedin/linkedin.py index <HASH>..<HASH> 100644 --- a/linkedin/linkedin.py +++ b/linkedin/linkedin.py @@ -565,6 +565,18 @@ class LinkedInApplication(object): response = self.make_request('PUT', url, data=json.dumps(is_liked)) raise_for_error(response) return True + + def get_company_historical_status_update_statistics(self, company_id, params=None, headers=None): + url = '%s/%s/historical-status-update-statistics' % (ENDPOINTS.COMPANIES, str(company_id)) + response = self.make_request('GET', url, params=params, headers=headers) + raise_for_error(response) + return response.json() + + def get_company_historical_follow_statistics(self, company_id, params=None, headers=None): + url = '%s/%s/historical-follow-statistics' % (ENDPOINTS.COMPANIES, str(company_id)) + response = self.make_request('GET', url, params=params, headers=headers) + raise_for_error(response) + return response.json() def comment_as_company(self, company_id, update_key, comment): comment = {'comment': comment}
Added get_company_historical_status_update_statistics and get_company_historical_follow_statistics
DEKHTIARJonathan_python3-linkedin
train
80aa23b56c1cb2dd8ae0c24788bb1af79b84c975
diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -31,7 +31,8 @@ var input = [ // HTTP/S Transport Protocol , ["http://host.xz/path/to/repo.git/", false] , ["https://host.xz/path/to/repo.git/", false] - + , ["http://host.xz:8000/path/to/repo.git/", false] + , ["https://host.xz:8000/path/to/repo.git/", false] // Local (Filesystem) Transport Protocol , ["/path/to/repo.git/", false] , ["path/to/repo.git/", false]
add tests for urls with ports
IonicaBizau_is-ssh
train
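What the new cases assert, as a usage sketch of the library:

// HTTP(S) transport urls stay non-SSH even with an explicit port.
const isSsh = require('is-ssh');

console.log(isSsh('http://host.xz:8000/path/to/repo.git/'));  // false
console.log(isSsh('https://host.xz:8000/path/to/repo.git/')); // false
console.log(isSsh('[email protected]:user/repo.git'));            // true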
03ca109134bbe5e6d5dea43b42f23314df6f5ad4
diff --git a/test-support/helpers/upload.js b/test-support/helpers/upload.js index <HASH>..<HASH> 100644 --- a/test-support/helpers/upload.js +++ b/test-support/helpers/upload.js @@ -1,7 +1,7 @@ /*global triggerEvent, find */ export default function (selector, file, filename) { - let input = find(this, selector)[0]; + let input = findWithAssert(selector)[0]; file.name = filename;
assert when the upload helper can't find the selector; also, fix the assertion so it uses normal syntax instead of ember-cli-page-object `findElement` syntax
adopted-ember-addons_ember-file-upload
train
bb535876469c8ae3ee01f8529ff1790a70e73a53
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -46,6 +46,7 @@ function Upload (cfg) { this.origin = cfg.origin this.configStore = new ConfigStore('gcs-resumable-upload') + this.uriProvidedManually = !!cfg.uri this.uri = cfg.uri || this.get('uri') this.numBytesWritten = 0 this.numRetries = 0 @@ -202,7 +203,18 @@ Upload.prototype.getAndSetOffset = function (callback) { 'Content-Range': 'bytes */*' } }, function (err, resp) { - if (err) return self.destroy(err) + if (err) { + if (resp && resp.statusCode === 404 && !self.uriProvidedManually) { + // only return the error if the user provided the resumable URI + // themselves. if we're just using the configstore file to tell us that + // this file exists, and it turns out that it doesn't, that's probably + // stale config data. + self.restart() + } else { + self.destroy(err) + } + return + } if (resp.statusCode === RESUMABLE_INCOMPLETE_STATUS_CODE) { if (resp.headers.range) { @@ -222,7 +234,7 @@ Upload.prototype.makeRequest = function (reqOpts, callback) { if (err) return callback(wrapError('Could not authenticate request', err)) request(authorizedReqOpts, function (err, resp, body) { - if (err) return callback(err) + if (err) return callback(err, resp) if (body && body.error) return callback(body.error) diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -202,10 +202,12 @@ describe('gcs-resumable-upload', function () { it('should localize the uri or get one from config', function () { var uri = 'http://www.blah.com/' var upWithUri = upload({ bucket: BUCKET, file: FILE, uri: uri }) + assert.strictEqual(upWithUri.uriProvidedManually, true) assert.strictEqual(upWithUri.uri, uri) configData[FILE] = { uri: 'fake-uri' } var up = upload({ bucket: BUCKET, file: FILE }) + assert.strictEqual(up.uriProvidedManually, false) assert.strictEqual(up.uri, 'fake-uri') }) @@ -648,6 +650,34 @@ describe('gcs-resumable-upload', function () { up.getAndSetOffset() }) + describe('restart on 404', function () { + var ERROR = new Error(':(') + var RESP = { + statusCode: 404 + } + + beforeEach(function () { + up.makeRequest = function (reqOpts, callback) { + callback(ERROR, RESP) + } + }) + + it('should restart the upload', function (done) { + up.restart = done + up.getAndSetOffset() + }) + + it('should not restart if URI provided manually', function (done) { + up.uriProvidedManually = true + up.restart = done // will cause test to fail + up.on('error', function (err) { + assert.strictEqual(err, ERROR) + done() + }) + up.getAndSetOffset() + }) + }) + it('should set the offset from the range', function (done) { up.makeRequest = function (reqOpts, callback) { callback(null, RESP) @@ -725,6 +755,7 @@ describe('gcs-resumable-upload', function () { it('should destroy the stream if there was an error', function (done) { var error = new Error(':(') + var response = {} up.authClient = { authorizeRequest: function (reqOpts, callback) { @@ -733,11 +764,12 @@ describe('gcs-resumable-upload', function () { } requestMock = function (opts, callback) { - callback(error, {}) + callback(error, response) } - up.makeRequest(REQ_OPTS, function (err) { + up.makeRequest(REQ_OPTS, function (err, resp) { assert.strictEqual(err, error) + assert.strictEqual(resp, response) done() }) })
delete config when it is probably wrong
googleapis_gcs-resumable-upload
train
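In usage terms, the distinction the patch draws looks like this sketch (bucket, file name, and the deliberately truncated uri are placeholders):

// Two paths, distinguished by the new uriProvidedManually flag.
var upload = require('gcs-resumable-upload');

// 1) No uri supplied: any cached uri came from the configstore, so a 404
//    from the status probe is treated as stale config and the upload restarts.
var up = upload({ bucket: 'my-bucket', file: 'big.tar.gz' });
up.on('error', function (err) { console.error(err); });

// 2) uri supplied by the caller: a 404 is surfaced as an error, because
//    the caller asserted that this resumable session exists.
var upManual = upload({
  bucket: 'my-bucket',
  file: 'big.tar.gz',
  uri: 'https://www.googleapis.com/upload/storage/v1/...' // placeholder
});
upManual.on('error', function (err) { console.error(err); });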
cdb23a7749d75c1bf5b453ab9f51908855ad2799
diff --git a/lib/assets/index.js b/lib/assets/index.js index <HASH>..<HASH> 100644 --- a/lib/assets/index.js +++ b/lib/assets/index.js @@ -139,7 +139,13 @@ assets.resolveConfig = function (assetConfig, fromUrl, assetGraph, cb) { ['catch'](cb); } if (typeof assetConfig === 'string') { - assetConfig = {url: encodeURI(assetConfig)}; + if (/^[\w\+]+:/.test(assetConfig)) { + // Includes protocol, assume url + assetConfig = {url: assetConfig}; + } else { + // File system path + assetConfig = {url: encodeURI(assetConfig)}; + } } if (assetConfig.isAsset || assetConfig.isResolved) { // Almost done, add .type property if possible (this is all we can do without actually fetching the asset):
assets.resolveConfig: Don't call encodeURI if assetConfig is a string that includes a protocol.
assetgraph_assetgraph
train
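The new branch condition can be exercised in isolation; a tiny sketch:

// Strings with a protocol are taken as urls verbatim; everything else is
// treated as a file system path and percent-encoded.
var hasProtocol = /^[\w\+]+:/;

console.log(hasProtocol.test('http://example.com/æøå')); // true: used as-is
console.log(hasProtocol.test('path/to/æøå.html'));       // false: encodeURI(...)
console.log(encodeURI('path/to/æøå.html'));
// "path/to/%C3%A6%C3%B8%C3%A5.html"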
7d4bf1ece42450fea6b1480a7d0972a2c224dab5
diff --git a/test/test_ssl.py b/test/test_ssl.py index <HASH>..<HASH> 100644 --- a/test/test_ssl.py +++ b/test/test_ssl.py @@ -367,7 +367,7 @@ class TestSSL(unittest.TestCase): ssl_cert_reqs=ssl.CERT_REQUIRED, ssl_ca_certs=CA_PEM) self.fail("Invalid hostname should have failed") - except: + except ConnectionFailure: pass else: try: @@ -379,7 +379,7 @@ class TestSSL(unittest.TestCase): ssl_cert_reqs=ssl.CERT_REQUIRED, ssl_ca_certs=CA_PEM) self.fail("Invalid hostname should have failed") - except: + except ConnectionFailure: pass def test_mongodb_x509_auth(self):
No bare except clauses in test_ssl.
mongodb_mongo-python-driver
train
671fb0cb28b69d65d9c605364ea38637b3c468ec
diff --git a/src/Serializer.php b/src/Serializer.php index <HASH>..<HASH> 100644 --- a/src/Serializer.php +++ b/src/Serializer.php @@ -25,7 +25,7 @@ use Xabbuh\XApi\Serializer\Normalizer\StatementNormalizer; use Xabbuh\XApi\Serializer\Normalizer\StatementResultNormalizer; /** - * Entry point to setup the {@link \Symfony\Component\Serializer\Serializer Symfony Serializer component} + * Entry point to set up the {@link \Symfony\Component\Serializer\Serializer Symfony Serializer component} * for the Experience API. * * @author Christian Flothmann <[email protected]>
fix a typo in a docblock
php-xapi_serializer
train
ecae302b44a4c0057b84f879de70ea5bd0465629
diff --git a/lib/assertions.js b/lib/assertions.js index <HASH>..<HASH> 100644 --- a/lib/assertions.js +++ b/lib/assertions.js @@ -603,7 +603,9 @@ module.exports = function (expect) { } else { this.block(valueOutput); } - this.block(annotation.prependLinesWith('error', ' // ')); + if (!annotation.isEmpty()) { + this.block(annotation.prependLinesWith('error', ' // ')); + } }).nl(); }); diff --git a/lib/types.js b/lib/types.js index <HASH>..<HASH> 100644 --- a/lib/types.js +++ b/lib/types.js @@ -39,9 +39,7 @@ module.exports = function (expect) { if (comparison) { this.nl().append(comparison.diff); } - this.prependLinesWith(function () { - this.error(' // '); - }); + this.prependLinesWith('error', ' // '); }); }).nl() .outdentLines() @@ -232,7 +230,9 @@ module.exports = function (expect) { } else { this.block(valueOutput); } - this.block(annotation.prependLinesWith('error', ' // ')); + if (!annotation.isEmpty()) { + this.block(annotation.prependLinesWith('error', ' // ')); + } }).nl(); }); diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "arraydiff": "0.1.1", "diff": "=1.0.8", "leven": "1.0.0", - "magicpen": "=2.1.1" + "magicpen": "=3.0.0" }, "devDependencies": { "browserify": "=5.9.1",
Align with changes in MagicPen@<I>
unexpectedjs_unexpected
train
7d035858465d265d1e7638ff3fff65a2b6a8b0d4
diff --git a/rx-run/src/web/custom-element-widget.js b/rx-run/src/web/custom-element-widget.js index <HASH>..<HASH> 100644 --- a/rx-run/src/web/custom-element-widget.js +++ b/rx-run/src/web/custom-element-widget.js @@ -82,7 +82,7 @@ function makePropertiesDriver() { Object.defineProperty(propertiesDriver, 'getAll', { enumerable: false, value: function getAll() { - return this.get('*'); + return this.get(ALL_PROPS); } }); return propertiesDriver;
Change props.getAll() implementation to use ALL_PROPS const
cyclejs_cyclejs
train