Columns:
  hash     string, length 40-40
  diff     string, length 131-114k
  message  string, length 7-980
  project  string, length 5-67
  split    string, 1 distinct value
8f53b6c1934e87bf201b4aa91fe893d503b35ed4
diff --git a/lib/parser.js b/lib/parser.js index <HASH>..<HASH> 100644 --- a/lib/parser.js +++ b/lib/parser.js @@ -81,7 +81,7 @@ exports.parseParams = function (str) { exports.parseFmtpConfig = exports.parseParams; exports.parsePayloads = function (str) { - return str.split(' ').map(Number); + return str.toString().split(' ').map(Number); }; exports.parseRemoteCandidates = function (str) {
parsePayloads() is not safe: if there is only a single payload in the m= line, it is treated as a number and the call to split throws an error. The other functions should be checked too.
clux_sdp-transform
train
f7c9b5fa8b9aeb2ff96e095d21e0037532a63ed2
diff --git a/java/client/test/org/openqa/selenium/UploadTest.java b/java/client/test/org/openqa/selenium/UploadTest.java index <HASH>..<HASH> 100644 --- a/java/client/test/org/openqa/selenium/UploadTest.java +++ b/java/client/test/org/openqa/selenium/UploadTest.java @@ -41,7 +41,7 @@ import java.io.IOException; /** * Demonstrates how to use WebDriver with a file input element. */ -@Ignore(value = {SAFARI, MARIONETTE}, issues = {4220}) +@Ignore(value = {SAFARI}, issues = {4220}) public class UploadTest extends JUnit4TestBase { private static final String LOREM_IPSUM_TEXT = "lorem ipsum dolor sit amet"; @@ -63,7 +63,7 @@ public class UploadTest extends JUnit4TestBase { TestUtilities.getEffectivePlatform(driver).is(ANDROID)); driver.get(pages.uploadPage); driver.findElement(By.id("upload")).sendKeys(testFile.getAbsolutePath()); - driver.findElement(By.id("go")).submit(); + driver.findElement(By.id("go")).click(); // Uploading files across a network may take a while, even if they're really small WebElement label = driver.findElement(By.id("upload_label"));
Unignoring file uploading test in marionette
SeleniumHQ_selenium
train
1a8806c1e8a952d1dfde84eb158f5da6d9551ea3
diff --git a/lib/rules/no-array-prototype-extensions.js b/lib/rules/no-array-prototype-extensions.js index <HASH>..<HASH> 100644 --- a/lib/rules/no-array-prototype-extensions.js +++ b/lib/rules/no-array-prototype-extensions.js @@ -66,6 +66,9 @@ const KNOWN_NON_ARRAY_FUNCTION_CALLS = new Set([ // RSVP.reject 'RSVP.reject', + 'RSVP.Promise.reject', + 'Ember.RSVP.reject', + 'Ember.RSVP.Promise.reject', // *storage.clear() 'window.localStorage.clear', diff --git a/tests/lib/rules/no-array-prototype-extensions.js b/tests/lib/rules/no-array-prototype-extensions.js index <HASH>..<HASH> 100644 --- a/tests/lib/rules/no-array-prototype-extensions.js +++ b/tests/lib/rules/no-array-prototype-extensions.js @@ -48,17 +48,23 @@ ruleTester.run('no-array-prototype-extensions', rule, { 'Promise.reject();', 'Promise.reject("some reason");', 'reject();', + 'this.reject();', // Global non-array class (RSVP.reject) 'RSVP.reject();', 'RSVP.reject("some reason");', + 'RSVP.Promise.reject();', + 'Ember.RSVP.reject();', + 'Ember.RSVP.Promise.reject();', // Global non-array class (*storage.clear) 'window.localStorage.clear();', 'window.sessionStorage.clear();', 'localStorage.clear();', 'sessionStorage.clear();', + 'sessionStorage?.clear();', 'clear();', + 'this.clear();', // Global non-array class (location.replace) 'window.document.location.replace(url)',
fix: more false positives with RSVP.Promise.reject() in no-array-prototype-extensions
ember-cli_eslint-plugin-ember
train
66019869e71b2bcdceffeda0ed8f5605101167ec
diff --git a/filer/admin/folderadmin.py b/filer/admin/folderadmin.py index <HASH>..<HASH> 100644 --- a/filer/admin/folderadmin.py +++ b/filer/admin/folderadmin.py @@ -1020,15 +1020,17 @@ class FolderAdmin(PrimitivePermissionAwareModelAdmin): try: selected_destination_folder = int(request.POST.get('destination', 0)) except ValueError: - selected_destination_folder = 0 - + if current_folder: + selected_destination_folder = current_folder.pk + else: + selected_destination_folder = 0 context = { "title": _("Copy files and/or folders"), "instance": current_folder, "breadcrumbs_action": _("Copy files and/or folders"), "to_copy": to_copy, "destination_folders": folders, - "selected_destination_folder": selected_destination_folder or current_folder.pk, + "selected_destination_folder": selected_destination_folder, "copy_form": form, "files_queryset": files_queryset, "folders_queryset": folders_queryset,
Fix for #<I>: make 'copy files and/or folders' work when the current folder is the root folder.
divio_django-filer
train
02c1a4833d39f112170759a3348644caf4c98b30
diff --git a/tests/unit/utils/test_ssdp.py b/tests/unit/utils/test_ssdp.py index <HASH>..<HASH> 100644 --- a/tests/unit/utils/test_ssdp.py +++ b/tests/unit/utils/test_ssdp.py @@ -22,9 +22,25 @@ except ImportError as err: pytest = None +class Mocks(object): + def get_socket_mock(self, expected_ip, expected_hostname): + ''' + Get a mock of a socket + :return: + ''' + sck = MagicMock() + sck.getsockname = MagicMock(return_value=(expected_ip, 123456)) + + sock_mock = MagicMock() + sock_mock.socket = MagicMock(return_value=sck) + sock_mock.gethostbyname = MagicMock(return_value=expected_hostname) + + return sock_mock + + @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') -class SSDPTestCase(TestCase): +class SSDPBaseTestCase(TestCase, Mocks): ''' TestCase for SSDP-related parts. ''' @@ -81,16 +97,11 @@ class SSDPTestCase(TestCase): base = ssdp.SSDPBase() expected_ip = '192.168.1.10' expected_host = 'oxygen' - sck = MagicMock() - sck.getsockname = MagicMock(return_value=(expected_ip, 123456)) - - sock_mock = MagicMock() - sock_mock.socket = MagicMock(return_value=sck) - sock_mock.gethostbyname = MagicMock(return_value=expected_host) + sock_mock = self.get_socket_mock(expected_ip, expected_host) with patch('salt.utils.ssdp.socket', sock_mock): assert base.get_self_ip() == expected_ip - sck.getsockname.side_effect = boom + sock_mock.socket().getsockname.side_effect = boom with patch('salt.utils.ssdp.socket', sock_mock): assert base.get_self_ip() == expected_host
Move the socket mock out to the utils mixin
saltstack_salt
train
706b9f9525da2fee7e98d4c3320c8cd07d80edf9
diff --git a/src/edu/rpi/cmt/calendar/diff/ParamsWrapper.java b/src/edu/rpi/cmt/calendar/diff/ParamsWrapper.java index <HASH>..<HASH> 100644 --- a/src/edu/rpi/cmt/calendar/diff/ParamsWrapper.java +++ b/src/edu/rpi/cmt/calendar/diff/ParamsWrapper.java @@ -89,15 +89,21 @@ class ParamsWrapper extends BaseSetWrapper<ParamWrapper, PropWrapper, if (ncmp == 0) { // Names match - it's a modify sel = select(sel, thisOne.diff(thatOne)); - } else if (ncmp < 0) { + thisI++; + thatI++; + continue; + } + + if (ncmp < 0) { // in this but not that - addition sel = add(sel, thisOne.makeRef()); thisI++; - } else { - // in that but not this - deletion - sel = remove(sel, thatOne.makeRef()); - thatI++; + continue; } + + // in that but not this - deletion + sel = remove(sel, thatOne.makeRef()); + thatI++; } while (thisI < size()) {
Fix to date/time processing for SOAP
Bedework_bw-util
train
f8f63e628674fcb6755e9ef50bea1d148ba49ac2
diff --git a/workflow/controller/controller.go b/workflow/controller/controller.go index <HASH>..<HASH> 100644 --- a/workflow/controller/controller.go +++ b/workflow/controller/controller.go @@ -192,6 +192,10 @@ var indexers = cache.Indexers{ // Run starts an Workflow resource controller func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWorkers, podWorkers, podCleanupWorkers int) { defer runtimeutil.HandleCrash(runtimeutil.PanicHandlers...) + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + defer wfc.wfQueue.ShutDown() defer wfc.podQueue.ShutDown() defer wfc.podCleanupQueue.ShutDown() @@ -240,7 +244,6 @@ func (wfc *WorkflowController) Run(ctx context.Context, wfWorkers, workflowTTLWo } logCtx := log.WithField("id", nodeID) - var cancel context.CancelFunc leaderName := "workflow-controller" if wfc.Config.InstanceID != "" { leaderName = fmt.Sprintf("%s-%s", leaderName, wfc.Config.InstanceID)
fix(controller): Handling panic in leaderelection (#<I>)
argoproj_argo
train
64a57da9bea8d7f4911103a6c0042f29284d6227
diff --git a/lib/plugin.js b/lib/plugin.js index <HASH>..<HASH> 100644 --- a/lib/plugin.js +++ b/lib/plugin.js @@ -19,19 +19,26 @@ function PluginType(type, vars) { fs.statSync(pluginDir); var foundPlugins = requireDir(pluginDir); - var pluginNames = Object.keys(foundPlugins); - - console.log("[Core/Plugin] Found %d plugins of type '%s':", pluginNames.length, type); - - pluginNames.forEach(function (name) { - this.plugins[name] = foundPlugins[name](this.vars); - console.log("[Core/Plugin] - '%s'", name); + var fileNames = Object.keys(foundPlugins); + + console.log("[Core/Plugin] Found %d plugins of type '%s'"+(fileNames.length > 0 ? ":" : ""), fileNames.length, type); + + fileNames.forEach(function (name) { + if(name.substr(0, 1) != "_") { + var newPlugin = new foundPlugins[name](this.vars); + this.plugins[newPlugin.info.name] = newPlugin; + console.log("[Core/Plugin] - '%s' (Enabled)", name); + }else{ + console.log("[Core/Plugin] - '%s' (Disabled)", name.substr(1)); + } }.bind(this)); } catch (err) { if(err.code == "ENOENT") { console.error("[Core/Plugin] The plugin directory '%s' can not be found", err.path); console.log("[Core/Plugin] Attempting to create plugin directory '%s'", err.path); fs.mkdirsSync(err.path); + }else{ + console.error("[Core/Plugin] ", err); } } }
Allow plugins to be disabled by prefixing their name with a `_`
basement-js_basement
train
2aae06e78d5d8e04c80b65f4e7a2656ca71978e4
diff --git a/tests/gui/widget/test_restore_session.py b/tests/gui/widget/test_restore_session.py index <HASH>..<HASH> 100644 --- a/tests/gui/widget/test_restore_session.py +++ b/tests/gui/widget/test_restore_session.py @@ -188,6 +188,9 @@ def trigger_gui_signals_first_run(*args): library_os_path = library_manager.get_os_path_to_library("turtle_libraries", "clear_field")[0] call_gui_callback(menubar_ctrl.on_open_activate, None, None, library_os_path) call_gui_callback(testing_utils.wait_for_gui) + # use artificial marked dirty to check for recovery of the flag + assert not sm_manager_model.get_selected_state_machine_model().state_machine.marked_dirty + sm_manager_model.get_selected_state_machine_model().state_machine._marked_dirty = True assert sm_manager_model.get_selected_state_machine_model().state_machine.marked_dirty # library with changes
fix-test(restore session): adapt to additional meta data. Adding meta data for 7c<I>e exposed this issue and already caused the respective test to fail since version <I>.
DLR-RM_RAFCON
train
3cd06f17c85e1b1570db8d2575a8afeea8410cd5
diff --git a/resources/gen-changelog.js b/resources/gen-changelog.js index <HASH>..<HASH> 100644 --- a/resources/gen-changelog.js +++ b/resources/gen-changelog.js @@ -42,6 +42,20 @@ if (!GH_TOKEN) { process.exit(1); } +if (!packageJSON.repository || typeof packageJSON.repository.url !== 'string') { + console.error('package.json is missing repository.url string!'); + process.exit(1); +} + +const match = /https:\/\/github.com\/([^/]+)\/([^/]+).git/.exec( + packageJSON.repository.url, +); +if (match == null) { + console.error('Can not extract organisation and repo name from repo URL!'); + process.exit(1); +} +const [, githubOrg, githubRepo] = match; + getChangeLog() .then(changelog => process.stdout.write(changelog)) .catch(error => console.error(error)); @@ -120,7 +134,7 @@ function graphqlRequestImpl(query, variables, cb) { headers: { Authorization: 'bearer ' + GH_TOKEN, 'Content-Type': 'application/json', - 'User-Agent': 'graphql-js-changelog', + 'User-Agent': 'gen-changelog', }, }); @@ -200,7 +214,7 @@ async function batchCommitInfo(commits) { const response = await graphqlRequest(` { - repository(owner: "graphql", name: "graphql-js") { + repository(owner: "${githubOrg}", name: "${githubRepo}") { ${commitsSubQuery} } } @@ -217,7 +231,7 @@ function commitsInfoToPRs(commits) { const prs = {}; for (const commit of commits) { const associatedPRs = commit.associatedPullRequests.nodes.filter( - pr => pr.repository.nameWithOwner === 'graphql/graphql-js', + pr => pr.repository.nameWithOwner === `${githubOrg}/${githubRepo}`, ); if (associatedPRs.length === 0) { throw new Error(
gen-changelog: remove hardcoded GitHub org and repo (#<I>)
graphql_graphql-js
train
cf10d558eba4c3877ab31406ca00133f66ac2dcd
diff --git a/billy/tests/__init__.py b/billy/tests/__init__.py index <HASH>..<HASH> 100644 --- a/billy/tests/__init__.py +++ b/billy/tests/__init__.py @@ -1,19 +1,10 @@ from billy.conf import settings +settings.MONGO_DATABASE += '_test' from billy import db from billy.models import base import pymongo -def setup(): - host = settings.MONGO_HOST - port = settings.MONGO_PORT - settings.MONGO_DATABASE += '_test' - db_name = settings.MONGO_DATABASE - - db._db = pymongo.Connection(host, port)[db_name] - base.db = db._db - - def teardown(): host = settings.MONGO_HOST port = settings.MONGO_PORT
a hack to make both test dbs play nice
openstates_billy
train
a7134a5a8d31d9c7bac0129663adbf86fc8cae3f
diff --git a/src/server/api.js b/src/server/api.js index <HASH>..<HASH> 100644 --- a/src/server/api.js +++ b/src/server/api.js @@ -63,6 +63,10 @@ export const createApiServer = ({ db, games }) => { const app = new Koa(); const router = new Router(); + router.get('/games', async ctx => { + ctx.body = games.map(game => game.name); + }); + router.post('/games/:name/create', koaBody(), async ctx => { const gameName = ctx.params.name; let numPlayers = parseInt(ctx.request.body.numPlayers); diff --git a/src/server/api.test.js b/src/server/api.test.js index <HASH>..<HASH> 100644 --- a/src/server/api.test.js +++ b/src/server/api.test.js @@ -391,4 +391,29 @@ describe('.createApiServer', () => { }); }); }); + + describe('gets game list', () => { + let db; + beforeEach(() => { + delete process.env.API_SECRET; + db = { + get: async () => {}, + set: async () => {}, + }; + }); + describe('when given 2 games', async () => { + let response; + beforeEach(async () => { + let app; + let games; + games = [Game({ name: 'foo' }), Game({ name: 'bar' })]; + app = createApiServer({ db, games }); + + response = await request(app.callback()).get('/games'); + }); + test('should get 2 games', async () => { + expect(Object.keys(JSON.parse(response.text)).length).toEqual(2); + }); + }); + }); });
initial game list api endpoint implementation (#<I>)
nicolodavis_boardgame.io
train
d271c08f9e70fba27316a3e0fdb32240fb0ff2d1
diff --git a/go/vt/tabletserver/tx_pool.go b/go/vt/tabletserver/tx_pool.go index <HASH>..<HASH> 100644 --- a/go/vt/tabletserver/tx_pool.go +++ b/go/vt/tabletserver/tx_pool.go @@ -246,7 +246,7 @@ func (txc *TxConnection) discard(conclusion string) { // Ensure PoolConnection won't be accessed after Recycle. txc.PoolConnection = nil if txc.LogToFile.Get() != 0 { - log.Warningf("Logged transaction: %s", txc.Format(nil)) + log.Infof("Logged transaction: %s", txc.Format(nil)) } TxLogger.Send(txc) }
Demote "Logged transaction:" log messages from warning to info. There are already error-level messages highlighting this state, and marking these as "warnings" adds nothing to problem discoverability (while obscuring other, probably more useful warnings). They are useful informational messages, but "info" logging is sufficient here.
vitessio_vitess
train
7c825a116394e9d448546c2b277e7f67957e1bff
diff --git a/src/js/droppable.js b/src/js/droppable.js index <HASH>..<HASH> 100644 --- a/src/js/droppable.js +++ b/src/js/droppable.js @@ -55,6 +55,7 @@ flex = setting.flex, canMoveHere = setting.canMoveHere, dropToClass = setting.dropToClass, + noShadow = setting.noShadow, $ele = $root, isMouseDown = false, $container, @@ -81,31 +82,36 @@ mouseOffset = {left: event.pageX, top: event.pageY}; - // ignore small move - if(Math.abs(mouseOffset.left - startMouseOffset.left) < deviation && Math.abs(mouseOffset.top - startMouseOffset.top) < deviation) return; - - if($shadow === null) // create shadow + if(!$shadow) // create shadow { + // ignore small move + if(Math.abs(mouseOffset.left - startMouseOffset.left) < deviation && Math.abs(mouseOffset.top - startMouseOffset.top) < deviation) return; + var cssPosition = $container.css('position'); if(cssPosition != 'absolute' && cssPosition != 'relative' && cssPosition != 'fixed') { oldCssPosition = cssPosition; $container.css('position', 'relative'); } - $shadow = $ele.clone().removeClass('drag-from').addClass('drag-shadow').css({ - position: 'absolute', - width: $ele.outerWidth(), - transition: 'none' - }).appendTo($container); - $ele.addClass('dragging'); + if (noShadow) { + $shadow = {}; + } else { + $shadow = $ele.clone().removeClass('drag-from').addClass('drag-shadow').css({ + position: 'absolute', + width: $ele.outerWidth(), + transition: 'none' + }).appendTo($container); + } + $ele.addClass('dragging'); $targets.addClass(setting.dropTargetClass); that.trigger('start', { event: event, element: $ele, - shadowElement: $shadow, - targets: $targets + shadowElement: noShadow ? null : $shadow, + targets: $targets, + mouseOffset: mouseOffset }); } @@ -117,8 +123,10 @@ left: offset.left - containerOffset.left, top: offset.top - containerOffset.top }; - $shadow.css(position); - $.extend(lastMouseOffset, mouseOffset); + + if (!noShadow) { + $shadow.css(position); + } var isNew = false; isIn = false; @@ -158,7 +166,9 @@ if(!flex) { $ele.toggleClass('drop-in', isIn); - $shadow.toggleClass('drop-in', isIn); + if (!noShadow) { + $shadow.toggleClass('drop-in', isIn); + } } else if($target !== null && $target.length) { isIn = true; } @@ -173,14 +183,13 @@ selfTarget: isSelf, clickOffset: clickOffset, offset: offset, - position: { - left: offset.left - containerOffset.left, - top: offset.top - containerOffset.top - }, - mouseOffset: mouseOffset + position: position, + mouseOffset: mouseOffset, + lastMouseOffset: lastMouseOffset, }); } + $.extend(lastMouseOffset, mouseOffset); event.preventDefault(); }; @@ -256,7 +265,10 @@ $targets.removeClass(dropToClass).removeClass(setting.dropTargetClass); $ele.removeClass('dragging').removeClass('drag-from'); - $shadow.remove(); + + if (!noShadow) { + $shadow.remove(); + } $shadow = null; that.trigger('finish', eventOptions); diff --git a/src/js/sortable.js b/src/js/sortable.js index <HASH>..<HASH> 100644 --- a/src/js/sortable.js +++ b/src/js/sortable.js @@ -72,6 +72,7 @@ before : options.before, nested : !!containerSelector, mouseButton : options.mouseButton, + noShadow : options.noShadow, stopPropagation : options.stopPropagation, start: function(e) { if(dragCssClass) e.element.addClass(dragCssClass);
* add new option "noShadow" to droppable component.
easysoft_zui
train
e57e6410659745e30ede6f1ad1d2a5c564ac771d
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,5 +13,8 @@ setup( description='Commandline interface to HabitRPG (http://habitrpg.com)', long_description=readme, packages=['hrpg'], + install_requires=[ + 'docopt', + ], scripts=['bin/hrpg'], )
Adding dependency to setup.py. Setting explicit dependency for docopt.
philadams_habitica
train
250ea31af734fba32b44c4bd4ac3f78d483a30e1
diff --git a/java/client/src/org/openqa/selenium/firefox/NotConnectedException.java b/java/client/src/org/openqa/selenium/firefox/NotConnectedException.java index <HASH>..<HASH> 100644 --- a/java/client/src/org/openqa/selenium/firefox/NotConnectedException.java +++ b/java/client/src/org/openqa/selenium/firefox/NotConnectedException.java @@ -21,12 +21,12 @@ import java.io.IOException; import java.net.URL; public class NotConnectedException extends IOException { - public NotConnectedException(URL url, long timeToWaitInMilliSeconds) { - super(getMessage(url, timeToWaitInMilliSeconds)); + public NotConnectedException(URL url, long timeToWaitInMilliSeconds, String consoleOutput) { + super(getMessage(url, timeToWaitInMilliSeconds, consoleOutput)); } - private static String getMessage(URL url, long timeToWaitInMilliSeconds) { - return String.format("Unable to connect to host %s on port %d after %d ms", - url.getHost(), url.getPort(), timeToWaitInMilliSeconds); + private static String getMessage(URL url, long timeToWaitInMilliSeconds, String consoleOutput) { + return String.format("Unable to connect to host %s on port %d after %d ms. Firefox console output:\n%s", + url.getHost(), url.getPort(), timeToWaitInMilliSeconds, consoleOutput); } } diff --git a/java/client/src/org/openqa/selenium/firefox/internal/NewProfileExtensionConnection.java b/java/client/src/org/openqa/selenium/firefox/internal/NewProfileExtensionConnection.java index <HASH>..<HASH> 100644 --- a/java/client/src/org/openqa/selenium/firefox/internal/NewProfileExtensionConnection.java +++ b/java/client/src/org/openqa/selenium/firefox/internal/NewProfileExtensionConnection.java @@ -18,9 +18,6 @@ limitations under the License. package org.openqa.selenium.firefox.internal; -import static org.openqa.selenium.firefox.FirefoxProfile.PORT_PREFERENCE; -import static org.openqa.selenium.internal.SocketLock.DEFAULT_PORT; - import org.openqa.selenium.WebDriverException; import org.openqa.selenium.firefox.ExtensionConnection; import org.openqa.selenium.firefox.FirefoxBinary; @@ -40,6 +37,9 @@ import java.net.MalformedURLException; import java.net.Socket; import java.net.URL; +import static org.openqa.selenium.firefox.FirefoxProfile.PORT_PREFERENCE; +import static org.openqa.selenium.internal.SocketLock.DEFAULT_PORT; + public class NewProfileExtensionConnection implements ExtensionConnection { private final static int BUFFER_SIZE = 4096; @@ -93,7 +93,7 @@ public class NewProfileExtensionConnection implements ExtensionConnection { while (!isConnected()) { if (waitUntil < System.currentTimeMillis()) { throw new NotConnectedException( - delegate.getAddressOfRemoteServer(), connectTimeout); + delegate.getAddressOfRemoteServer(), connectTimeout, process.getConsoleOutput()); } try {
KristianRosenvold: Included the error message from the browser when browser start times out. I'm sure there is an issue out there being fixed by this. My output now looks like this if I set an incorrect DISPLAY variable: org.openqa.selenium.firefox.NotConnectedException: Unable to connect to host <I> on port <I> after <I> ms. Console output: Error: cannot open display: :<I> r<I>
SeleniumHQ_selenium
train
69fd5a1527b690b45b043dd3b59eff05e67091dc
diff --git a/common/step_create_cdrom.go b/common/step_create_cdrom.go index <HASH>..<HASH> 100644 --- a/common/step_create_cdrom.go +++ b/common/step_create_cdrom.go @@ -236,12 +236,26 @@ func (s *StepCreateCD) AddFile(dst, src string) error { return err } + // file is a directory, so we need to parse the filename into a path to + // dicard and a basename + discardPath, _ := filepath.Split(src) + // Add a directory and its subdirectories visit := func(pathname string, fi os.FileInfo, err error) error { if err != nil { return err } + // Clean up pathing so that we preserve the base directory provided by + // the user but not the local pathing to that directory. + allDirs, base := filepath.Split(pathname) + intermediaryDirs := strings.Replace(allDirs, discardPath, "", 1) + + dstPath := filepath.Join(dst, base) + if intermediaryDirs != "" { + dstPath = filepath.Join(dst, intermediaryDirs, base) + } + // add a file if !fi.IsDir() { inputF, err := os.Open(pathname) @@ -250,26 +264,26 @@ func (s *StepCreateCD) AddFile(dst, src string) error { } defer inputF.Close() - fileDst, err := os.Create(filepath.Join(dst, pathname)) + fileDst, err := os.Create(dstPath) if err != nil { - return fmt.Errorf("Error opening file %s on CD", src) + return fmt.Errorf("Error opening file %s on CD: %s", dstPath, err) } defer fileDst.Close() nBytes, err := io.Copy(fileDst, inputF) if err != nil { - return fmt.Errorf("Error copying %s to CD", src) + return fmt.Errorf("Error copying %s to CD: %s", dstPath, err) } - s.filesAdded[pathname] = true - log.Printf("Wrote %d bytes to %s", nBytes, pathname) + s.filesAdded[dstPath] = true + log.Printf("Wrote %d bytes to %s", nBytes, dstPath) return err } if fi.Mode().IsDir() { // create the directory on the CD, continue walk. - err := os.Mkdir(filepath.Join(dst, pathname), fi.Mode()) + err := os.MkdirAll(dstPath, fi.Mode()) if err != nil { err = fmt.Errorf("error creating new directory %s: %s", - filepath.Join(dst, pathname), err) + dstPath, err) } return err }
fix pathing in cd_files copy to make sure directories make it into the cd root.
hashicorp_packer
train
026b1daecf3af3c99eee6fa5e3c6eaa4e78deee7
diff --git a/src/input/pointerevent.js b/src/input/pointerevent.js index <HASH>..<HASH> 100644 --- a/src/input/pointerevent.js +++ b/src/input/pointerevent.js @@ -685,6 +685,10 @@ throw new me.Error("invalid event type : " + eventType); } + if (typeof region === "undefined") { + throw new me.Error("registerPointerEvent: region for " + region + " event is undefined "); + } + var eventTypes = findAllActiveEvents(activeEventList, pointerEventMap[eventType]); // register the event
throw an exception if the given region is undefined when registering an event
melonjs_melonJS
train
215e63b4b32c5217193bbd3c5ddcedfa9d54badb
diff --git a/code/pages/WikiPage.php b/code/pages/WikiPage.php index <HASH>..<HASH> 100644 --- a/code/pages/WikiPage.php +++ b/code/pages/WikiPage.php @@ -258,12 +258,23 @@ class WikiPage extends Page include_once SIMPLEWIKI_DIR.'/thirdparty/htmlpurifier-4.0.0-lite/library/HTMLPurifier.auto.php'; $purifier = new HTMLPurifier(); $content = $purifier->purify($content); + $content = preg_replace_callback('/\%5B(.*?)\%5D/', array($this, 'reformatShortcodes'), $content); } return $content; } /** + * Reformats shortcodes after being run through htmlpurifier + * + * @param array $matches + */ + public function reformatShortcodes($matches) { + $val = urldecode($matches[1]); + return '['.$val.']'; + } + + /** * Get the root of the wiki that this wiki page exists in * * @return WikiPage
BUGFIX: Convert purified shortcodes back to what they once were
nyeholt_silverstripe-simplewiki
train
ce695fa9eeb9639671fda9f67dc06e15c9dcd821
diff --git a/lib/highLevelProducer.js b/lib/highLevelProducer.js index <HASH>..<HASH> 100644 --- a/lib/highLevelProducer.js +++ b/lib/highLevelProducer.js @@ -83,7 +83,7 @@ HighLevelProducer.prototype.send = function (payloads, cb) { HighLevelProducer.prototype.buildPayloads = function (payloads) { var self = this; return payloads.map(function (p) { - p.partition = p.partition || self.client.nextPartition(p.topic); + p.partition = p.hasOwnProperty('partition') ? p.partition : self.client.nextPartition(p.topic); p.attributes = p.attributes || 0; var messages = _.isArray(p.messages) ? p.messages : [p.messages]; messages = messages.map(function (message) {
fix(HighLevelProducer): allow sending to partition 0
SOHU-Co_kafka-node
train
532a24181e35e86085264589f8814ed6346d2b95
diff --git a/frasco/users/avatars.py b/frasco/users/avatars.py index <HASH>..<HASH> 100644 --- a/frasco/users/avatars.py +++ b/frasco/users/avatars.py @@ -1,6 +1,7 @@ from frasco.ext import * from frasco.upload import url_for_upload from frasco.helpers import url_for +from frasco.utils import slugify from flask import current_app, request import sqlalchemy as sqla import hashlib @@ -80,6 +81,7 @@ def url_for_avatar(user): username = username.lower().encode('utf-8') else: username = username.lower() + username = slugify(username) hash = hashlib.md5(username).hexdigest() email = getattr(user, state.options["gravatar_email_column"] or 'email', None) if email:
slugify username before using it in the avatar url
frascoweb_frasco
train
78bcd6e8310002ab8fc3bc10c330014e42b4ad57
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -48,6 +48,22 @@ module.exports = function (grunt) { ] }, + jekyll: { + options: { + src: 'docs', + dest: 'docs/_site' + }, + build: { + d: null + }, + serve: { + options: { + serve: true, + watch: true + } + } + }, + jshint: { options: { jshintrc: true @@ -182,6 +198,7 @@ module.exports = function (grunt) { grunt.loadNpmTasks('grunt-contrib-uglify'); grunt.loadNpmTasks('grunt-contrib-watch'); + grunt.loadNpmTasks('grunt-jekyll'); grunt.loadNpmTasks('grunt-sass'); grunt.registerTask('default', ['compile', 'test', 'minify']); @@ -189,4 +206,8 @@ module.exports = function (grunt) { grunt.registerTask('compile', ['requirejs', 'sass:dev']); grunt.registerTask('minify', ['uglify', 'sass:dist']); grunt.registerTask('test', ['qunit', 'jshint']); + + grunt.registerTask('docs', ['jekyll:serve']); + + grunt.registerTask('release', ['default', 'jekyll:build']); };
Added docs build for grunt
select2_select2
train
cd5b2e692b3a5792ccc736d38a9536250f872d6e
diff --git a/src/Storage/Database/Schema/Comparison/Sqlite.php b/src/Storage/Database/Schema/Comparison/Sqlite.php index <HASH>..<HASH> 100644 --- a/src/Storage/Database/Schema/Comparison/Sqlite.php +++ b/src/Storage/Database/Schema/Comparison/Sqlite.php @@ -22,7 +22,7 @@ class Sqlite extends BaseComparator */ protected function setIgnoredChanges() { - if (DBAL\Version::compare('2.7.0') >= 0) { + if (DBAL\Version::compare('2.7.0') > 0) { /** @deprecated Drop when minimum PHP version is 7.1 or greater. */ $this->ignoredChanges[] = new IgnoredChange('changedColumns', 'type', 'text', 'json'); $this->ignoredChanges[] = new IgnoredChange('changedColumns', 'type', 'text', 'json_array');
Adjust comparison: -1 if older, 0 if it is the same, 1 if the version passed as argument is newer
bolt_bolt
train
75cfd0bc7aa396e9f08131de3f78fe8f785f2269
diff --git a/src/main/java/com/podio/space/SpaceMemberV2.java b/src/main/java/com/podio/space/SpaceMemberV2.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/podio/space/SpaceMemberV2.java +++ b/src/main/java/com/podio/space/SpaceMemberV2.java @@ -61,7 +61,7 @@ public class SpaceMemberV2 { * @param profile the profile to set */ @JsonProperty("profile") - public void setUser(Profile profile) { + public void setProfile(Profile profile) { this.profile = profile; }
Corrected setter method setUser -> setProfile
podio_podio-java
train
924011ec7e31daea718f2141c92867b2ad16d7ea
diff --git a/test/test-http.js b/test/test-http.js index <HASH>..<HASH> 100644 --- a/test/test-http.js +++ b/test/test-http.js @@ -1,7 +1,9 @@ +var serverTest = require('servertest') var webhook = require('..') var test = require('tape') var http = require('http') var spec = require('./spec') +var fs = require('fs') var _webhook = webhook('/', function hook (err, data) { // supress tape output for err @@ -20,5 +22,25 @@ test('Vanilla HTTP', function (t) { Object.keys(tests).forEach(function runner (_test) { t.test(_test, tests[_test]) }) + t.test('recieve data', function (t) { + var file = __dirname + '/fixtures/example.json' + var data = fs.readFileSync(file) + var rs = fs.createReadStream(file) + var opts = { + encoding: 'utf8', + method: 'POST', + headers: { + 'x-webhook-name': 'ticket.created' + } + } + + function hook (err, _data) { + t.ifError(err, 'no error') + t.deepEqual(_data, JSON.parse(data.toString()), 'original data returned') + t.end() + } + + rs.pipe(serverTest(http.createServer(webhook('/', hook)), '/', opts)) + }) t.end() })
Update tests. We now test the validity of the data object returned by calling `callback(err, data)`.
joshgillies_tito-webhook
train
e1ef2baad7bd2081e94af93b5487afe96a7b8292
diff --git a/superset/migrations/versions/abe27eaf93db_add_extra_config_column_to_alerts.py b/superset/migrations/versions/abe27eaf93db_add_extra_config_column_to_alerts.py index <HASH>..<HASH> 100644 --- a/superset/migrations/versions/abe27eaf93db_add_extra_config_column_to_alerts.py +++ b/superset/migrations/versions/abe27eaf93db_add_extra_config_column_to_alerts.py @@ -17,14 +17,14 @@ """add_extra_config_column_to_alerts Revision ID: abe27eaf93db -Revises: aea15018d53b +Revises: 0ca9e5f1dacd Create Date: 2021-12-02 12:03:20.691171 """ # revision identifiers, used by Alembic. revision = "abe27eaf93db" -down_revision = "aea15018d53b" +down_revision = "0ca9e5f1dacd" import sqlalchemy as sa from alembic import op diff --git a/superset/migrations/versions/b92d69a6643c_rename_csv_to_file.py b/superset/migrations/versions/b92d69a6643c_rename_csv_to_file.py index <HASH>..<HASH> 100644 --- a/superset/migrations/versions/b92d69a6643c_rename_csv_to_file.py +++ b/superset/migrations/versions/b92d69a6643c_rename_csv_to_file.py @@ -17,14 +17,14 @@ """rename_csv_to_file Revision ID: b92d69a6643c -Revises: 32646df09c64 +Revises: aea15018d53b Create Date: 2021-09-19 14:42:20.130368 """ # revision identifiers, used by Alembic. revision = "b92d69a6643c" -down_revision = "32646df09c64" +down_revision = "aea15018d53b" import sqlalchemy as sa from alembic import op diff --git a/superset/migrations/versions/f9847149153d_add_certifications_columns_to_slice.py b/superset/migrations/versions/f9847149153d_add_certifications_columns_to_slice.py index <HASH>..<HASH> 100644 --- a/superset/migrations/versions/f9847149153d_add_certifications_columns_to_slice.py +++ b/superset/migrations/versions/f9847149153d_add_certifications_columns_to_slice.py @@ -17,7 +17,7 @@ """add_certifications_columns_to_slice Revision ID: f9847149153d -Revises: 0ca9e5f1dacd +Revises: 32646df09c64 Create Date: 2021-11-03 14:07:09.905194 """ @@ -27,7 +27,7 @@ import sqlalchemy as sa from alembic import op revision = "f9847149153d" -down_revision = "0ca9e5f1dacd" +down_revision = "32646df09c64" def upgrade():
fix(migrations): reorder skipped <I> migrations (#<I>)
apache_incubator-superset
train
05c4fa5d0fa7fdc8df763c80971858123e1b16bb
diff --git a/Zebra_cURL.php b/Zebra_cURL.php index <HASH>..<HASH> 100644 --- a/Zebra_cURL.php +++ b/Zebra_cURL.php @@ -1564,7 +1564,7 @@ class Zebra_cURL { ''; // if we have a body, we're not doing a binary transfer, and _htmlentities is set to TRUE, run htmlentities() on it - if (!empty($result->body) && !isset($this->options[CURLOPT_BINARYTRANSFER]) && $this->_htmlentities) htmlentities($result->body); + if (!empty($result->body) && !isset($this->options[CURLOPT_BINARYTRANSFER]) && $this->_htmlentities) $result->body = htmlentities($result->body); // get CURLs response code and associated message $result->response = array($this->_response_messages[$info['result']], $info['result']);
Minor bug fix: the encoded result of htmlentities() is now actually stored back in $result->body
stefangabos_Zebra_cURL
train
40a468e690201090846dab800045a5fb4dc6c707
diff --git a/slim/Slim.php b/slim/Slim.php index <HASH>..<HASH> 100644 --- a/slim/Slim.php +++ b/slim/Slim.php @@ -130,7 +130,7 @@ class Slim { * @param mixed $callable Anything that returns true for is_callable() */ public static function get($pattern, $callable) { - self::router()->map($pattern, $callable, Request::METHOD_GET); + return self::router()->map($pattern, $callable, Request::METHOD_GET); } /** @@ -143,7 +143,7 @@ class Slim { * @param mixed $callable Anything that returns true for is_callable() */ public static function post($pattern, $callable) { - self::router()->map($pattern, $callable, Request::METHOD_POST); + return self::router()->map($pattern, $callable, Request::METHOD_POST); } /** @@ -156,7 +156,7 @@ class Slim { * @param mixed $callable Anything that returns true for is_callable() */ public static function put($pattern, $callable) { - self::router()->map($pattern, $callable, Request::METHOD_PUT); + return self::router()->map($pattern, $callable, Request::METHOD_PUT); } /** @@ -169,7 +169,7 @@ class Slim { * @param mixed $callable Anything that returns true for is_callable() */ public static function delete($pattern, $callable) { - self::router()->map($pattern, $callable, Request::METHOD_DELETE); + return self::router()->map($pattern, $callable, Request::METHOD_DELETE); } /**
Updating Slim::get, Slim::post, Slim::put, and Slim::delete to return the Route created for the given pattern and method
slimphp_Slim
train
858bd8554d27f3144c43e4edba18aba81d290236
diff --git a/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigFileApplicationListenerTests.java b/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigFileApplicationListenerTests.java index <HASH>..<HASH> 100644 --- a/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigFileApplicationListenerTests.java +++ b/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigFileApplicationListenerTests.java @@ -457,7 +457,7 @@ public class ConfigFileApplicationListenerTests { } private void withDebugLogging(Runnable runnable) { - LoggerContext loggingContext = (LoggerContext) LogManager.getContext(true); + LoggerContext loggingContext = (LoggerContext) LogManager.getContext(false); org.apache.logging.log4j.core.config.Configuration configuration = loggingContext .getConfiguration(); configuration.addLogger(ConfigFileApplicationListener.class.getName(),
Align test logging config with the latest change in Spring Framework. See spring-projects/spring-framework@ea5cb<I>d<I>f
spring-projects_spring-boot
train
ac303e12aa0a5dd9cb1921df58bf85caa3c86532
diff --git a/src/detection-strategy/scroll.js b/src/detection-strategy/scroll.js index <HASH>..<HASH> 100644 --- a/src/detection-strategy/scroll.js +++ b/src/detection-strategy/scroll.js @@ -229,10 +229,12 @@ module.exports = function(options) { } function addEvent(el, name, cb) { - if (el.attachEvent) { + if (el.addEventListener) { + el.addEventListener(name, cb); + } else if(el.attachEvent) { el.attachEvent("on" + name, cb); } else { - el.addEventListener(name, cb); + return reporter.error("[scroll] Don't know how to add event listeners."); } }
Now preferring addEventListener over attachEvent
wnr_element-resize-detector
train
f6b3453a2eeb2100e5ddaa3c328f6254abc96502
diff --git a/modin/engines/ray/generic/io.py b/modin/engines/ray/generic/io.py index <HASH>..<HASH> 100644 --- a/modin/engines/ray/generic/io.py +++ b/modin/engines/ray/generic/io.py @@ -455,6 +455,21 @@ class RayIO(BaseIO): import sqlalchemy as sa + try: + import psycopg2 as pg + + if isinstance(con, pg.extensions.connection): + con = "postgresql+psycopg2://{}:{}@{}{}/{}".format( # Table in DB + con.info.user, # <Username>: for DB + con.info.password, # Password for DB + con.info.host if con.info.host != "/tmp" else "", # @<Hostname> + (":" + str(con.info.port)) + if con.info.host != "/tmp" + else "", # <port> + con.info.dbname, # Table in DB + ) + except ImportError: + pass # In the case that we are given a SQLAlchemy Connection or Engine, the objects # are not pickleable. We have to convert it to the URL string and connect from # each of the workers.
Fix psycopg2 connection functionality (#<I>)
modin-project_modin
train
dd9b9143f95872484fd5240940c71ed3cd857932
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -68,6 +68,11 @@ For Vkontakte API there is a possibility of providing additional options. For in SocialPoster.write(:vk, 'Text on the Group Wall...', nil, owner_id: '-GROUP_ID') ``` +You can specify extra parameters for Facebook API too. For instance, you can post links, or pictures like this: +```ruby +SocialPoster.write(:fb, 'Text on the Wall...', nil, link: 'http://google.com', picture: 'https://www.google.com/images/srpr/logo11w.png') +``` + Contribute ---------- diff --git a/lib/social_poster.rb b/lib/social_poster.rb index <HASH>..<HASH> 100644 --- a/lib/social_poster.rb +++ b/lib/social_poster.rb @@ -44,7 +44,7 @@ module SocialPoster def self.write(network, text, title = '', options = {}) site = case network.to_sym when :fb - site = Poster::Facebook.new + site = Poster::Facebook.new(options) when :vk site = Poster::Vkontakte.new(options) when :twitter diff --git a/lib/social_poster/poster/facebook.rb b/lib/social_poster/poster/facebook.rb index <HASH>..<HASH> 100644 --- a/lib/social_poster/poster/facebook.rb +++ b/lib/social_poster/poster/facebook.rb @@ -6,8 +6,12 @@ module SocialPoster class Facebook include SocialPoster::Helper + def initialize(options) + @options = options + end + def write(text, title) - FbGraph::User.me(config_key :access_token).feed!(message: text) + FbGraph::User.me(config_key :access_token).feed!({message: text}.merge(@options)) end end
added extra options for fb api
HeeL_social_poster
train
f07b4f2d331687b820f0298d231f494b9f4f95d1
diff --git a/lib/simple_calendar/view_helpers.rb b/lib/simple_calendar/view_helpers.rb index <HASH>..<HASH> 100644 --- a/lib/simple_calendar/view_helpers.rb +++ b/lib/simple_calendar/view_helpers.rb @@ -1,8 +1,6 @@ module SimpleCalendar module ViewHelpers - WEEKDAYS = Date::DAYNAMES.map {|d| d.downcase.to_sym } - def calendar(events, options={}, &block) raise 'SimpleCalendar requires a block to be passed in' unless block_given? @@ -27,11 +25,8 @@ module SimpleCalendar private def build_range(selected_month, options) - start_date = selected_month.beginning_of_month - start_date = start_date.send(options[:start_day].to_s+'?') ? start_date : start_date.beginning_of_week(options[:start_day]) - - end_date = selected_month.end_of_month - end_date = end_date.saturday? ? end_date : end_date.end_of_week(options[:start_day]) + start_date = selected_month.beginning_of_month.beginning_of_week(options[:start_day]) + end_date = selected_month.end_of_month.end_of_week(options[:start_day]) (start_date..end_date).to_a end @@ -43,7 +38,7 @@ module SimpleCalendar content_tag(:table, :class => "table table-bordered table-striped calendar") do tags << month_header(selected_month, options) day_names = I18n.t("date.abbr_day_names") - day_names.rotate(WEEKDAYS.index(options[:start_date]) || 0) + day_names.rotate(Date::DAYS_INTO_WEEK[options[:start_date]] || 0) tags << content_tag(:thead, content_tag(:tr, day_names.collect { |name| content_tag :th, name, :class => (selected_month.month == Date.today.month && Date.today.strftime("%a") == name ? "current-day" : nil)}.join.html_safe)) tags << content_tag(:tbody, :'data-month'=>selected_month.month, :'data-year'=>selected_month.year) do @@ -105,8 +100,8 @@ module SimpleCalendar end # Generates the link to next and previous months - def month_link(text, month, opts={}) - link_to(text, "#{simple_calendar_path}?month=#{month.month}&year=#{month.year}", opts) + def month_link(text, date, opts={}) + link_to(text, {:month => date.month, :year => date.year}, opts) end # Returns the full path to the calendar
Simplified next and previous month links
excid3_simple_calendar
train
c682c00b058de21e927c3a6c42fadb34c9745767
diff --git a/src/utils.js b/src/utils.js index <HASH>..<HASH> 100644 --- a/src/utils.js +++ b/src/utils.js @@ -1,19 +1,9 @@ var _ = require('lodash'); -var driver, C, offsetsByDirection; +var driver, C, offsetsByDirection = [, [0,-1], [1,-1], [1,0], [1,1], [0,1], [-1,1], [-1,0], [-1,-1]]; function loadDriver() { C = driver.constants; - offsetsByDirection = { - [C.TOP]: [0,-1], - [C.TOP_RIGHT]: [1,-1], - [C.RIGHT]: [1,0], - [C.BOTTOM_RIGHT]: [1,1], - [C.BOTTOM]: [0,1], - [C.BOTTOM_LEFT]: [-1,1], - [C.LEFT]: [-1,0], - [C.TOP_LEFT]: [-1,-1] - }; } try { @@ -32,7 +22,9 @@ exports.getDriver = function getDriver() { exports.getRuntimeDriver = function getRuntimeDriver() { try { - return require('~runtime-driver'); + driver = require('~runtime-driver'); + loadDriver(); + return driver; } catch (e) { return exports.getDriver();
♻ update constants on driver load DEV-<I>
screeps_engine
train
1e11d71f855546a4281fab9ef3750c1d650ae2ac
diff --git a/test/tests/index.js b/test/tests/index.js index <HASH>..<HASH> 100644 --- a/test/tests/index.js +++ b/test/tests/index.js @@ -61,10 +61,9 @@ describe("Index", function() { })) .then(function() { return index.addAll(undefined, undefined, function() { - // ensure that the add callback is called, - // and that there is no deadlock if we call - // a sync libgit2 function from the callback addCallbacksCount++; + // ensure that there is no deadlock if we call + // a sync libgit2 function from the callback test.repository.path(); return 0; // confirm add @@ -98,6 +97,7 @@ describe("Index", function() { differentFileName: "this has a different name and shouldn't be deleted" }; var fileNames = Object.keys(fileContent); + var removeCallbacksCount = 0; return Promise.all(fileNames.map(function(fileName) { return writeFile( @@ -114,9 +114,15 @@ describe("Index", function() { assert.equal(newFiles.length, 3); - return index.removeAll("newFile*"); + return index.removeAll("newFile*", function() { + removeCallbacksCount++; + + return 0; // confirm remove + }); }) .then(function() { + assert.equal(removeCallbacksCount, 2); + var newFiles = index.entries().filter(function(entry) { return ~fileNames.indexOf(entry.path); }); @@ -141,6 +147,7 @@ describe("Index", function() { newFile2: "and this will have more content" }; var fileNames = Object.keys(fileContent); + var updateCallbacksCount = 0; return Promise.all(fileNames.map(function(fileName) { return writeFile( @@ -160,9 +167,15 @@ describe("Index", function() { return fse.remove(path.join(repo.workdir(), fileNames[0])); }) .then(function() { - return index.updateAll("newFile*"); + return index.updateAll("newFile*", function() { + updateCallbacksCount++; + + return 0; // confirm update + }); }) .then(function() { + assert.equal(updateCallbacksCount, 1); + var newFiles = index.entries().filter(function(entry) { return ~fileNames.indexOf(entry.path); });
Add tests for callbacks passed directly as parameters
nodegit_nodegit
train
36e3b33bbc0a707ff8ff69e4a5e05922669ddd46
diff --git a/gandi/cli/core/cli.py b/gandi/cli/core/cli.py index <HASH>..<HASH> 100644 --- a/gandi/cli/core/cli.py +++ b/gandi/cli/core/cli.py @@ -71,6 +71,55 @@ class GandiCLI(click.Group): ]) + def resolve_command(self, ctx, args): + cmd_name = args[0] + + sub_cmd = False + if len(args) > 1: + # XXX: dirty hack to handle namespaces by merging the first 2 args + # i.e : paas + list = 'paas list' + new_cmd_name = ' '.join(args[0:2]) + cmd = click.Group.get_command(self, ctx, new_cmd_name) + if cmd is not None: + sub_cmd = True + cmd_name = new_cmd_name + + cmd = click.Group.get_command(self, ctx, cmd_name) + if cmd is not None: + if sub_cmd: + del args[1] + return cmd_name, cmd, args[1:] + + matches = [x for x in self.list_commands(ctx) + if x.startswith(cmd_name)] + if not matches: + return None + elif len(matches) == 1: + if sub_cmd: + del args[1] + cmd = click.Group.get_command(self, ctx, matches[0]) + return cmd_name, cmd, args[1:] + + formatter = ctx.make_formatter() + rows = [] + for matched in sorted(matches): + cmd = click.Group.get_command(self, ctx, matched) + # What is this, the tool lied about a command. Ignore it + if cmd is None: + continue + + help = cmd.short_help or '' + rows.append((matched, help)) + + if rows: + formatter.write_dl(rows) + + print(formatter.getvalue().rstrip('\n')) + ctx.exit() + + if click.parser.split_opt(cmd_name)[0]: + click.Group.parse_args(ctx, ctx.args) + def get_command(self, ctx, cmd_name): """ Retrieve command from internal list.
Fixes issue #<I>: compatibility with click 6.x. This commit adds the resolve_command override so we can keep our subcommands working with newer upstream click versions.
Gandi_gandi.cli
train
8e187d19628205763c44f4436f763d7dfd4c3b5e
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -148,7 +148,7 @@ "pug": "^2.0.3", "redis": "^2.8.0", "request": "^2.88.0", - "restify": "^7.7.0", + "restify": "^8.3.1", "restify-clients": "^2.6.2", "rimraf": "^2.6.2", "send": "^0.16.2", diff --git a/test/config.js b/test/config.js index <HASH>..<HASH> 100644 --- a/test/config.js +++ b/test/config.js @@ -575,6 +575,9 @@ test('disableInstrumentations', function (t) { var mysql2Version = require('mysql2/package.json').version var modules = new Set(Instrumentation.modules) + if (semver.lt(process.version, '8.6.0')) { + modules.delete('restify') + } if (semver.lt(process.version, '8.3.0')) { modules.delete('http2') } diff --git a/test/instrumentation/modules/restify/basic.js b/test/instrumentation/modules/restify/basic.js index <HASH>..<HASH> 100644 --- a/test/instrumentation/modules/restify/basic.js +++ b/test/instrumentation/modules/restify/basic.js @@ -6,6 +6,11 @@ const agent = require('../../../..').start({ captureExceptions: false }) +const pkg = require('restify/package.json') +const semver = require('semver') + +if (semver.lt(process.version, '8.6.0') && semver.gte(pkg.version, '8.0.0')) process.exit() + const http = require('http') const once = require('once') diff --git a/test/instrumentation/modules/restify/set-framework.js b/test/instrumentation/modules/restify/set-framework.js index <HASH>..<HASH> 100644 --- a/test/instrumentation/modules/restify/set-framework.js +++ b/test/instrumentation/modules/restify/set-framework.js @@ -5,6 +5,11 @@ const agent = require('../../../..').start({ metricsInterval: 0 }) +const pkg = require('restify/package.json') +const semver = require('semver') + +if (semver.lt(process.version, '8.6.0') && semver.gte(pkg.version, '8.0.0')) process.exit() + let asserts = 0 agent.setFramework = function ({ name, version, overwrite }) {
chore(package): update restify to version <I> (#<I>)
elastic_apm-agent-nodejs
train
15c0f31ce268952c915d519f7d58d9981bd2d9f9
diff --git a/cdm/src/main/java/ucar/nc2/Variable.java b/cdm/src/main/java/ucar/nc2/Variable.java index <HASH>..<HASH> 100644 --- a/cdm/src/main/java/ucar/nc2/Variable.java +++ b/cdm/src/main/java/ucar/nc2/Variable.java @@ -32,7 +32,6 @@ */ package ucar.nc2; -import thredds.catalog2.ThreddsMetadata; import ucar.ma2.*; import ucar.nc2.constants.CDM; import ucar.nc2.constants.CF;
removed unused import in Variable that was breaking gradle build
Unidata_thredds
train
0efc33c156fc071c6c784ab25bd09fb0db84316a
diff --git a/src/saml2/client.py b/src/saml2/client.py index <HASH>..<HASH> 100644 --- a/src/saml2/client.py +++ b/src/saml2/client.py @@ -73,8 +73,11 @@ class Saml2Client(object): self.users.cache, log=None, vorg_conf=None) self.sec = security_context(config) - - self.debug = debug + + if not debug: + self.debug = self.config.debug() + else: + self.debug = debug def _init_request(self, request, destination): #request.id = sid()
Debug as defined in the config file
IdentityPython_pysaml2
train
41ec160913824cfc354916edb4838329cf9aa021
diff --git a/hashring/hashring_test.go b/hashring/hashring_test.go index <HASH>..<HASH> 100644 --- a/hashring/hashring_test.go +++ b/hashring/hashring_test.go @@ -399,7 +399,7 @@ func (s *ProcessMembershipChangesSuite) TestAddMember0() { s.ring.ProcessMembershipChanges([]membership.MemberChange{ {After: s.members[0]}, }) - mock.AssertExpectationsForObjects(s.T(), s.l.Mock) + mock.AssertExpectationsForObjects(s.T(), &s.l.Mock) s.Equal(1, s.ring.ServerCount(), "unexpected count of members in ring") } @@ -411,7 +411,7 @@ func (s *ProcessMembershipChangesSuite) TestAddMember1() { s.ring.ProcessMembershipChanges([]membership.MemberChange{ {After: s.members[1]}, }) - mock.AssertExpectationsForObjects(s.T(), s.l.Mock) + mock.AssertExpectationsForObjects(s.T(), &s.l.Mock) s.Equal(2, s.ring.ServerCount(), "unexpected count of members in ring") } @@ -425,7 +425,7 @@ func (s *ProcessMembershipChangesSuite) TestRemoveMember0AddMember2() { {After: s.members[2]}, {Before: s.members[0]}, }) - mock.AssertExpectationsForObjects(s.T(), s.l.Mock) + mock.AssertExpectationsForObjects(s.T(), &s.l.Mock) s.Equal(2, s.ring.ServerCount(), "unexpected count of members in ring") } @@ -453,7 +453,7 @@ func (s *ProcessMembershipChangesSuite) TestChangeIdentityMember2() { s.ring.ProcessMembershipChanges([]membership.MemberChange{ {Before: s.members[1], After: memberNewIdentity}, }) - mock.AssertExpectationsForObjects(s.T(), s.l.Mock) + mock.AssertExpectationsForObjects(s.T(), &s.l.Mock) s.Equal(2, s.ring.ServerCount(), "unexpected count of members in ring") }
Fix mock.AssertExpectationsForObjects lint issue in hashring tests
uber_ringpop-go
train
48c93453c8dcd0cbc0aa4e7924cf8a44eb5d70b8
diff --git a/jodd-lagarto/src/main/java/jodd/jerry/Jerry.java b/jodd-lagarto/src/main/java/jodd/jerry/Jerry.java index <HASH>..<HASH> 100644 --- a/jodd-lagarto/src/main/java/jodd/jerry/Jerry.java +++ b/jodd-lagarto/src/main/java/jodd/jerry/Jerry.java @@ -989,10 +989,9 @@ public class Jerry implements Iterable<Jerry> { return this; } - /** - * Remove the parents of the set of matched elements from the DOM, leaving - * the matched elements (and siblings, if any) in their place. - * Returns the set of elements that was removed. + /** + * Replace each element in the set of matched elements with the provided + * new content and return the set of elements that was removed. */ public Jerry replaceWith(String html) { if (html == null) { @@ -1094,7 +1093,7 @@ public class Jerry implements Iterable<Jerry> { /** * Remove the parents of the set of matched elements from the DOM, leaving - * the matched elements (and siblings, if any) in their place. + * the matched elements (and siblings, if any) in their place. */ public Jerry unwrap() { if (nodes.length == 0) {
amend commit (documentation got messed up)
oblac_jodd
train
e4399a57246824bbdb4aaf99d7dd0d216aed7e4c
diff --git a/lib/vagrant/util/platform.rb b/lib/vagrant/util/platform.rb index <HASH>..<HASH> 100644 --- a/lib/vagrant/util/platform.rb +++ b/lib/vagrant/util/platform.rb @@ -561,9 +561,9 @@ module Vagrant def wsl_validate_matching_vagrant_versions! valid = false if Util::Which.which("vagrant.exe") - result = Util::Subprocess.execute("vagrant.exe", "version") + result = Util::Subprocess.execute("vagrant.exe", "--version") if result.exit_code == 0 - windows_version = result.stdout.match(/Installed Version: (?<version>[\w.-]+)/) + windows_version = result.stdout.match(/Vagrant (?<version>[\w.-]+)/) if windows_version windows_version = windows_version[:version].strip valid = windows_version == Vagrant::VERSION diff --git a/test/unit/vagrant/util/platform_test.rb b/test/unit/vagrant/util/platform_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/vagrant/util/platform_test.rb +++ b/test/unit/vagrant/util/platform_test.rb @@ -185,8 +185,8 @@ describe Vagrant::Util::Platform do before do allow(Vagrant::Util::Which).to receive(:which).and_return(true) - allow(Vagrant::Util::Subprocess).to receive(:execute).with("vagrant.exe", "version"). - and_return(double(exit_code: 0, stdout: "Installed Version: #{exe_version}")) + allow(Vagrant::Util::Subprocess).to receive(:execute).with("vagrant.exe", "--version"). + and_return(double(exit_code: 0, stdout: "Vagrant #{exe_version}")) end it "should not raise an error" do
Check the Windows-installed Vagrant version using --version. Using the --version flag keeps Vagrant from doing a full startup and prevents the Vagrantfile from being parsed. This makes the version check faster, and stops things like syntax errors within a Vagrantfile from causing an unexpected error. Fixes #<I>
hashicorp_vagrant
train
569e863292ca8f7b42e0f65c8779355a2fd04f2d
diff --git a/lib/epimath100/error.class.rb b/lib/epimath100/error.class.rb index <HASH>..<HASH> 100644 --- a/lib/epimath100/error.class.rb +++ b/lib/epimath100/error.class.rb @@ -5,13 +5,15 @@ class Error ERR_HIGH = "Fatal Error" ERR_MEDIUM = "Error" ERR_LOW = "Warning" + ERR_DEFAULT = "Default" ERR_COLOR_RED = "0;31" ERR_COLOR_GREEN = "0;32" ERR_COLOR_YELLOW = "1;33" ERR_COLOR_BLUE = "0;34" ERR_COLOR_ORANGE = "0;33" @@errors = 0 - + @@error_default = ERR_HIGH + # The function will check if the specified value can be converted to a Numerical value. # == Parameters: # type:: @@ -34,18 +36,34 @@ class Error return false end end - + + # return @@error_default + def self.default + return @@error_default + end + + # set @@error_default + def self.default=(level) + if level != Error::ERR_HIGH and level != Error::ERR_MEDIUM and level != Error::ERR_LOW + self.call "Error::default= : error level invalid", Error::ERR_MEDIUM + @@error_default = Error::ERR_HIGH + else + @@error_default = level + end + end + # "call" is a function you can acces with: # Error.call "message", ERR_LEVEL # == The error's levels are : # * ERR_HIGH # * ERR_MEDIUM # * ERR_LOW + # * ERR_DEFAULT # The error's level influence the color (visibility) and defined if the programm must exit. # An ERR_HIGH is only to call exit and stop the programm. So be carrefull. # ERR_MEDIUM and ERR_LOW will just display the message and no more. - # ERR_HIGH is the default value, you can change it if yo want - # def self.call m, level=ERR_MEDIUM + # ERR_HIGH is the default value, you can change it if yo want by : + # Error.default = Error::ERR_X # # == Parameters: # m:: @@ -56,8 +74,12 @@ class Error # # == Returns: # nil - def self.call m, level=Error::ERR_HIGH - + def self.call m, level=Error::ERR_DEFAULT + # define the default value if level is set by "Default" + if level == Error::ERR_DEFAULT + level = @@error_default + end + if level != Error::ERR_HIGH and level != Error::ERR_MEDIUM and level != Error::ERR_LOW self.call "Error::call : error level invalid", Error::ERR_MEDIUM end
Improve Error class: the default error level is now changeable
Nephos_epimath100
train
e1b47d3b431288adcbe36c4643e23b445155713b
diff --git a/lib/rolify/dynamic.rb b/lib/rolify/dynamic.rb index <HASH>..<HASH> 100644 --- a/lib/rolify/dynamic.rb +++ b/lib/rolify/dynamic.rb @@ -5,7 +5,7 @@ module Rolify def load_dynamic_methods if ENV['ADAPTER'] == 'active_record' # supported Rails version >= 3.2 with AR should use find_each, since use of .all.each is deprecated - self.role_class.includes(:resource).find_each do |r| + self.role_class.group("name, resource_type").includes(:resource).find_each do |r| define_dynamic_method(r.name, r.resource) end else
don't load every single role+resource on startup
Rather, we can simply load the unique role name/resource type combinations, which are likely to be far fewer than the number of absolute role records.
RolifyCommunity_rolify
train
5e75669a22cdda3fb005c170d489479b0a71f449
diff --git a/pyphi/compute/parallel.py b/pyphi/compute/parallel.py index <HASH>..<HASH> 100644 --- a/pyphi/compute/parallel.py +++ b/pyphi/compute/parallel.py @@ -153,10 +153,7 @@ class MapReduce: configure_worker_logging(log_queue) - while True: - obj = in_queue.get() - if obj is POISON_PILL: - break + for obj in iter(in_queue.get, POISON_PILL): out_queue.put(compute(obj, *context)) out_queue.put(POISON_PILL)
Use `iter` to get jobs from queue
wmayner_pyphi
train
2d8eac01068a6b8f04c9ec2cbd968933f0480736
diff --git a/bcbio/pipeline/genome.py b/bcbio/pipeline/genome.py index <HASH>..<HASH> 100644 --- a/bcbio/pipeline/genome.py +++ b/bcbio/pipeline/genome.py @@ -228,8 +228,8 @@ def get_builds(galaxy_base): # ## Retrieve pre-prepared genomes -REMAP_NAMES = {"tophat2": "bowtie2", - "samtools": "seq"} +REMAP_NAMES = {"tophat2": ["bowtie2"], + "samtools": ["rtg", "seq"]} INPLACE_INDEX = {"star": star.index} def download_prepped_genome(genome_build, data, name, need_remap, out_dir=None): @@ -245,24 +245,23 @@ def download_prepped_genome(genome_build, data, name, need_remap, out_dir=None): if not out_dir: out_dir = utils.safe_makedir(os.path.join(tz.get_in(["dirs", "work"], data), "inputs", "data", "genomes")) - ref_dir = os.path.join(out_dir, genome_build, REMAP_NAMES.get(name, name)) - if not os.path.exists(ref_dir): - target = REMAP_NAMES.get(name, name) - if target in INPLACE_INDEX: - ref_file = glob.glob(os.path.normpath(os.path.join(ref_dir, os.pardir, "seq", "*.fa")))[0] - INPLACE_INDEX[target](ref_file, ref_dir, data) - else: - # XXX Currently only supports genomes from S3 us-east-1 bucket. - # Need to assess how slow this is from multiple regions and generalize to non-AWS. - fname = objectstore.BIODATA_INFO["s3"].format(build=genome_build, - target=REMAP_NAMES.get(name, name)) - try: - objectstore.connect(fname) - except: - raise ValueError("Could not find reference genome file %s %s" % (genome_build, name)) - with utils.chdir(out_dir): - cmd = objectstore.cl_input(fname, unpack=False, anonpipe=False) + " | pigz -d -c | tar -xvp" - do.run(cmd.format(**locals()), "Download pre-prepared genome data: %s" % genome_build) + for target in REMAP_NAMES.get(name, [name]): + ref_dir = os.path.join(out_dir, genome_build, target) + if not os.path.exists(ref_dir): + if target in INPLACE_INDEX: + ref_file = glob.glob(os.path.normpath(os.path.join(ref_dir, os.pardir, "seq", "*.fa")))[0] + INPLACE_INDEX[target](ref_file, ref_dir, data) + else: + # XXX Currently only supports genomes from S3 us-east-1 bucket. + # Need to assess how slow this is from multiple regions and generalize to non-AWS. + fname = objectstore.BIODATA_INFO["s3"].format(build=genome_build, target=target) + try: + objectstore.connect(fname) + except: + raise ValueError("Could not find reference genome file %s %s" % (genome_build, name)) + with utils.chdir(out_dir): + cmd = objectstore.cl_input(fname, unpack=False, anonpipe=False) + " | pigz -d -c | tar -xvp" + do.run(cmd.format(**locals()), "Download pre-prepared genome data: %s" % genome_build) ref_file = glob.glob(os.path.normpath(os.path.join(ref_dir, os.pardir, "seq", "*.fa")))[0] if data.get("genome_build"): gresources = get_resources(data["genome_build"], ref_file, data) @@ -285,7 +284,7 @@ def download_prepped_genome(genome_build, data, name, need_remap, out_dir=None): if need_remap or name == "samtools": return os.path.join(genome_dir, "seq", "%s.fa" % genome_build) else: - ref_dir = os.path.join(genome_dir, REMAP_NAMES.get(name, name)) + ref_dir = os.path.join(genome_dir, REMAP_NAMES.get(name, [name])[-1]) base_name = os.path.commonprefix(os.listdir(ref_dir)) while base_name.endswith("."): base_name = base_name[:-1]
Automated download of rtg indexes on cloud runs
This includes pre-built rtg indices in the data preparation step to enable validation for runs on cloud resources.
bcbio_bcbio-nextgen
train
e0e0560799e2f147db93bd3a59f3ae0ddf9f4452
diff --git a/spec/routine_spec.rb b/spec/routine_spec.rb index <HASH>..<HASH> 100644 --- a/spec/routine_spec.rb +++ b/spec/routine_spec.rb @@ -1,7 +1,7 @@ require 'spec_helper' -describe Routine do +describe Lev::Routine do -end \ No newline at end of file +end
Fix NameError: uninitialized constant Routine in routine_spec.rb
lml_lev
train
713112249ee47e414187b79e0ff6ac5931a77ed3
diff --git a/spec/subscriptionOptions.spec.js b/spec/subscriptionOptions.spec.js index <HASH>..<HASH> 100644 --- a/spec/subscriptionOptions.spec.js +++ b/spec/subscriptionOptions.spec.js @@ -223,4 +223,110 @@ describe( 'Subscription Definition Options', function () { } ); + describe( 'When calling withConstraint()', function () { + var events = []; + var count = 0; + beforeEach(function () { + events = []; + sub = monologue.on( "Some.Topic",function ( data ) { + events.push(data); + } ).withConstraint(function(data){ + var cnt = count; + count += 1 + return cnt === 0; + }); + }); + + afterEach( function () { + sub.unsubscribe(); + } ); + + it( 'Should unsubscribe the callback after 1 invocation', function () { + monologue.emit( "Some.Topic", { name: "Paul McCartney" } ); + monologue.emit( "Some.Topic", { name: "John Lennon" } ); + expect( events.length ).to.be( 1 ); + expect( events[0] ).to.eql( { name: "Paul McCartney" } ); + } ); + + } ); + + describe( 'When calling withDebounce', function() { + var events; + + beforeEach(function () { + events = []; + sub = monologue.on( "Debounced.Topic",function ( data ) { + events.push( data ); + } ).withDebounce( 600 ); + }); + + it( "should have only invoked debounced callback once", function (done) { + monologue.emit( "Debounced.Topic", { name: "Help!" } ); // starts the clock on debounce + setTimeout( function () { + monologue.emit( "Debounced.Topic", { name: "Paul McCartney" } ); + }, 20 ); // should not invoke callback + setTimeout( function () { + monologue.emit( "Debounced.Topic", { name: "John Lennon" } ); + }, 100 ); // should not invoke callback + setTimeout( function () { + monologue.emit( "Debounced.Topic", { name: "George Harrison" } ); + }, 150 ); // should not invoke callback + setTimeout( function () { + monologue.emit( "Debounced.Topic", { name: "Ringo Starkey" } ); + }, 750 ); // should not invoke callback + setTimeout( function () { + expect( events[0] ).to.eql( { name: "Ringo Starkey" } ); + expect( events.length ).to.be( 1 ); + sub.unsubscribe(); + done(); + },1500); + } ); + }); + + describe( 'When calling withDelay', function() { + var events; + + beforeEach(function () { + events = []; + sub = monologue.on( "Delayed.Topic",function ( data ) { + events.push( data ); + } ).withDelay( 300 ); + }); + + it( "should have only invoked debounced callback once", function (done) { + monologue.emit( "Delayed.Topic", { name: "Help!" } ); // starts the clock on debounce + setTimeout( function () { + expect( events[0] ).to.eql( { name: "Help!" } ); + expect( events.length ).to.be( 1 ); + sub.unsubscribe(); + done(); + },600); + } ); + }); + + describe(' When calling withThrottle', function() { + var events; + + beforeEach(function () { + events = []; + sub = monologue.on( "Throttled.Topic",function ( data ) { + events.push( data ); + } ).withThrottle( 500 ); + }); + + it( "should have only invoked throttled callback once", function (done) { + monologue.emit( "Throttled.Topic", { name: "Hey, Jude." } ); // starts clock on throttle + events = []; + for ( var i = 0; i < 10; i++ ) { + (function ( x ) { + monologue.emit( "Throttled.Topic", { name: "Hey, Jude." } ); + })( i ); + } + setTimeout( function () { + expect( events.length ).to.be( 1 ); + done(); + }, 800 ); + } ); + }); + }); \ No newline at end of file
Added tests around async SubscriptionDefinition options
postaljs_monologue.js
train
c01d846116a3eb609239b267c65cefc9bedba4de
diff --git a/wandb/keras/__init__.py b/wandb/keras/__init__.py index <HASH>..<HASH> 100644 --- a/wandb/keras/__init__.py +++ b/wandb/keras/__init__.py @@ -99,7 +99,8 @@ class WandbCallback(keras.callbacks.Callback): def on_epoch_end(self, epoch, logs=None): # history - row = {'epoch': epoch} + row = copy.copy(wandb.run.history.row) + row['epoch'] = epoch row.update(logs) if self.log_weights:
Fix keras when doing additional logging
wandb_client
train
5e90e615b1821e1a0812d0b049d0107681b8daf3
diff --git a/lib/Predis/Connection/ConnectionParameters.php b/lib/Predis/Connection/ConnectionParameters.php index <HASH>..<HASH> 100644 --- a/lib/Predis/Connection/ConnectionParameters.php +++ b/lib/Predis/Connection/ConnectionParameters.php @@ -11,7 +11,7 @@ namespace Predis\Connection; -use Predis\ClientException; +use InvalidArgumentException; /** * Handles parsing and validation of connection parameters. @@ -113,14 +113,16 @@ class ConnectionParameters implements ConnectionParametersInterface $uri = str_ireplace('unix:///', 'unix://localhost/', $uri); } - if (!($parsed = @parse_url($uri)) || !isset($parsed['host'])) { - throw new ClientException("Invalid URI: $uri"); + if (!($parsed = parse_url($uri)) || !isset($parsed['host'])) { + throw new InvalidArgumentException("Invalid parameters URI: $uri"); } if (isset($parsed['query'])) { foreach (explode('&', $parsed['query']) as $kv) { - @list($k, $v) = explode('=', $kv); - $parsed[$k] = $v; + $kv = explode('=', $kv); + if (isset($kv[0], $kv[1])) { + $parsed[$kv[0]] = $kv[1]; + } } unset($parsed['query']); diff --git a/tests/Predis/Connection/ConnectionParametersTest.php b/tests/Predis/Connection/ConnectionParametersTest.php index <HASH>..<HASH> 100644 --- a/tests/Predis/Connection/ConnectionParametersTest.php +++ b/tests/Predis/Connection/ConnectionParametersTest.php @@ -159,8 +159,25 @@ class ParametersTest extends StandardTestCase /** * @group disconnected - * @expectedException Predis\ClientException - * @expectedExceptionMessage Invalid URI: tcp://invalid:uri + */ + public function testParsingURIWithIncompletePairInQueryString() + { + $uri = 'tcp://10.10.10.10?persistent=1&foo=&bar'; + + $expected = array( + 'scheme' => 'tcp', + 'host' => '10.10.10.10', + 'persistent' => '1', + 'foo' => '', + ); + + $this->assertSame($expected, ConnectionParameters::parse($uri)); + } + + /** + * @group disconnected + * @expectedException InvalidArgumentException + * @expectedExceptionMessage Invalid parameters URI: tcp://invalid:uri */ public function testParsingURIThrowOnInvalidURI() {
Minor optimizations for the parsing of URI parameters.
Using list() with the warning suppressor is slower than using isset() to check the presence of the first two elements of the array returned by explode(). This also allows us to skip incomplete query string pairs when parsing the URI string. We have also changed the exception being thrown on invalid URIs to a more appropriate one.
nrk_predis
train
f17a14133ee99b233a327395bc18905597541d40
diff --git a/src/pikepdf/_methods.py b/src/pikepdf/_methods.py index <HASH>..<HASH> 100644 --- a/src/pikepdf/_methods.py +++ b/src/pikepdf/_methods.py @@ -72,7 +72,7 @@ def augments(cls_cpp: Type[Any]): Any existing methods may be used, regardless of whether they are defined elsewhere in the support class or in the target class. - For data fields to work, including @property accessors, the target class must be + For data fields to work, the target class must be tagged ``py::dynamic_attr`` in pybind11. Strictly, the target class does not have to be C++ or derived from pybind11.
methods: fix incorrect comment about dynamic_attr
pikepdf_pikepdf
train
4113e2f072e40d613d65f4d1dbee585d338fa202
diff --git a/src/Rah/Danpu/Base.php b/src/Rah/Danpu/Base.php index <HASH>..<HASH> 100644 --- a/src/Rah/Danpu/Base.php +++ b/src/Rah/Danpu/Base.php @@ -157,32 +157,6 @@ abstract class Base } /** - * Gets an array of tables. - * - * @return array|bool - */ - - protected function getTables() - { - if ($tables = $this->pdo->query('SHOW TABLES')) - { - foreach ($tables as $table) - { - $name = current($table); - - if (!in_array($name, $this->config->ignore, true)) - { - $this->tables[] = $name; - } - } - - return $this->tables; - } - - return false; - } - - /** * Locks all tables. * * @return bool diff --git a/src/Rah/Danpu/Export.php b/src/Rah/Danpu/Export.php index <HASH>..<HASH> 100644 --- a/src/Rah/Danpu/Export.php +++ b/src/Rah/Danpu/Export.php @@ -54,7 +54,6 @@ class Export extends Base $this->connect(); $this->tmpFile(); $this->open($this->temp, 'wb'); - $this->getTables(); $this->lock(); $this->dump(); $this->unlock(); @@ -92,8 +91,20 @@ class Export extends Base { $this->write('-- '. date('c') . ' - ' . $this->config->db . '@' . $this->config->host, false); - foreach ($this->tables as $table) + $tables = $this->pdo->prepare('show tables'); + $tables->execute(); + + while ($a = $tables->fetch(\PDO::FETCH_ASSOC)) { + $table = current($a); + + if (in_array($table, (array) $this->config->ignore, true)) + { + continue; + } + + $tables->closeCursor(); + if (($structure = $this->pdo->query('show create table `'.$table.'`')) === false) { throw new Exception('Unable to get the structure for "'.$table.'"');
Don't store table data in memory.
Allows future view support, and reduces memory usage on table-heavy databases.
gocom_danpu
train
8b766dd1d8485ab78f4bb67196be0e03e5e885a9
diff --git a/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor-debug.js b/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor-debug.js index <HASH>..<HASH> 100644 --- a/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor-debug.js +++ b/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor-debug.js @@ -2546,7 +2546,7 @@ var COMMENT = function(editor, gradeid, pageno, x, y, width, colour, rawtext) { drawable.store_position(container, position.x, position.y); drawable.nodes.push(container); node.set('value', this.rawtext); - scrollheight = node.get('scrollHeight'), + scrollheight = node.get('scrollHeight'); node.setStyles({ 'height' : scrollheight + 'px', 'overflow': 'hidden' diff --git a/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor.js b/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor.js index <HASH>..<HASH> 100644 --- a/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor.js +++ b/mod/assign/feedback/editpdf/yui/build/moodle-assignfeedback_editpdf-editor/moodle-assignfeedback_editpdf-editor.js @@ -2546,7 +2546,7 @@ var COMMENT = function(editor, gradeid, pageno, x, y, width, colour, rawtext) { drawable.store_position(container, position.x, position.y); drawable.nodes.push(container); node.set('value', this.rawtext); - scrollheight = node.get('scrollHeight'), + scrollheight = node.get('scrollHeight'); node.setStyles({ 'height' : scrollheight + 'px', 'overflow': 'hidden' diff --git a/mod/assign/feedback/editpdf/yui/src/editor/js/comment.js b/mod/assign/feedback/editpdf/yui/src/editor/js/comment.js index <HASH>..<HASH> 100644 --- a/mod/assign/feedback/editpdf/yui/src/editor/js/comment.js +++ b/mod/assign/feedback/editpdf/yui/src/editor/js/comment.js @@ -196,7 +196,7 @@ var COMMENT = function(editor, gradeid, pageno, x, y, width, colour, rawtext) { drawable.store_position(container, position.x, position.y); drawable.nodes.push(container); node.set('value', this.rawtext); - scrollheight = node.get('scrollHeight'), + scrollheight = node.get('scrollHeight'); node.setStyles({ 'height' : scrollheight + 'px', 'overflow': 'hidden'
MDL-<I> assignfeedback_editpdf: Fix missing semicolon
Detected by eslint rule 'semi'
moodle_moodle
train
ee87ba82593fbadcadd2ed840c44bcc42eb7c9f8
diff --git a/lib/parser.js b/lib/parser.js index <HASH>..<HASH> 100644 --- a/lib/parser.js +++ b/lib/parser.js @@ -350,13 +350,7 @@ function parseFile(path) { return parse(stream); } -function parseUrl(url) { - // TODO: Parse a given <<url>> using requests? - return "parseUrl not implemented"; -} - module.exports = { parseText: parseText, - parseFile: parseFile, - parseUrl: parseUrl + parseFile: parseFile };
Remove url parsing for now
ninetwozero_gradle-to-js
train
986e90bc9fd9b0f59d46a8f26f3928169f9e7cad
diff --git a/src/main/java/com/codeborne/selenide/SelenideElement.java b/src/main/java/com/codeborne/selenide/SelenideElement.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/codeborne/selenide/SelenideElement.java +++ b/src/main/java/com/codeborne/selenide/SelenideElement.java @@ -353,4 +353,6 @@ public interface SelenideElement extends WebElement, FindsByLinkText, FindsById, * @return the original Selenium WebElement wrapped by this object */ WebElement toWebElement(); + + SelenideElement contextClick(); } diff --git a/src/main/java/com/codeborne/selenide/impl/AbstractSelenideElement.java b/src/main/java/com/codeborne/selenide/impl/AbstractSelenideElement.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/codeborne/selenide/impl/AbstractSelenideElement.java +++ b/src/main/java/com/codeborne/selenide/impl/AbstractSelenideElement.java @@ -152,6 +152,10 @@ abstract class AbstractSelenideElement implements InvocationHandler { click(); return null; } + else if ("contextClick".equals(method.getName())) { + contextClick(); + return null; + } return delegateMethod(getDelegate(), method, args); } @@ -164,6 +168,10 @@ abstract class AbstractSelenideElement implements InvocationHandler { waitForElement().click(); } + protected void contextClick() { + actions().contextClick(waitForElement()).perform(); + } + protected void followLink() { WebElement link = waitForElement(); String href = link.getAttribute("href"); diff --git a/src/test/java/com/codeborne/selenide/integrationtests/SelenideMethodsTest.java b/src/test/java/com/codeborne/selenide/integrationtests/SelenideMethodsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/codeborne/selenide/integrationtests/SelenideMethodsTest.java +++ b/src/test/java/com/codeborne/selenide/integrationtests/SelenideMethodsTest.java @@ -294,4 +294,15 @@ public class SelenideMethodsTest { assertTrue(e.getAdditionalInformation().contains("selenide.url: http://localhost:8080www.yandex.ru")); } } + + @Test + public void userCanRightClickOnElement() { + $(By.name("password")).contextClick(); + + $("#login").click(); + $("#login").contextClick(); + + $(By.name("domain")).find("option").click(); + $(By.name("domain")).find("option").contextClick(); + } }
Added method $.contextClick()
selenide_selenide
train
b57b3d3b356628c53fcb6f58a522ee4505f947ca
diff --git a/os-monitor.js b/os-monitor.js index <HASH>..<HASH> 100644 --- a/os-monitor.js +++ b/os-monitor.js @@ -211,7 +211,7 @@ Monitor.prototype.days = function(n) { // deprecated stuff -Monitor.prototype.setConfig = util.deprecate(Monitor.prototype.config); +Monitor.prototype.setConfig = util.deprecate(Monitor.prototype.config, '.setConfig(): Use .config() instead'); // expose OS module Monitor.prototype.os = os;
.setConfig(): added deprecation warning
lfortin_node-os-monitor
train
3a5cd49fe5dcca8d060055f40d30a60b3f0ed36d
diff --git a/public/javascripts/wymeditor/jquery.refinery.wymeditor.js b/public/javascripts/wymeditor/jquery.refinery.wymeditor.js index <HASH>..<HASH> 100755 --- a/public/javascripts/wymeditor/jquery.refinery.wymeditor.js +++ b/public/javascripts/wymeditor/jquery.refinery.wymeditor.js @@ -1253,7 +1253,7 @@ WYMeditor.editor.prototype.dialog = function( dialogType ) { if ((parent_node != null) && (parent_node.tagName.toLowerCase() != WYMeditor.A)) { // wrap the current selection with a funky span (not required for webkit) - if (!this._selected_image && !$.browser.webkit) + if (this._selected_image == null && !$.browser.webkit) { this.wrap("<span id='replace_me_with_" + this._current_unique_stamp + "'>", "</span>"); } @@ -1575,7 +1575,7 @@ WYMeditor.editor.prototype.listen = function() { WYMeditor.editor.prototype.mousedown = function(evt) { var wym = WYMeditor.INSTANCES[this.ownerDocument.title]; - wym._selected_image = (this.tagName.toLowerCase() == WYMeditor.IMG) ? this : null; + wym._selected_image = (evt.target.tagName.toLowerCase() == WYMeditor.IMG) ? evt.target : null; evt.stopPropagation(); }; @@ -5078,11 +5078,4 @@ WYMeditor.WymClassSafari.prototype.getTagForStyle = function(style) { if(/sub/.test(style)) return 'sub'; if(/super/.test(style)) return 'sup'; return false; -}; - -WYMeditor.WymClassSafari.prototype.mousedown = function(evt) { - - var wym = WYMeditor.INSTANCES[this.ownerDocument.title]; - wym._selected_image = (evt.target.tagName.toLowerCase() == WYMeditor.IMG) ? evt.target : null; - evt.stopPropagation(); }; \ No newline at end of file
support inserting images in firefox+webkit
refinery_refinerycms
train
30fc7e923846e5ab5f97167c9a49105126b2e84c
diff --git a/test/helper.py b/test/helper.py index <HASH>..<HASH> 100644 --- a/test/helper.py +++ b/test/helper.py @@ -227,7 +227,7 @@ def check_operator(cls, args, kwargs, input_vector, output_vector, initial_state assert dut.get() == stored_result -JITTER = 0.005 +JITTER = 0.003 async def check_async_operator_coro(cls, args, kwargs, input_vector, output_vector, initial_state=None, has_state=False, loop=None): await check_operator_coro(cls, args, kwargs, input_vector, output_vector, initial_state=initial_state, has_state=has_state, stateful=False, loop=loop) @@ -289,19 +289,33 @@ async def check_operator_coro(cls, args, kwargs, input_vector, output_vector, in assert dut.get() == stored_result # check with input vector + failed_list = [] - last_timestamp = 0 - start_timestamp = loop.time() - target_timestamp = start_timestamp + def _check_temporary(timestamp, value): + result = dut.get() + collector2_len = len(collector2.state_vector) + with dut.subscribe(collector2): + pass + if result is not None: + result = unpack_args(*result) + if has_state: + if collector2.state != value or len(collector2) != collector2_len + 1: + failed_list.append( ('SUBSCRIBE', timestamp, value, result) ) + if result != value or collector2.state != result: + failed_list.append( ('GET', timestamp, value, result) ) + else: + if len(collector2) != collector2_len: + failed_list.append( ('SUBSCRIBE', timestamp, value, result) ) + if result is not None: + failed_list.append( ('GET', timestamp, value, result) ) for timestamp, value in input_vector: - await asyncio.sleep(target_timestamp - loop.time() + timestamp - last_timestamp) - target_timestamp += timestamp - last_timestamp - last_timestamp = timestamp + loop.call_later(timestamp, source.notify, *to_args(value)) - source.notify(*to_args(value)) + for timestamp, value in output_vector: + loop.call_later(timestamp + JITTER, _check_temporary, timestamp, value) - await asyncio.sleep(target_timestamp - loop.time() + output_vector[-1][0] - last_timestamp + 2*JITTER) + await asyncio.sleep(output_vector[-1][0] + 2*JITTER) for value_actual, timestamp_actual, (timestamp_target, value_target) in zip(collector.state_vector, collector.timestamp_vector, output_vector): print(timestamp_target, timestamp_actual, value_target, value_actual) @@ -310,6 +324,8 @@ async def check_operator_coro(cls, args, kwargs, input_vector, output_vector, in print(collector.state_vector, collector.timestamp_vector) assert len(collector.state_vector) == len(output_vector) + assert not failed_list + # dispose permanent subscriber collector.reset() collector2.reset()
extended async test helper with checks for temporary subscription and .get()
semiversus_python-broqer
train
590f167e6b02e11ae1c5ed57e9bbeaee6150e68d
diff --git a/src/main/java/io/github/bonigarcia/wdm/docker/DockerService.java b/src/main/java/io/github/bonigarcia/wdm/docker/DockerService.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/github/bonigarcia/wdm/docker/DockerService.java +++ b/src/main/java/io/github/bonigarcia/wdm/docker/DockerService.java @@ -100,15 +100,11 @@ public class DockerService { this.httpClient = httpClient; this.resolutionCache = resolutionCache; - boolean createDockerClient = true; - if (config.isDockerLocalFallback() && !isRunningInsideDocker()) { - if (!isDockerAvailable()) { - createDockerClient = false; - log.warn( - "Docker is not available in your machine... local browsers are used instead"); - } - } - if (createDockerClient) { + if (config.isDockerLocalFallback() && !isRunningInsideDocker() + && !isDockerAvailable()) { + log.warn( + "Docker is not available in your machine... local browsers are used instead"); + } else { this.dockerClient = createDockerClient(); } }
Smell-fix: improve condition to check Docker client creation
bonigarcia_webdrivermanager
train
e81121582e1c46aa3adbd1fe9c1e1ba31ac292b6
diff --git a/lib/providers/twilio/twilio_provider.rb b/lib/providers/twilio/twilio_provider.rb index <HASH>..<HASH> 100644 --- a/lib/providers/twilio/twilio_provider.rb +++ b/lib/providers/twilio/twilio_provider.rb @@ -38,4 +38,4 @@ module Hermes Twilio::REST::Client.new(self.credentials[:account_sid], self.credentials[:auth_token]) end end -end \ No newline at end of file +end diff --git a/lib/support/extractors.rb b/lib/support/extractors.rb index <HASH>..<HASH> 100644 --- a/lib/support/extractors.rb +++ b/lib/support/extractors.rb @@ -119,4 +119,4 @@ module Hermes end end end -end \ No newline at end of file +end diff --git a/lib/support/phone.rb b/lib/support/phone.rb index <HASH>..<HASH> 100644 --- a/lib/support/phone.rb +++ b/lib/support/phone.rb @@ -3,6 +3,12 @@ module Hermes attr_accessor :country, :number + CODE_LENGTH_RANGE_BY_COUNTRY_CODE = { + "ca" => 5..6, + "uk" => 5..5, + "us" => 5..6, + } + @@countries = { :af => ['+93', 'Afghanistan'], :al => ['+355', 'Albania'], @@ -222,7 +228,17 @@ module Hermes end def full_number - self.class.prefix_for_country(self.country) + self.number + if self.short_code? + self.number + else + self.class.prefix_for_country(self.country) + self.number + end + end + + def short_code? + range = CODE_LENGTH_RANGE_BY_COUNTRY_CODE[self.country] + + range.present? && range.include?(self.number.length) end class << self diff --git a/test/lib/support/extractors_test.rb b/test/lib/support/extractors_test.rb index <HASH>..<HASH> 100644 --- a/test/lib/support/extractors_test.rb +++ b/test/lib/support/extractors_test.rb @@ -77,6 +77,12 @@ describe Hermes::Extractors do assert_equal @wrapper.extract_from(@texter_message, format: :address), from end + it "handles short codes" do + from = Hermes::Phone.new("us", "12345") + from.full_number.must_equal "12345" + from.short_code?.must_equal true + end + it "handles a source and a special naming convention using [] and []=" do plivo_from = Hermes::Phone.new('us', '9193245341') twilio_from = Hermes::Phone.new('us', '9196022733') @@ -124,4 +130,4 @@ describe Hermes::Extractors do assert_equal @wrapper.complex_extract(email), {decoded: false, value: email} assert_equal @wrapper.complex_extract(phone_encoded), {decoded: true, value: phone} end -end \ No newline at end of file +end
Added support for omitting country prefix from short codes
StatusPage_hermes
train
5d0a340754b7770f8eeec128e6dd5802907ff8e3
diff --git a/src/svg.js b/src/svg.js index <HASH>..<HASH> 100644 --- a/src/svg.js +++ b/src/svg.js @@ -2612,6 +2612,7 @@ function gradientRadial(defs, cx, cy, r, fx, fy) { } return el; }; + /*\ * Paper.image [ method ] @@ -2619,27 +2620,13 @@ function gradientRadial(defs, cx, cy, r, fx, fy) { * Embeds an image into the surface. ** - src (string) URI of the source image - - x (number) x coordinate position - - y (number) y coordinate position - - width (number) width of the image - - height (number) height of the image - = (object) Raphaël element object with type `image` - ** - > Usage - | var c = paper.image("apple.png", 10, 10, 80, 80); - \*/ - /*\ - * Paper.image - [ method ] - ** - * Embeds an image into the surface. - ** - - src (string) URI of the source image - - x (number) x coordinate position - - y (number) y coordinate position + - x (number) x offset position + - y (number) y offset position - width (number) width of the image - height (number) height of the image = (object) `image` element + * or + = (object) Raphaël element object with type `image` ** > Usage | var c = paper.image("apple.png", 10, 10, 80, 80);
EDIT Paper.image, to collapse redundant doc entries
adobe-webplatform_Snap.svg
train
d06464050100324bc4154bc5f63fcc12eaa3dd22
diff --git a/kafka_consumer/datadog_checks/kafka_consumer/kafka_consumer.py b/kafka_consumer/datadog_checks/kafka_consumer/kafka_consumer.py index <HASH>..<HASH> 100644 --- a/kafka_consumer/datadog_checks/kafka_consumer/kafka_consumer.py +++ b/kafka_consumer/datadog_checks/kafka_consumer/kafka_consumer.py @@ -86,8 +86,8 @@ class KafkaCheck(AgentCheck): if instance.get('monitor_unlisted_consumer_groups', False): consumer_groups = None elif 'consumer_groups' in instance: - consumer_groups = self._read_config(instance, 'consumer_groups', - cast=self._validate_consumer_groups) + consumer_groups = instance.get('consumer_groups') + self._validate_explicit_consumer_groups(consumer_groups) zk_consumer_offsets = None if zk_hosts_ports and \ @@ -157,7 +157,7 @@ class KafkaCheck(AgentCheck): cli.close() def _get_instance_key(self, instance): - servers = self._read_config(instance, 'kafka_connect_str') + servers = instance.get('kafka_connect_str') key = None if isinstance(servers, basestring): key = servers @@ -170,7 +170,7 @@ class KafkaCheck(AgentCheck): def _get_kafka_client(self, instance): - kafka_conn_str = self._read_config(instance, 'kafka_connect_str') + kafka_conn_str = instance.get('kafka_connect_str') if not kafka_conn_str: raise BadKafkaConsumerConfiguration('Bad instance configuration') @@ -429,7 +429,7 @@ class KafkaCheck(AgentCheck): :param dict consumer_groups: The consumer groups, topics, and partitions that you want to fetch offsets for. If consumer_groups is None, will fetch offsets for all consumer_groups. For examples of what this - dict can look like, see _validate_consumer_groups(). + dict can look like, see _validate_explicit_consumer_groups(). """ zk_consumer_offsets = {} @@ -567,35 +567,27 @@ class KafkaCheck(AgentCheck): return should_zk - @staticmethod - def _read_config(instance, key, cast=None): - val = instance.get(key) - if val is None: - raise BadKafkaConsumerConfiguration('Must provide `%s` value in instance config' % key) - - if cast is None: - return val - - return cast(val) - - def _validate_consumer_groups(self, val): - # val = {'consumer_group': {'topic': [0, 1]}} - # consumer groups are optional - assert isinstance(val, dict) or val is None - if val is not None: - for consumer_group, topics in val.iteritems(): - assert isinstance(consumer_group, basestring) - # topics are optional - assert isinstance(topics, dict) or topics is None - if topics is not None: - for topic, partitions in topics.iteritems(): - assert isinstance(topic, basestring) - # partitions are optional - assert isinstance(partitions, (list, tuple)) or partitions is None - if partitions is not None: - for partition in partitions: - assert isinstance(partition, int) - return val + def _validate_explicit_consumer_groups(self, val): + """Validate any explicitly specified consumer groups. + + While the check does not require specifying consumer groups, + if they are specified this method should be used to validate them. 
+ + val = {'consumer_group': {'topic': [0, 1]}} + """ + assert isinstance(val, dict) + for consumer_group, topics in val.iteritems(): + assert isinstance(consumer_group, basestring) + # topics are optional + assert isinstance(topics, dict) or topics is None + if topics is not None: + for topic, partitions in topics.iteritems(): + assert isinstance(topic, basestring) + # partitions are optional + assert isinstance(partitions, (list, tuple)) or partitions is None + if partitions is not None: + for partition in partitions: + assert isinstance(partition, int) def _send_event(self, title, text, tags, event_type, aggregation_key, severity='info'): """Emit an event to the Datadog Event Stream."""
Remove deprecated _read_config()
A while back this was deprecated because it will not be used in the new version of the agent. At the time, this was handled by copy/pasting the deprecated method into this check. However, this method should actually be completely removed, as it adds no value to the check and is easily refactored away.
DataDog_integrations-core
train
c8134884123c141a415c85b0778b36ffafe734d7
diff --git a/code/Report.php b/code/Report.php index <HASH>..<HASH> 100644 --- a/code/Report.php +++ b/code/Report.php @@ -171,7 +171,7 @@ class Report extends ViewableData { return Controller::join_links( ReportAdmin::singleton()->Link('show'), - $this->sanitiseClassName(get_class($this)), + $this->sanitiseClassName(static::class), $action ); } @@ -197,7 +197,7 @@ class Report extends ViewableData $sourceRecords = $this->sourceRecords($params, null, null); if (!$sourceRecords instanceof SS_List) { - user_error(get_class($this) . "::sourceRecords does not return an SS_List", E_USER_NOTICE); + user_error(static::class . "::sourceRecords does not return an SS_List", E_USER_NOTICE); return "-1"; } return $sourceRecords->count(); diff --git a/code/ReportWrapper.php b/code/ReportWrapper.php index <HASH>..<HASH> 100644 --- a/code/ReportWrapper.php +++ b/code/ReportWrapper.php @@ -28,7 +28,7 @@ abstract class ReportWrapper extends Report public function ID() { - return get_class($this->baseReport) . '_' . get_class($this); + return get_class($this->baseReport) . '_' . static::class; } /////////////////////////////////////////////////////////////////////////////////////////// diff --git a/code/SideReport.php b/code/SideReport.php index <HASH>..<HASH> 100644 --- a/code/SideReport.php +++ b/code/SideReport.php @@ -43,7 +43,7 @@ class SideReportView extends ViewableData $columns = $this->report->columns(); if ($records && $records->Count()) { - $result = "<ul class=\"" . get_class($this) . "\">\n"; + $result = "<ul class=\"" . static::class . "\">\n"; foreach ($records as $record) { $result .= "<li>\n";
MINOR: Make use of static::class
silverstripe_silverstripe-reports
train
f6ff6cb5dfcad63fe0615802348b3252d1382640
diff --git a/core/workspace_svg.js b/core/workspace_svg.js index <HASH>..<HASH> 100644 --- a/core/workspace_svg.js +++ b/core/workspace_svg.js @@ -768,9 +768,9 @@ Blockly.WorkspaceSvg.prototype.setVisible = function(isVisible) { } if (isVisible) { this.render(); - if (this.toolbox_) { - this.toolbox_.position(); - } + // The window may have changed size while the workspace was hidden. + // Resize recalculates scrollbar position, delete areas, etc. + this.resize(); } else { Blockly.hideChaff(true); Blockly.DropDownDiv.hideWithoutAnimation();
Fix #<I> (#<I>)
LLK_scratch-blocks
train
721d172a64dbe1285d31fdcafed672e24fc5e76e
diff --git a/agrona/src/main/java/org/agrona/concurrent/status/CountersManager.java b/agrona/src/main/java/org/agrona/concurrent/status/CountersManager.java index <HASH>..<HASH> 100644 --- a/agrona/src/main/java/org/agrona/concurrent/status/CountersManager.java +++ b/agrona/src/main/java/org/agrona/concurrent/status/CountersManager.java @@ -390,6 +390,17 @@ public class CountersManager extends CountersReader } /** + * Set an {@link AtomicCounter} key based on counterId, using a consumer callback to update the key buffer. + * + * @param counterId to be set. + * @param keyFunc callback used to set the key. + */ + public void setCounterKey(final int counterId, final Consumer<MutableDirectBuffer> keyFunc) + { + keyFunc.accept(new UnsafeBuffer(metaDataBuffer, metaDataOffset(counterId) + KEY_OFFSET, MAX_KEY_LENGTH)); + } + + /** * Set an {@link AtomicCounter} label based on counterId. * * @param counterId to be set. diff --git a/agrona/src/test/java/org/agrona/concurrent/status/CountersManagerTest.java b/agrona/src/test/java/org/agrona/concurrent/status/CountersManagerTest.java index <HASH>..<HASH> 100644 --- a/agrona/src/test/java/org/agrona/concurrent/status/CountersManagerTest.java +++ b/agrona/src/test/java/org/agrona/concurrent/status/CountersManagerTest.java @@ -287,6 +287,45 @@ public class CountersManagerTest assertThat(counter.label(), is("original label with update")); } + + @Test + public void shouldBeAbleToGetAndUpdateCounterKey() + { + final String originalKey = "original key"; + final String updatedKey = "updated key"; + + final AtomicCounter counter = manager.newCounter( + "label", 101, (keyBuffer) -> keyBuffer.putStringUtf8(0, originalKey)); + + final StringKeyExtractor keyExtractor = new StringKeyExtractor(counter.id()); + + manager.forEach(keyExtractor); + + assertThat(keyExtractor.key, is(originalKey)); + + manager.setCounterKey(counter.id(), (keyBuffer) -> keyBuffer.putStringUtf8(0, updatedKey)); + + } + + private static class StringKeyExtractor implements MetaData + { + private final int id; + private String key; + + private StringKeyExtractor(final int id) + { + this.id = id; + } + + public void accept(final int counterId, final int typeId, final DirectBuffer keyBuffer, final String label) + { + if (counterId == id) + { + key = keyBuffer.getStringUtf8(0); + } + } + } + @Test public void shouldBeAbleToAppendLabel() {
Add a method to update the key for a specific counter.
real-logic_agrona
train
c9dd9e7d6c5e890dc557dd2626a8785cd1abf043
diff --git a/ontobio/golr/golr_query.py b/ontobio/golr/golr_query.py index <HASH>..<HASH> 100644 --- a/ontobio/golr/golr_query.py +++ b/ontobio/golr/golr_query.py @@ -115,7 +115,11 @@ class GolrFields: ASPECT='aspect' RELATION='relation' RELATION_LABEL='relation_label' - + FREQUENCY='frequency' + FREQUENCY_LABEL='frequency_label' + ONSET='onset' + ONSET_LABEL='onset_label' + # This is a temporary fix until # https://github.com/biolink/ontobio/issues/126 is resolved. @@ -1180,7 +1184,11 @@ class GolrAssociationQuery(GolrAbstractQuery): M.OBJECT, M.OBJECT_LABEL, M.OBJECT_TAXON, - M.OBJECT_TAXON_LABEL + M.OBJECT_TAXON_LABEL, + M.FREQUENCY, + M.FREQUENCY_LABEL, + M.ONSET, + M.ONSET_LABEL ] if not self.unselect_evidence: select_fields += [
updating golr fields and select fields to include frequency and onset in DiseasePhenotype assoc
biolink_ontobio
train
f82ef1769099bc2d04ccaeb48fdb3eb2dd299781
diff --git a/pylas/lasdata.py b/pylas/lasdata.py index <HASH>..<HASH> 100644 --- a/pylas/lasdata.py +++ b/pylas/lasdata.py @@ -44,6 +44,31 @@ class LasData: pointdimensions.EDGE_OF_FLIGHT_LINE_HIGH_BIT ) + # Split raw classification + self.classification = pointdimensions.bit_transform( + self.np_point_data['raw_classification'], + pointdimensions.CLASSIFICATION_LOW_BIT, + pointdimensions.CLASSIFICATION_HIGH_BIT + ) + + self.synthetic = pointdimensions.bit_transform( + self.np_point_data['raw_classification'], + pointdimensions.SYNTHETIC_LOW_BIT, + pointdimensions.SYNTHETIC_HIGH_BIT, + ).astype('bool') + + self.key_point = pointdimensions.bit_transform( + self.np_point_data['raw_classification'], + pointdimensions.KEY_POINT_LOW_BIT, + pointdimensions.KEY_POINT_HIGH_BIT + ).astype('bool') + + self.withheld = pointdimensions.bit_transform( + self.np_point_data['raw_classification'], + pointdimensions.WITHHELD_LOW_BIT, + pointdimensions.WITHHELD_HIGH_BIT + ).astype('bool') + @property def X(self): return self.np_point_data['X'] @@ -89,10 +114,6 @@ class LasData: self.np_point_data['intensity'] = value @property - def classification(self): - return self.np_point_data['classification'] - - @property def scan_angle_rank(self): return self.np_point_data['scan_angle_rank'] diff --git a/pylas/pointdimensions.py b/pylas/pointdimensions.py index <HASH>..<HASH> 100644 --- a/pylas/pointdimensions.py +++ b/pylas/pointdimensions.py @@ -18,13 +18,23 @@ SCAN_DIRECTION_FLAG_HIGH_BIT = 7 EDGE_OF_FLIGHT_LINE_LOW_BIT = 7 EDGE_OF_FLIGHT_LINE_HIGH_BIT = 8 +CLASSIFICATION_LOW_BIT = 0 +CLASSIFICATION_HIGH_BIT = 4 +SYNTHETIC_LOW_BIT = 4 +SYNTHETIC_HIGH_BIT = 5 +KEY_POINT_LOW_BIT = 5 +KEY_POINT_HIGH_BIT = 6 +WITHHELD_LOW_BIT = 6 +WITHHELD_HIGH_BIT = 7 + + dimensions = { 'X': ('X', 'u4'), 'Y': ('Y', 'u4'), 'Z': ('Z', 'u4'), 'intensity': ('intensity', 'u2'), 'bit_fields': ('bit_fields', 'u1'), - 'classification': ('classification', 'u1'), + 'raw_classification': ('raw_classification', 'u1'), 'scan_angle_rank': ('scan_angle_rank', 'i1'), 'user_data': ('user_data', 'u1'), 'point_source_id': ('point_source_id', 'u2'), @@ -40,7 +50,7 @@ point_format_0 = ( 'Z', 'intensity', 'bit_fields', - 'classification', + 'raw_classification', 'scan_angle_rank', 'user_data', 'point_source_id'
split classification field as stated in the specs
tmontaigu_pylas
train
90d6e7df2484fdf7bbe7e614c7683906c77bd98e
diff --git a/cmd/policy-main.go b/cmd/policy-main.go index <HASH>..<HASH> 100644 --- a/cmd/policy-main.go +++ b/cmd/policy-main.go @@ -437,8 +437,11 @@ func runPolicyCmd(args cli.Args) { perms := accessPerms(args.Get(1)) targetURL := args.Get(2) if perms.isValidAccessPERM() { - probeErr = doSetAccess(ctx, targetURL, perms) operation = "set" + probeErr = doSetAccess(ctx, targetURL, perms) + if probeErr == nil { + perms, _, probeErr = doGetAccess(ctx, targetURL) + } } else if perms.isValidAccessFile() { probeErr = doSetAccessJSON(ctx, targetURL, perms) operation = "set-json"
policy: Recalculate permission mode after setting a new one (#<I>)
`mc policy set` can confuse users by showing that a bucket has 'none' policy mode although issuing `mc policy get` will show it is `custom` when the bucket already has other policy statements for some sub-resources. For better UX, mc will issue another request to get the real policy mode.
minio_mc
train
ad45d0abe1797150c5e6d4cc70eebdd7e882f8a0
diff --git a/src/hot/client/index.js b/src/hot/client/index.js index <HASH>..<HASH> 100644 --- a/src/hot/client/index.js +++ b/src/hot/client/index.js @@ -14,7 +14,12 @@ if (!module.hot || process.env.NODE_ENV === 'production') { module.exports = { - makeHot() {}, + makeHot(rootFactory: Function) { + /** + * Return the original rootFactory and be quiet. + */ + return rootFactory; + }, redraw() {}, tryUpdateSelf() {}, callOnce(callback: Function) {
fix(hmr): makeHot in production mode (#<I>)
`makeHot` should behave correctly and should return the original `rootFactory`, not `undefined`; otherwise apps in production mode will break.
callstack_haul
train
16c7a1b33cc8d0c3dc5096a1ad45cab3e7f2a1a6
diff --git a/python/thunder/rdds/fileio/readers.py b/python/thunder/rdds/fileio/readers.py index <HASH>..<HASH> 100644 --- a/python/thunder/rdds/fileio/readers.py +++ b/python/thunder/rdds/fileio/readers.py @@ -445,7 +445,11 @@ class BotoS3ReadFileHandle(object): self._offset = 0 def close(self): - self._key.close(fast=True) + try: + self._key.close(fast=True) + except TypeError: + # workaround for early versions of boto that don't have the 'fast' keyword + self._key.close() self._closed = True def read(self, size=-1):
#<I> workaround for missing keywords in early versions of boto
thunder-project_thunder
train
866920fd1259295f02037d94bd39a0537361cdf4
diff --git a/src/core/TSDB.java b/src/core/TSDB.java index <HASH>..<HASH> 100644 --- a/src/core/TSDB.java +++ b/src/core/TSDB.java @@ -230,6 +230,10 @@ public final class TSDB { UniqueId.preloadUidCache(this, uid_cache_map); } + if (config.getString("tsd.core.tag.allow_specialchars") != null) { + Tags.setAllowSpecialChars(config.getString("tsd.core.tag.allow_specialchars")); + } + // load up the functions that require the TSDB object ExpressionFactory.addTSDBFunctions(this); diff --git a/src/core/Tags.java b/src/core/Tags.java index <HASH>..<HASH> 100644 --- a/src/core/Tags.java +++ b/src/core/Tags.java @@ -37,6 +37,7 @@ import net.opentsdb.utils.Pair; public final class Tags { private static final Logger LOG = LoggerFactory.getLogger(Tags.class); + private static String allowSpecialChars = ""; private Tags() { // Can't create instances of this utility class. @@ -547,7 +548,7 @@ public final class Tags { final char c = s.charAt(i); if (!(('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '-' || c == '_' || c == '.' - || c == '/' || Character.isLetter(c))) { + || c == '/' || Character.isLetter(c) || isAllowSpecialChars(c))) { throw new IllegalArgumentException("Invalid " + what + " (\"" + s + "\"): illegal character: " + c); } @@ -808,4 +809,20 @@ public final class Tags { return true; } + /** + * Set the special characters due to allowing for a key or a value of the tag. + * @param characters character sequences as a string + */ + public static void setAllowSpecialChars(String characters) { + allowSpecialChars = characters == null ? "" : characters; + } + + /** + * Returns true if the character can be used a tag name or a tag value. + * @param character + * @return + */ + static boolean isAllowSpecialChars(char character) { + return allowSpecialChars.indexOf(character) != -1; + } } diff --git a/test/core/TestTags.java b/test/core/TestTags.java index <HASH>..<HASH> 100644 --- a/test/core/TestTags.java +++ b/test/core/TestTags.java @@ -942,4 +942,20 @@ public final class TestTags { final ByteMap<byte[]> uids = Tags.getTagUids(new byte[] {}); assertEquals(0, uids.size()); } + + @Test + public void setAllowSpecialChars() throws Exception { + assertFalse(Tags.isAllowSpecialChars('!')); + + Tags.setAllowSpecialChars(null); + assertFalse(Tags.isAllowSpecialChars('!')); + + Tags.setAllowSpecialChars(""); + assertFalse(Tags.isAllowSpecialChars('!')); + + Tags.setAllowSpecialChars("!)(%"); + assertTrue(Tags.isAllowSpecialChars('!')); + assertTrue(Tags.isAllowSpecialChars('(')); + assertTrue(Tags.isAllowSpecialChars('%')); + } }
Allow defining special characters for keys and values of the tag via config tsd.core.tag.allow_specialchars
OpenTSDB_opentsdb
train
2a27f0b97f470f37eaab556ede39ac7e17a3fc25
diff --git a/lib/notifaction/types/linux.rb b/lib/notifaction/types/linux.rb index <HASH>..<HASH> 100644 --- a/lib/notifaction/types/linux.rb +++ b/lib/notifaction/types/linux.rb @@ -5,6 +5,8 @@ module Notifaction def bubble(message, title) @response = `notify-send "#{title}" "#{message}"` $?.exitstatus == 0 + + fire_hooks({ method: __method__, message: message, title: title }) end def modal(message, title) diff --git a/lib/notifaction/types/osx.rb b/lib/notifaction/types/osx.rb index <HASH>..<HASH> 100644 --- a/lib/notifaction/types/osx.rb +++ b/lib/notifaction/types/osx.rb @@ -5,11 +5,15 @@ module Notifaction def bubble(message, title) @response = `osascript -e 'display notification "#{message}" with title "#{title}"'` $?.exitstatus == 0 + + fire_hooks({ method: __method__, message: message, title: title }) end def modal(message, title, icon = :caution) @response = `osascript -e 'tell app "System Events" to display dialog "#{message}" buttons {"OK"} default button 1 with title "#{title}" with icon #{icon}'` $?.exitstatus == 0 + + fire_hooks({ method: __method__, message: message, title: title }) end end
fire hooks on bubble/modal notifications as well
aapis_notifaction
train
aa064edca5d7f60d758c16b50d513b8a32c797dc
diff --git a/records/config.go b/records/config.go index <HASH>..<HASH> 100644 --- a/records/config.go +++ b/records/config.go @@ -164,7 +164,14 @@ func SetConfig(cjson string) Config { } c.initSOA() + c.initCertificates() + c.initMesosAuthentication() + c.log() + + return *c +} +func (c *Config) initCertificates() { if c.CACertFile != "" { pool, err := readCACertFile(c.CACertFile) if err != nil { @@ -188,11 +195,6 @@ func SetConfig(cjson string) Config { } c.cert = cert } - - c.initMesosAuthentication() - c.log() - - return *c } func (c *Config) initMesosAuthentication() {
refactor cert config logic to reduce cyclomatic complexity
mesosphere_mesos-dns
train
06c2fb08f08ede0b5dff0401c9e8cf7fcf644c37
diff --git a/test/test_add_command.py b/test/test_add_command.py index <HASH>..<HASH> 100644 --- a/test/test_add_command.py +++ b/test/test_add_command.py @@ -340,6 +340,7 @@ class AddCommandTest(CommandTest): self.todolist, self.out, self.error) command.execute() + self.assertTrue(self.todolist.todo(1).is_completed()) self.assertEqual(self.output, "| 1| x 2015-01-01 {} Already completed\n".format(self.today)) self.assertEqual(self.errors, "")
Also check that the todo item is marked complete internally
This assertion was missing in commit 6bb<I>d.
bram85_topydo
train
d82543a30c3e413dcd434ddd895e45e0ebc6dbf3
diff --git a/salt/cloud/clouds/vsphere.py b/salt/cloud/clouds/vsphere.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/vsphere.py +++ b/salt/cloud/clouds/vsphere.py @@ -248,7 +248,6 @@ def create(vm_): deploy_kwargs = _deploy(vm_) ret = show_instance(name=vm_['name'], call='action') - show_deploy_args = config.get_cloud_config_value( 'show_deploy_args', vm_, __opts__, default=False ) @@ -292,7 +291,7 @@ def wait_for_ip(vm_): return False log.debug('Pulling VM {0} {1} seconds for an IP address'.format(vm_['name'])) - ip_address = salt.utils.cloud.wait_for_fun(poll_ip, timeout) + ip_address = salt.utils.cloud.wait_for_fun(poll_ip) if ip_address is not False: log.debug('VM {0} has IP address {1}'.format(vm_['name'], ip_address)) @@ -415,7 +414,7 @@ def _get_instance_properties(instance, from_cache=True): properties = instance.get_properties(from_cache) for prop in ('guest_full_name', 'guest_id', 'memory_mb', 'name', 'num_cpu', 'path', 'devices', 'disks', 'files', - 'net', 'ip_address', 'mac_address','hostname'): + 'net', 'ip_address', 'mac_address', 'hostname'): if prop in properties: ret[prop] = properties[prop] else:
lint fix and removed unused var
saltstack_salt
train
5731b27da8b37b87afc4e254ea54f8b4a299f3c4
diff --git a/spec/mdspell/cli_spec.rb b/spec/mdspell/cli_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mdspell/cli_spec.rb +++ b/spec/mdspell/cli_spec.rb @@ -2,10 +2,6 @@ describe MdSpell::CLI do it { is_expected.to respond_to :run } it { is_expected.to respond_to :files } - after(:all) do - MdSpell::Configuration.reset - end - context '#run' do it 'should expect command line options array' do [nil, 'string', 42].each do |argument| diff --git a/spec/mdspell/configuration_spec.rb b/spec/mdspell/configuration_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mdspell/configuration_spec.rb +++ b/spec/mdspell/configuration_spec.rb @@ -1,12 +1,4 @@ describe MdSpell::Configuration do - before(:each) do - MdSpell::Configuration.reset - end - - after(:all) do - MdSpell::Configuration.reset - end - def run_app(args = []) MdSpell::CLI.new.run(args) end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -24,6 +24,9 @@ require 'rspec/collection_matchers' require 'mdspell' RSpec.configure do |config| + # Common hooks + config.before(:each) { MdSpell::Configuration.reset } + # rspec-expectations config goes here. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` and
Moved configuration clearing to global before hook.
mtuchowski_mdspell
train
fbfb9c22d16c6a96349361eb53c07abc74006ac9
diff --git a/packages/ember-handlebars/lib/helpers/partial.js b/packages/ember-handlebars/lib/helpers/partial.js index <HASH>..<HASH> 100644 --- a/packages/ember-handlebars/lib/helpers/partial.js +++ b/packages/ember-handlebars/lib/helpers/partial.js @@ -87,15 +87,15 @@ function exists(value) { } function renderPartial(context, name, options) { - var nameParts = name.split("/"), - lastPart = nameParts[nameParts.length - 1]; + var nameParts = name.split("/"); + var lastPart = nameParts[nameParts.length - 1]; nameParts[nameParts.length - 1] = "_" + lastPart; - var view = options.data.view, - underscoredName = nameParts.join("/"), - template = view.templateForName(underscoredName), - deprecatedTemplate = !template && view.templateForName(name); + var view = options.data.view; + var underscoredName = nameParts.join("/"); + var template = view.templateForName(underscoredName); + var deprecatedTemplate = !template && view.templateForName(name); Ember.assert("Unable to find partial with name '"+name+"'.", template || deprecatedTemplate);
Replaces single multiline var statement with multiple var statements
emberjs_ember.js
train
eb6e11a480e68819696e6b2bc4d41d834fd1d4c5
diff --git a/structr-core/src/test/java/org/structr/test/core/script/ScriptingTest.java b/structr-core/src/test/java/org/structr/test/core/script/ScriptingTest.java index <HASH>..<HASH> 100644 --- a/structr-core/src/test/java/org/structr/test/core/script/ScriptingTest.java +++ b/structr-core/src/test/java/org/structr/test/core/script/ScriptingTest.java @@ -4155,6 +4155,93 @@ public class ScriptingTest extends StructrTest { } @Test + public void testAdvancedFindWithRemotePropertySorting() { + + // setup + try (final Tx tx = app.tx()) { + + final JsonSchema schema = StructrSchema.createFromDatabase(app); + final JsonObjectType test = schema.addType("Test"); + final JsonObjectType test2 = schema.addType("Test2"); + final JsonObjectType test3 = schema.addType("Test3"); + + test.relate(test2, "HAS_Test2", Cardinality.OneToOne, "test", "test2"); + test2.relate(test3, "HAS_Test3", Cardinality.OneToOne, "test2", "test3"); + + StructrSchema.extendDatabaseSchema(app, schema); + + tx.success(); + + } catch (FrameworkException fex) { + + fex.printStackTrace(); + fail("Unexpected exception."); + } + + + final ActionContext ctx = new ActionContext(securityContext); + final Class testType = StructrApp.getConfiguration().getNodeEntityClass("Test"); + final Class test2Type = StructrApp.getConfiguration().getNodeEntityClass("Test2"); + final Class test3Type = StructrApp.getConfiguration().getNodeEntityClass("Test3"); + + final PropertyKey test2_test = StructrApp.key(test2Type, "test"); + final PropertyKey test3_test2 = StructrApp.key(test3Type, "test2"); + + // setup + try (final Tx tx = app.tx()) { + + + for (int i = 0; i < 10; i++) { + + final NodeInterface test = app.create(testType, + new NodeAttribute<>(AbstractNode.name, "test1_" + i) + ); + + final NodeInterface test2 = app.create(test2Type, + new NodeAttribute<>(AbstractNode.name, "test2_" + i), + new NodeAttribute<>(test2_test, test) + ); + + final NodeInterface test3 = app.create(test3Type, + new NodeAttribute<>(AbstractNode.name, "test3_" + i), + new NodeAttribute<>(test3_test2, test2) + ); + } + + tx.success(); + + } catch (FrameworkException fex) { + + fex.printStackTrace(); + fail("Unexpected exception."); + } + + try (final Tx tx = app.tx()) { + + + + List<GraphObject> result = (List<GraphObject>) Scripting.evaluate(ctx, null, "${{ return $.find('Test', $.predicate.sort('test2.test3.name', false)); }}", "testFindNewSyntax"); + + assertEquals("Advanced find() returns wrong result", 10, result.size()); + assertEquals("Advanced find() sorted incorrectly", "test1_0", result.get(0).getProperty(AbstractNode.name)); + assertEquals("Advanced find() sorted incorrectly", "test1_1", result.get(1).getProperty(AbstractNode.name)); + assertEquals("Advanced find() sorted incorrectly", "test2_0", ((NodeInterface)result.get(0).getProperty(StructrApp.key(testType, "test2"))).getProperty(AbstractNode.name)); + assertEquals("Advanced find() sorted incorrectly", "test3_0", ((NodeInterface)((NodeInterface)result.get(0).getProperty(StructrApp.key(testType, "test2"))).getProperty(StructrApp.key(test2Type, "test3"))).getProperty(AbstractNode.name)); + + result = (List<GraphObject>) Scripting.evaluate(ctx, null, "${{ return $.find('Test', $.predicate.sort('test2.test3.name', true)); }}", "testFindNewSyntax"); + + assertEquals("Advanced find() sorted incorrectly", "test1_9", result.get(0).getProperty(AbstractNode.name)); + assertEquals("Advanced find() sorted incorrectly", "test1_8", result.get(1).getProperty(AbstractNode.name)); + + tx.success(); + + } catch 
(FrameworkException fex) { + fex.printStackTrace(); + fail("Unexpected exception"); + } + } + + @Test public void testAdvancedFindRangeQueryLeak() { // setup
Adds scripting test for advanced find using path sorting.
structr_structr
train
72179af69e497289e04bae9e40cf659605babb15
diff --git a/aegean.py b/aegean.py index <HASH>..<HASH> 100755 --- a/aegean.py +++ b/aegean.py @@ -1970,7 +1970,7 @@ def priorized_fit_islands(filename, catfile, hdu_index=0, outfile=None, bkgin=No xmax = max(xmax, min(shape[0], x + xwidth / 2 + 1)) ymax = max(ymax, min(shape[1], y + ywidth / 2 + 1)) - s_lims = [0.8 * pixbeam.b * fwhm2cc, 2 * sy * math.sqrt(2)] + s_lims = [0.8 * min(sx,pixbeam.b * fwhm2cc), 2 * sy * math.sqrt(2)] # Set up the parameters for the fit, including constraints prefix = "c{0}_".format(i)
fixed a bug in priorized fitting that could make sources too big
PaulHancock_Aegean
train
273026afe51555120951292a50e0974b3998b4e3
diff --git a/tests/_utils.py b/tests/_utils.py index <HASH>..<HASH> 100644 --- a/tests/_utils.py +++ b/tests/_utils.py @@ -16,8 +16,10 @@ import collections from nose import tools from lxml import etree +from xml.etree.ElementTree import parse import openquake.nrmllib +from openquake.nrmllib.writers import tostring def deep_eq(a, b): @@ -67,9 +69,7 @@ def _test_dict(a, b): def _test_seq(a, b): """Compare `list` or `tuple` types recursively.""" assert len(a) == len(b), ('Sequence length mismatch. Expected %s, got %s' - % (len(a), len(b)) - ) - + % (len(a), len(b))) for i, item in enumerate(a): _deep_eq(item, b[i]) @@ -82,11 +82,8 @@ def assert_xml_equal(a, b): Paths to XML files, or a file-like object containing the XML contents. """ - - contents_a = etree.tostring(etree.parse(a), pretty_print=True) - contents_b = etree.tostring(etree.parse(b), pretty_print=True) - - tools.assert_equal(contents_a, contents_b) + tools.assert_equal(tostring(parse(a).getroot()), + tostring(parse(b).getroot())) def validates_against_xml_schema(
Changed assert_xml_equal to not depend on lxml
gem_oq-engine
train
523e307211175a5b12c7487d62e65557e07fc792
diff --git a/websocket/index.js b/websocket/index.js index <HASH>..<HASH> 100644 --- a/websocket/index.js +++ b/websocket/index.js @@ -11,6 +11,7 @@ define(module, function(exports, require) { socket: null, + id: 0, key: '', session_id: '', ua: '', diff --git a/websocket/server.js b/websocket/server.js index <HASH>..<HASH> 100644 --- a/websocket/server.js +++ b/websocket/server.js @@ -27,13 +27,13 @@ define(module, (exports, require) => { this.wss.on('connection', this.on_socket_connected); }, - stop: function(done) { + close: function(done) { qp.each(this.sockets, (socket) => socket.close()); - this.wss.close(() => { - if (done) done(); - }); + this.wss.close(done); }, + get_id: function() { return this.id++; }, + on_socket_connect: function(info, done) { if (this.on_connect) { var req_url = url.create({ url: info.req.url }); @@ -51,7 +51,7 @@ define(module, (exports, require) => { on_socket_connected: function(ws, req) { var req_url = url.create({ url: req.url }); - var socket = websocket.create({ socket: ws, url: req_url, headers: req.headers }); + var socket = websocket.create({ id: qp.id(), socket: ws, url: req_url, headers: req.headers }); ws.on('error', (e) => this.on_socket_error(socket, e)); ws.on('close', (code, message) => this.on_socket_close(socket, code, message)); if (this.on_connected) { @@ -85,13 +85,13 @@ define(module, (exports, require) => { }, on_socket_open: function(socket, e) { - log.socket('OPEN', socket.channel); qp.push(this.sockets, socket); + log.socket('OPEN', qp.stringify({ id: socket.id, total: this.sockets.length, name: socket.channel })); }, on_socket_close: function(socket, code, message) { - log.socket('SHUT', code, message || ''); qp.remove(this.sockets, socket); + log.socket('SHUT', qp.stringify({ id: socket.id, total: this.sockets.length, code: code, name: socket.channel })); } });
websockets; shutdown and logging
cjr--_qp-library
train
5286785dbca34e77b6b4340fcb13714a06322882
diff --git a/src/Manager.php b/src/Manager.php index <HASH>..<HASH> 100644 --- a/src/Manager.php +++ b/src/Manager.php @@ -257,7 +257,7 @@ class Manager if (preg_match_all("/$pattern/siU", $file->getContents(), $matches)) { foreach ($matches[2] as $key) { try { - list($fileName, $keyName) = explode('.', $key); + list($fileName, $keyName) = explode('.', $key, 2); } catch (\ErrorException $e) { continue; } diff --git a/tests/SyncCommandTest.php b/tests/SyncCommandTest.php index <HASH>..<HASH> 100644 --- a/tests/SyncCommandTest.php +++ b/tests/SyncCommandTest.php @@ -10,7 +10,7 @@ class SyncCommandTest extends TestCase file_put_contents(__DIR__.'/views_temp/user.blade.php', '{{ trans(\'user.name\') }} {{ trans(\'user.age\') }}'); mkdir(__DIR__.'/views_temp/user'); - file_put_contents(__DIR__.'/views_temp/user/index.blade.php', "{{ trans('user.city') }}"); + file_put_contents(__DIR__.'/views_temp/user/index.blade.php', "{{ trans('user.city') }} {{ trans('user.code.initial') }}"); $this->createTempFiles([ 'en' => ['user' => "<?php\n return ['name' => 'Name'];"], @@ -22,12 +22,14 @@ class SyncCommandTest extends TestCase $userENFile = (array) include $this->app['config']['langman.path'].'/en/user.php'; $userNlFile = (array) include $this->app['config']['langman.path'].'/nl/user.php'; - $this->assertArrayHasKey('name', $userENFile, 'en'); - $this->assertArrayHasKey('age', $userENFile, 'en'); - $this->assertArrayHasKey('city', $userENFile, 'en'); - $this->assertArrayHasKey('name', $userNlFile, 'nl'); - $this->assertArrayHasKey('age', $userNlFile, 'nl'); - $this->assertArrayHasKey('city', $userNlFile, 'nl'); + $this->assertArrayHasKey('name', $userENFile); + $this->assertArrayHasKey('initial', $userENFile['code']); + $this->assertArrayHasKey('age', $userENFile); + $this->assertArrayHasKey('city', $userENFile); + $this->assertArrayHasKey('name', $userNlFile); + $this->assertArrayHasKey('initial', $userNlFile['code']); + $this->assertArrayHasKey('age', $userNlFile); + $this->assertArrayHasKey('city', $userNlFile); array_map('unlink', glob(__DIR__.'/views_temp/user/index.blade.php')); array_map('rmdir', glob(__DIR__.'/views_temp/user'));
handle nested keys in sync command
themsaid_laravel-langman
train
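The langman fix above hinges on limiting the split: only the first dot separates the language file from the key, so a key such as "user.code.initial" keeps its nested remainder instead of losing everything after the second dot. A rough Python sketch of the same idea (the project itself is PHP; the helper name below is made up for illustration, and Python's split with maxsplit=1 plays the role of explode('.', $key, 2)):

def split_translation_key(key):
    parts = key.split(".", 1)          # at most one cut, like PHP's explode('.', $key, 2)
    if len(parts) != 2:
        raise ValueError("not a dotted key: %r" % key)
    file_name, key_name = parts
    return file_name, key_name

assert split_translation_key("user.name") == ("user", "name")
assert split_translation_key("user.code.initial") == ("user", "code.initial")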
25468ab1dfdaa8cbadb4b886eb31edbe9b3364b6
diff --git a/test/unit/model/v2/test_driver_section.py b/test/unit/model/v2/test_driver_section.py index <HASH>..<HASH> 100644 --- a/test/unit/model/v2/test_driver_section.py +++ b/test/unit/model/v2/test_driver_section.py @@ -134,13 +134,7 @@ def _model_driver_provider_name_not_nullable_when_vagrant_section_data(): ['_model_driver_provider_name_not_nullable_when_vagrant_section_data'], indirect=True) def test_driver_provider_name_not_nullable_when_vagrant_driver(_config): - x = { - 'driver': [{ - 'provider': [{ - 'name': ['unallowed value None', 'null value not allowed'] - }] - }] - } + x = {'driver': [{'provider': [{'name': ['null value not allowed']}]}]} assert x == schema_v2.validate(_config)
Corrected missed test from #<I>
ansible_molecule
train
dbf0e6b64c9a680757b50740a9c9340d62b87c50
diff --git a/tests/acceptance/admin/contacts/CreatePageCest.php b/tests/acceptance/admin/contacts/CreatePageCest.php index <HASH>..<HASH> 100644 --- a/tests/acceptance/admin/contacts/CreatePageCest.php +++ b/tests/acceptance/admin/contacts/CreatePageCest.php @@ -19,10 +19,6 @@ class CreatePageCest { /** * @dataProvider testContactData - * - * @param Admin $I - * @param Example $data - * @throws \Exception */ public function ensureICanSetEntityDataWithPassport(Admin $I, Example $data): void { @@ -30,21 +26,14 @@ class CreatePageCest $I->needPage(Url::to('@contact/create')); $I->see('Create contact', 'button'); $I->executeJS('scroll(0,1000);'); - $I->click('#passport-data-box button'); $createPage->fillFormData($data['passport']); $I->click('#legal-entity-box button'); - $I->expectThrowable(InvalidElementStateException::class, function () use ($createPage, $data) { - $createPage->fillFormData($data['entity']); - }); + $I->expectThrowable(InvalidElementStateException::class, fn () => $createPage->fillFormData($data['entity'])); $I->reloadPage(); } /** * @dataProvider testContactData - * - * @param Admin $I - * @param Example $data - * @throws \Exception */ public function ensureICanSetPassportDataWithEntity(Admin $I, Example $data): void { @@ -52,39 +41,38 @@ class CreatePageCest $I->needPage(Url::to('@contact/create')); $I->see('Create contact', 'button'); $I->executeJS('scroll(0,1000);'); + $createPage->fillFormData($data['organization']); $I->click('#legal-entity-box button'); $createPage->fillFormData($data['entity']); $I->click('#passport-data-box button'); - $I->expectThrowable(InvalidElementStateException::class, function () use ($createPage, $data) { - $createPage->fillFormData($data['passport']); - }); + $I->expectThrowable(InvalidElementStateException::class, fn () => $createPage->fillFormData($data['passport'])); } - /** - * @return array - */ - protected function testContactData(): array + protected function testContactData(): iterable { - return [ - [ - 'passport' => [ - 'inputs' => [ - 'birth_date' => '2000-12-12', - 'passport_no' => '23213', - 'passport_date' => '2001-12-12', - 'passport_by' => 'test test', - ], + yield [ + 'organization' => [ + 'inputs' => [ + 'organization' => 'HiQDev', + ], + ], + 'passport' => [ + 'inputs' => [ + 'birth_date' => '2000-12-12', + 'passport_no' => '23213', + 'passport_date' => '2001-12-12', + 'passport_by' => 'test test', + ], + ], + 'entity' => [ + 'inputs' => [ + 'organization_ru' => '2000-12-12', + 'director_name' => '23213', + 'inn' => '2001-12-12', + 'kpp' => 'test test', ], - 'entity' => [ - 'inputs' => [ - 'organization_ru' => '2000-12-12', - 'director_name' => '23213', - 'inn' => '2001-12-12', - 'kpp' => 'test test', - ], - 'checkboxes' => [ - 'isresident' => true, - ], + 'checkboxes' => [ + 'isresident' => true, ], ], ];
fixed acceptance contact/CreatePageCest
hiqdev_hipanel-module-client
train
7b004e0e9ac47a5aef4c1b5c4063a6856056e193
diff --git a/composer.lock b/composer.lock index <HASH>..<HASH> 100644 --- a/composer.lock +++ b/composer.lock @@ -9,16 +9,16 @@ "packages": [ { "name": "figdice/figdice", - "version": "2.3", + "version": "2.3.1", "source": { "type": "git", - "url": "https://github.com/gabrielzerbib/figdice.git", - "reference": "344c1d96fccdd36bd65c01f65339fe15939dec3b" + "url": "https://github.com/figdice/figdice.git", + "reference": "d57380c033fe5e0b7411dd96c2ecc9c558855bc9" }, "dist": { "type": "zip", - "url": "https://api.github.com/repos/gabrielzerbib/figdice/zipball/344c1d96fccdd36bd65c01f65339fe15939dec3b", - "reference": "344c1d96fccdd36bd65c01f65339fe15939dec3b", + "url": "https://api.github.com/repos/figdice/figdice/zipball/d57380c033fe5e0b7411dd96c2ecc9c558855bc9", + "reference": "d57380c033fe5e0b7411dd96c2ecc9c558855bc9", "shasum": "" }, "require": { @@ -58,7 +58,7 @@ "views", "xml" ], - "time": "2016-01-01 13:20:25" + "time": "2016-01-05 08:33:30" }, { "name": "psr/http-message", diff --git a/src/FigDice.php b/src/FigDice.php index <HASH>..<HASH> 100644 --- a/src/FigDice.php +++ b/src/FigDice.php @@ -53,10 +53,10 @@ class FigDice /** - * Create new FigDice view + * Create new FigDice view. * * @param string $templatesPath Path to templates directory - * @param array $settings Twig environment settings + * @param array $settings environment settings */ public function __construct($templatesPath = '.', $settings = array()) { @@ -79,7 +79,7 @@ class FigDice } /** - * Binds data to placeholders in template + * Binds data to placeholders in template. * * @param $placeholder * @param $data @@ -90,7 +90,7 @@ class FigDice } /** - * Renders template to the ResponseInterface stream + * Renders template from file to the ResponseInterface stream. * * @param ResponseInterface $response * @param $template @@ -105,7 +105,7 @@ class FigDice } /** - * Renders template from provided string. + * Renders template from string to the ResponseInterface stream. * * @param ResponseInterface $response * @param $templateString diff --git a/test/FigDiceTest.php b/test/FigDiceTest.php index <HASH>..<HASH> 100644 --- a/test/FigDiceTest.php +++ b/test/FigDiceTest.php @@ -159,9 +159,9 @@ TEMPLATE; ->method('getBody') ->willReturn($body); $template = <<<TEMPLATE -<fig:template> - <script src="/assets/require.js"></script> +<fig:template xmlns:fig="http://www.figdice.org/"> <link href="/assets/style.css" rel="stylesheet" /> + <script src="/assets/require.js"></script> <script> <fig:attr name="src" value="url" /> </script>
Updated tests to cover fixed bug in FigDice
intraworq_FigDice-View
train
b87c4d647523c498751a583721d8d58720d6b332
diff --git a/plugins/tradingAdvisor/baseTradingMethod.js b/plugins/tradingAdvisor/baseTradingMethod.js index <HASH>..<HASH> 100644 --- a/plugins/tradingAdvisor/baseTradingMethod.js +++ b/plugins/tradingAdvisor/baseTradingMethod.js @@ -91,6 +91,8 @@ var Base = function() { this.asyncTick = false; this.candlePropsCacheSize = 1000; + this._prevAdvice; + this.candleProps = { open: [], high: [], @@ -111,12 +113,6 @@ var Base = function() { // let's run the implemented starting point this.init(); - // should be set up now, check some things - // to make sure everything is implemented - // correctly. - if(!this.name) - log.warn('Warning, trading method has no name'); - if(!config.debug || !this.log) this.log = function() {}; @@ -266,16 +262,20 @@ Base.prototype.addIndicator = function(name, type, parameters) { } Base.prototype.advice = function(newPosition) { - var advice = 'soft'; - if(newPosition) { - advice = newPosition; - } + // ignore soft advice coming from legacy + // strategies. + if(!newPosition) + return; + + // ignore if advice equals previous advice + if(newPosition === this._prevAdvice) + return; + + this._prevAdvice = newPosition; - let candle = this.candle; - candle.start = candle.start.clone(); _.defer(function() { this.emit('advice', { - recommendation: advice, + recommendation: newPosition, portfolio: 1, candle });
only propagate different advice, fix #<I>
askmike_gekko
train
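The Gekko change above boils down to two filters in front of the advice event: drop empty ("soft") advice coming from legacy strategies, and drop advice equal to the previous one. A rough Python sketch of that filtering, with made-up names (the plugin itself is JavaScript):

class AdviceEmitter:
    """Drop empty ("soft") advice and consecutive duplicates; emit everything else."""

    def __init__(self, emit):
        self._emit = emit           # callback that receives the new position
        self._prev = None

    def advise(self, position):
        if not position:            # legacy "soft" advice carries no position
            return
        if position == self._prev:  # same advice as last time, nothing to do
            return
        self._prev = position
        self._emit(position)

emitted = []
emitter = AdviceEmitter(emitted.append)
for p in (None, "long", "long", "short", "short", "long"):
    emitter.advise(p)
assert emitted == ["long", "short", "long"]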
f48b567ffbd6bb935f7eb447309860ef1185f50a
diff --git a/liquibase-core/src/main/java/liquibase/diff/compare/core/DefaultDatabaseObjectComparator.java b/liquibase-core/src/main/java/liquibase/diff/compare/core/DefaultDatabaseObjectComparator.java index <HASH>..<HASH> 100644 --- a/liquibase-core/src/main/java/liquibase/diff/compare/core/DefaultDatabaseObjectComparator.java +++ b/liquibase-core/src/main/java/liquibase/diff/compare/core/DefaultDatabaseObjectComparator.java @@ -3,6 +3,7 @@ package liquibase.diff.compare.core; import liquibase.database.Database; import liquibase.diff.ObjectDifferences; import liquibase.diff.compare.CompareControl; +import liquibase.diff.compare.DatabaseObjectComparatorFactory; import liquibase.structure.DatabaseObject; import liquibase.diff.compare.DatabaseObjectComparator; import liquibase.diff.compare.DatabaseObjectComparatorChain; @@ -30,6 +31,10 @@ public final class DefaultDatabaseObjectComparator implements DatabaseObjectComp @Override public boolean isSameObject(DatabaseObject databaseObject1, DatabaseObject databaseObject2, Database accordingTo, DatabaseObjectComparatorChain chain) { + if (databaseObject1.getSchema() != null && databaseObject2.getSchema() != null && !DatabaseObjectComparatorFactory.getInstance().isSameObject(databaseObject1.getSchema(), databaseObject2.getSchema(), accordingTo)) { + return false; + } + if (databaseObject1.getClass().isAssignableFrom(databaseObject2.getClass()) || databaseObject2.getClass().isAssignableFrom(databaseObject1.getClass())) { return nameMatches(databaseObject1, databaseObject2, accordingTo); }
CORE-<I> Multi-schema snapshot bugfixes Handle schema name in comparisons
liquibase_liquibase
train
c5439df03a6cb771455ecdb9c7583c95a821ad7f
diff --git a/ui/app/services/system.js b/ui/app/services/system.js index <HASH>..<HASH> 100644 --- a/ui/app/services/system.js +++ b/ui/app/services/system.js @@ -28,7 +28,7 @@ export default Service.extend({ const token = this.get('token'); return PromiseArray.create({ - promise: token.authorizedRequest(`/${namespace}/regions`).then(res => res.json()), + promise: token.authorizedRawRequest(`/${namespace}/regions`).then(res => res.json()), }); }), diff --git a/ui/app/services/token.js b/ui/app/services/token.js index <HASH>..<HASH> 100644 --- a/ui/app/services/token.js +++ b/ui/app/services/token.js @@ -1,9 +1,12 @@ -import Service from '@ember/service'; +import Service, { inject as service } from '@ember/service'; import { computed } from '@ember/object'; import { assign } from '@ember/polyfills'; +import queryString from 'query-string'; import fetch from 'nomad-ui/utils/fetch'; export default Service.extend({ + system: service(), + secret: computed({ get() { return window.sessionStorage.nomadTokenSecret; @@ -19,7 +22,12 @@ export default Service.extend({ }, }), - authorizedRequest(url, options = { credentials: 'include' }) { + // All non Ember Data requests should go through authorizedRequest. + // However, the request that gets regions falls into that category. + // This authorizedRawRequest is necessary in order to fetch data + // with the guarantee of a token but without the automatic region + // param since the region cannot be known at this point. + authorizedRawRequest(url, options = { credentials: 'include' }) { const headers = {}; const token = this.get('secret'); @@ -29,4 +37,20 @@ export default Service.extend({ return fetch(url, assign(options, { headers })); }, + + authorizedRequest(url, options) { + const region = this.get('system.activeRegion'); + + if (region) { + url = addParams(url, { region }); + } + + return this.authorizedRawRequest(url, options); + }, }); + +function addParams(url, params) { + const paramsStr = queryString.stringify(params); + const delimiter = url.includes('?') ? '&' : '?'; + return `${url}${delimiter}${paramsStr}`; +}
Add the region qp to all requests made through the token service
hashicorp_nomad
train
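The helper added in the Nomad UI change above has one subtle point: the region parameter must be appended with "&" when the URL already carries a query string and with "?" otherwise. A rough Python equivalent of that behaviour, for illustration only (the real code is Ember/JavaScript using the query-string package):

from urllib.parse import urlencode

def add_params(url, params):
    if not params:
        return url
    delimiter = "&" if "?" in url else "?"
    return url + delimiter + urlencode(params)

assert add_params("/v1/jobs", {"region": "eu-west"}) == "/v1/jobs?region=eu-west"
assert add_params("/v1/jobs?prefix=web", {"region": "eu-west"}) == "/v1/jobs?prefix=web&region=eu-west"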
6744fd7c5010fd49c0a471bac661a41f9a9e4726
diff --git a/plugin/planned-notification/Listener/PlannedNotificationListener.php b/plugin/planned-notification/Listener/PlannedNotificationListener.php index <HASH>..<HASH> 100644 --- a/plugin/planned-notification/Listener/PlannedNotificationListener.php +++ b/plugin/planned-notification/Listener/PlannedNotificationListener.php @@ -78,13 +78,17 @@ class PlannedNotificationListener { if ($event instanceof LogRoleSubscribeEvent) { $role = $event->getRole(); - $this->manager->generateScheduledTasks( - $role->getWorkspace(), - $event->getActionKey(), - $event->getReceiver(), - $event->getReceiverGroup(), - $role - ); + $workspace = $role->getWorkspace(); + + if (!empty($workspace)) { + $this->manager->generateScheduledTasks( + $workspace, + $event->getActionKey(), + $event->getReceiver(), + $event->getReceiverGroup(), + $role + ); + } } } }
Fixes bug with platform roles (#<I>)
claroline_Distribution
train
5f1eaa7be38b4eb2a060b273051bd5ea7901a32e
diff --git a/jsonweb/token.go b/jsonweb/token.go index <HASH>..<HASH> 100644 --- a/jsonweb/token.go +++ b/jsonweb/token.go @@ -89,6 +89,8 @@ type Token struct { KeyID string `json:"kid"` // Permissions is the set of authorized permissions for the token Permissions []influxdb.Permission `json:"permissions"` + // UserID for the token + UserID influxdb.ID `json:"uid,omitempty"` } // Allowed returns whether or not a permission is allowed based @@ -118,10 +120,9 @@ func (t *Token) Identifier() influxdb.ID { return *id } -// GetUserID returns an invalid id as tokens are generated -// with permissions rather than for or by a particular user +// GetUserID returns the user ID for the token func (t *Token) GetUserID() influxdb.ID { - return influxdb.InvalidID() + return t.UserID } // Kind returns the string "jwt" which is used for auditing
feat: include user ID with web token (#<I>)
influxdata_influxdb
train
cc4b91564784ba06fb9c3512fe25131d031e7251
diff --git a/payu/experiment.py b/payu/experiment.py index <HASH>..<HASH> 100644 --- a/payu/experiment.py +++ b/payu/experiment.py @@ -535,6 +535,7 @@ class Experiment(object): if curdir: os.chdir(curdir) + self.runlog.create_manifest() if self.runlog.enabled: self.runlog.commit() diff --git a/payu/runlog.py b/payu/runlog.py index <HASH>..<HASH> 100644 --- a/payu/runlog.py +++ b/payu/runlog.py @@ -46,7 +46,6 @@ class Runlog(object): self.config = runlog_config self.manifest = [] - self.create_manifest() self.payu_config_dir = os.path.join(os.path.expanduser('~'), '.payu') self.token_path = os.path.join(self.payu_config_dir, 'tokens.yaml')
Runlog manifest generated at experiment run Previously, the model runlog manifest was generated at creation in __init__, which was causing issues for MITgcm which has a dynamically generated config file list. We now generate this list prior to the `runlog.commit` command, which appears to work without issue.
payu-org_payu
train
0466e12c317851364b0c772f8f1470fe2d3f20be
diff --git a/pylti/flask.py b/pylti/flask.py index <HASH>..<HASH> 100644 --- a/pylti/flask.py +++ b/pylti/flask.py @@ -155,20 +155,23 @@ class LTI(object): return session['roles'] def is_role(self, role): - """ + """ Verify if user is in role :param: role: role to verify against :return: if user is in role :exception: LTIException if role is unknown - """ + """ log.debug("is_role {}".format(role)) - roles = session['roles'] + roles = session['roles'].split(',') if role in LTI_ROLES: list = LTI_ROLES[role] + # find the intersection of the roles + roles = set(LTI_ROLES[role]) & set(roles) + is_user_role_there = len(roles) >= 1 log.debug("is_role roles_list={} role={} in list={}" - .format(list, roles, roles in list)) - return roles in list + .format(list, roles, is_user_role_there)) + return is_user_role_there else: raise LTIException("Unknown role {}.".format(role))
Able to have multiple roles on Sakai <I>+ in LTI.is_role().
mitodl_pylti
train
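The pylti fix above treats the launch roles as a comma-separated list and checks for a non-empty intersection with the configured role group, instead of looking the whole string up in the list. A rough Python sketch of that check; the LTI_ROLES values below are placeholders for illustration, not the library's actual mapping:

# Placeholder role groups; pylti's real LTI_ROLES mapping differs.
LTI_ROLES = {
    "instructor": ["Instructor", "urn:lti:role:ims/lis/Instructor"],
    "student": ["Learner", "Student"],
}

def is_role(raw_roles, role):
    """True if any of the comma-separated launch roles falls in the named group."""
    if role not in LTI_ROLES:
        raise ValueError("Unknown role %s." % role)
    user_roles = {r.strip() for r in raw_roles.split(",")}
    return bool(user_roles & set(LTI_ROLES[role]))

assert is_role("Learner,Instructor", "instructor")
assert not is_role("Learner", "instructor")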
8f8e2c2f41500378f7fa408f6fdc95638e3f6fbb
diff --git a/tests/WhenMonthlyTest.php b/tests/WhenMonthlyTest.php index <HASH>..<HASH> 100755 --- a/tests/WhenMonthlyTest.php +++ b/tests/WhenMonthlyTest.php @@ -564,7 +564,7 @@ class WhenMonthlyTest extends PHPUnit_Framework_TestCase /** * Every three months (quarterly) on the first Monday of the month, - * starting January 7 2019, until February 2, 2021 (issue #TODO) + * starting January 7 2019, until February 2, 2021 (issue #71) * DTSTART;TZID=America/New_York:20190107T170000 * RRULE:FREQ=MONTHLY;INTERVAL=3;BYDAY=1MO;WKST=MO;UNTIL=2021-02-01T18:00:00-0500 */ diff --git a/tests/WhenOccurrencesBetweenTest.php b/tests/WhenOccurrencesBetweenTest.php index <HASH>..<HASH> 100644 --- a/tests/WhenOccurrencesBetweenTest.php +++ b/tests/WhenOccurrencesBetweenTest.php @@ -1,4 +1,4 @@ -5<?php +<?php use When\When; @@ -373,7 +373,7 @@ class WhenOccurrencesBetweenTest extends PHPUnit_Framework_TestCase /** * Every three months (quarterly) on the first Monday of the month, - * starting January 7 2019, until February 2, 2021 (issue #TODO) + * starting January 7 2019, until February 2, 2021 (issue #71) * DTSTART;TZID=America/Los_Angeles:20190107T170000 * RRULE:FREQ=MONTHLY;INTERVAL=3;BYDAY=1MO;WKST=MO;UNTIL=2021-02-01T18:00:00-0800 */
Add issue number (<I>) to latest tests.
tplaner_When
train
7757ec2f434149485536fa98ecfe968c9f9ee391
diff --git a/lib/config.js b/lib/config.js index <HASH>..<HASH> 100644 --- a/lib/config.js +++ b/lib/config.js @@ -95,7 +95,7 @@ module.exports = function() { var m = key.match(rx); if (m) { var current = envconfig; - var names = m[1].toLowerCase().split('.'); + var names = m[1].split('__'); var name = null; for (;;) { name = names.shift(); @@ -108,7 +108,7 @@ module.exports = function() { return //XXX log error else current[name] = obj = {}; - current = obj + current = obj; } else { break } diff --git a/test/configtest.js b/test/configtest.js index <HASH>..<HASH> 100644 --- a/test/configtest.js +++ b/test/configtest.js @@ -47,23 +47,23 @@ describe("config", function() { it("should merge with environment variables", function() { var loadConfig = configloader(); //use default paths assert(process.env.NODE_ENV==='test'); //NODE_ENV=test will always be defined - process.env.CONFIGTEST_INBOTH = 'env'; - process.env.CONFIGTEST_ENVONLY = true; - process.env['CONFIGTEST_NESTED.D'] = true; - process.env['CONFIGTEST_NESTED.A'] = null; - process.env['CONFIGTEST_N1.N2.A'] = 1; - process.env['CONFIGTEST_N1.N2'] = '{"B":2}'; + process.env.CONFIGTEST_inboth = 'env'; + process.env.CONFIGTEST_envonly = true; + process.env['CONFIGTEST_nested__d'] = true; + process.env['CONFIGTEST_nested__a'] = null; + process.env['CONFIGTEST_N1__N2__a'] = 1; + process.env['CONFIGTEST_N1__N2'] = '{"B":2}'; process.env['CONFIGTEST_N3'] = 1; - process.env['CONFIGTEST_N3.A'] = 2; + process.env['CONFIGTEST_N3__a'] = 2; var config = loadConfig("configtest"); assert.strictEqual(config.defaultonly, true); assert.strictEqual(config.envonly, true); assert.equal(config.inboth, "env"); assert.deepEqual(config.nested, {a:null, b:true, d:true}); - assert.deepEqual(config.n1, {n2:{a:1, B:2}}); + assert.deepEqual(config.N1, {N2:{a:1, B:2}}); // CONFIGTEST_N3.A is ignored, CONFIGTEST_N3 is unchanged: - assert.deepEqual(config.n3, 1); + assert.deepEqual(config.N3, 1); assert(!config.overrideonly); });
change config environment variable pattern for shell compatibility
onecommons_base
train
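The config change above swaps "." for "__" as the nesting separator because dots are not valid in shell variable names, and it leaves an existing scalar untouched when a deeper override points inside it. A rough Python sketch of that parsing rule, with made-up prefix and variable names (the project itself is Node.js):

def env_overrides(prefix, environ):
    """Fold PREFIX_a__b=... variables into a nested dict; "__" marks nesting."""
    config = {}
    for key, value in environ.items():
        if not key.startswith(prefix + "_"):
            continue
        *parents, leaf = key[len(prefix) + 1:].split("__")
        current = config
        blocked = False
        for name in parents:
            child = current.setdefault(name, {})
            if not isinstance(child, dict):  # a scalar already lives here; skip this override
                blocked = True
                break
            current = child
        if not blocked:
            current[leaf] = value
    return config

env = {"APP_inboth": "env", "APP_nested__d": "true", "APP_N1__N2__a": "1"}
assert env_overrides("APP", env) == {"inboth": "env", "nested": {"d": "true"}, "N1": {"N2": {"a": "1"}}}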
2ecd477c6e310495a98c6c353f00fa1f4df94fe4
diff --git a/chess/__init__.py b/chess/__init__.py index <HASH>..<HASH> 100644 --- a/chess/__init__.py +++ b/chess/__init__.py @@ -3459,10 +3459,10 @@ class Board(object): board.chess960 = self.chess960 board.pawns = self.pawns - board.knights= self.knights + board.knights = self.knights board.bishops = self.bishops board.rooks = self.rooks - board.queens= self.queens + board.queens = self.queens board.kings = self.kings board.occupied_co[WHITE] = self.occupied_co[WHITE]
Stylistic fix in Board.copy()
niklasf_python-chess
train
2270042b3bc888f9ded646384f2f824c953199a2
diff --git a/example.php b/example.php index <HASH>..<HASH> 100644 --- a/example.php +++ b/example.php @@ -25,3 +25,10 @@ var_dump($ipAnonymizer->anonymize('2a03:2880:2110:df07:face:b00c::1')); var_dump($ipAnonymizer->anonymize('2610:28:3090:3001:dead:beef:cafe:fed3')); // returns 2610:28:3090:3001:: + +// You can use this class also in a static way: +var_dump(IpAnonymizer::anonymizeIp('192.168.178.123')); +// returns 192.168.178.0 + +var_dump(IpAnonymizer::anonymizeIp('2610:28:3090:3001:dead:beef:cafe:fed3')); +// returns 2610:28:3090:3001:: diff --git a/src/IpAnonymizer.php b/src/IpAnonymizer.php index <HASH>..<HASH> 100644 --- a/src/IpAnonymizer.php +++ b/src/IpAnonymizer.php @@ -1,4 +1,5 @@ <?php + namespace geertw\IpAnonymizer; class IpAnonymizer { @@ -18,6 +19,17 @@ class IpAnonymizer { * @param $address string IP address that must be anonymized * @return string The anonymized IP address. Returns an empty string when the IP address is invalid. */ + public static function anonymizeIp($address) { + $anonymizer = new IpAnonymizer(); + return $anonymizer->anonymize($address); + } + + /** + * Anonymize an IPv4 or IPv6 address. + * + * @param $address string IP address that must be anonymized + * @return string The anonymized IP address. Returns an empty string when the IP address is invalid. + */ public function anonymize($address) { $packedAddress = inet_pton($address);
Fix #1, add a static method to anonymize addresses
geertw_php-ip-anonymizer
train
05abc7f782cd9edf6f1acb414ef554a42bfaf9ab
diff --git a/billing/gateway.py b/billing/gateway.py index <HASH>..<HASH> 100644 --- a/billing/gateway.py +++ b/billing/gateway.py @@ -119,6 +119,7 @@ def get_gateway(gateway, *args, **kwargs): clazz = getattr(gateway_module, gateway_class_name) except AttributeError: raise GatewayNotConfigured("Missing %s class in the gateway module." %gateway_class_name) + gateway_cache[gateway] = clazz # We either hit the cache or load our class object, let's return an instance # of it. - return clazz(*args, **kwargs) \ No newline at end of file + return clazz(*args, **kwargs) diff --git a/billing/integration.py b/billing/integration.py index <HASH>..<HASH> 100644 --- a/billing/integration.py +++ b/billing/integration.py @@ -64,4 +64,5 @@ def get_integration(integration, *args, **kwargs): klass = getattr(integration_module, integration_class_name) except AttributeError: raise IntegrationNotConfigured("Missing %s class in the integration module." %integration_class_name) + integration_cache[integration] = klass return klass(*args, **kwargs)
Add to cache if successfully found.
agiliq_merchant
train
c3218300334ba8f916f5208e8df4e426efb69dfb
diff --git a/actionpack/lib/action_controller/url_rewriter.rb b/actionpack/lib/action_controller/url_rewriter.rb index <HASH>..<HASH> 100644 --- a/actionpack/lib/action_controller/url_rewriter.rb +++ b/actionpack/lib/action_controller/url_rewriter.rb @@ -29,16 +29,16 @@ module ActionController # Generate a url based on the options provided, default_url_options and the # routes defined in routes.rb. The following options are supported: # - # * <tt>:only_path</tt> If true, the relative url is returned. Defaults to +false+. - # * <tt>:protocol</tt> The protocol to connect to. Defaults to 'http'. - # * <tt>:host</tt> Specifies the host the link should be targetted at. + # * <tt>:only_path</tt> - If true, the relative url is returned. Defaults to +false+. + # * <tt>:protocol</tt> - The protocol to connect to. Defaults to 'http'. + # * <tt>:host</tt> - Specifies the host the link should be targetted at. # If <tt>:only_path</tt> is false, this option must be # provided either explicitly, or via +default_url_options+. - # * <tt>:port</tt> Optionally specify the port to connect to. - # * <tt>:anchor</tt> An anchor name to be appended to the path. - # * <tt>:skip_relative_url_root</tt> If true, the url is not constructed using the + # * <tt>:port</tt> - Optionally specify the port to connect to. + # * <tt>:anchor</tt> - An anchor name to be appended to the path. + # * <tt>:skip_relative_url_root</tt> - If true, the url is not constructed using the # +relative_url_root+ set in ActionController::AbstractRequest.relative_url_root. - # * <tt>:trailing_slash</tt> If true, adds a trailing slash, as in "/archive/2009/" + # * <tt>:trailing_slash</tt> - If true, adds a trailing slash, as in "/archive/2009/" # # Any other key (<tt>:controller</tt>, <tt>:action</tt>, etc.) given to # +url_for+ is forwarded to the Routes module.
revised conventions in url_for docs
rails_rails
train
de3f9d82a0e974146133df162e1ad333b49e6807
diff --git a/framework/core/src/Core/Commands/UploadAvatarCommand.php b/framework/core/src/Core/Commands/UploadAvatarCommand.php index <HASH>..<HASH> 100644 --- a/framework/core/src/Core/Commands/UploadAvatarCommand.php +++ b/framework/core/src/Core/Commands/UploadAvatarCommand.php @@ -1,5 +1,6 @@ <?php namespace Flarum\Core\Commands; +use Psr\Http\Message\UploadedFileInterface; use RuntimeException; class UploadAvatarCommand @@ -7,13 +8,13 @@ class UploadAvatarCommand public $userId; /** - * @var \Symfony\Component\HttpFoundation\File\UploadedFile + * @var \Psr\Http\Message\UploadedFileInterface */ public $file; public $actor; - public function __construct($userId, $file, $actor) + public function __construct($userId, UploadedFileInterface $file, $actor) { if (empty($userId) || !intval($userId)) { throw new RuntimeException('No valid user ID specified.'); diff --git a/framework/core/src/Core/Handlers/Commands/UploadAvatarCommandHandler.php b/framework/core/src/Core/Handlers/Commands/UploadAvatarCommandHandler.php index <HASH>..<HASH> 100644 --- a/framework/core/src/Core/Handlers/Commands/UploadAvatarCommandHandler.php +++ b/framework/core/src/Core/Handlers/Commands/UploadAvatarCommandHandler.php @@ -40,14 +40,16 @@ class UploadAvatarCommandHandler // throw an exception otherwise. $user->assertCan($command->actor, 'edit'); - $manager = new ImageManager(array('driver' => 'imagick')); - $manager->make($command->file->getRealPath())->fit(100, 100)->save(); + $tmpFile = tempnam(sys_get_temp_dir(), 'avatar'); + $command->file->moveTo($tmpFile); - $filename = $command->file->getFilename(); $uploadName = Str::lower(Str::quickRandom()) . '.jpg'; + $manager = new ImageManager(array('driver' => 'imagick')); + $manager->make($tmpFile)->fit(100, 100)->save(); + $mount = new MountManager([ - 'source' => new Filesystem(new Local($command->file->getPath())), + 'source' => new Filesystem(new Local(pathinfo($tmpFile, PATHINFO_DIRNAME))), 'target' => $this->uploadDir, ]); @@ -59,7 +61,7 @@ class UploadAvatarCommandHandler event(new AvatarWillBeUploaded($user, $command)); - $mount->move("source://$filename", "target://$uploadName"); + $mount->move("source://".pathinfo($tmpFile, PATHINFO_BASENAME), "target://$uploadName"); $user->save(); $this->dispatchEventsFor($user);
Update avatar uploading code for psr-7 Not sure if a tmp file is the best way, but it works
flarum_core
train
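The Flarum change above adapts avatar uploads to PSR-7 by first persisting the uploaded stream to a temporary file (the image library reads from a real path), then storing the processed file under a random name. A rough Python sketch of that flow, with made-up names; the real code is PHP using Intervention Image and Flysystem, and the resize step is only indicated by a comment here:

import os, secrets, shutil, tempfile

def store_avatar(uploaded_stream, upload_dir):
    # Persist the upload to a temporary file first; the image library reads from a path.
    fd, tmp_path = tempfile.mkstemp(prefix="avatar")
    with os.fdopen(fd, "wb") as tmp:
        shutil.copyfileobj(uploaded_stream, tmp)
    # ... resize/crop the image at tmp_path to 100x100 here ...
    upload_name = secrets.token_hex(8) + ".jpg"   # random target name
    shutil.move(tmp_path, os.path.join(upload_dir, upload_name))
    return upload_name

# e.g. store_avatar(open("photo.jpg", "rb"), "/var/app/assets/avatars")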
5ace2f0ec24d58899a9dc649e128e4c0064e9134
diff --git a/metpy/calc/tests/test_basic.py b/metpy/calc/tests/test_basic.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tests/test_basic.py +++ b/metpy/calc/tests/test_basic.py @@ -4,7 +4,8 @@ import numpy as np -from metpy.calc.basic import * # noqa: F403 +from metpy.calc import (coriolis_parameter, get_wind_components, get_wind_dir, get_wind_speed, + heat_index, pressure_to_height_std, windchill) from metpy.testing import assert_almost_equal, assert_array_almost_equal, assert_array_equal from metpy.units import units diff --git a/metpy/calc/tests/test_kinematics.py b/metpy/calc/tests/test_kinematics.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tests/test_kinematics.py +++ b/metpy/calc/tests/test_kinematics.py @@ -4,7 +4,8 @@ import numpy as np -from metpy.calc.kinematics import * # noqa: F403 +from metpy.calc import (advection, convergence_vorticity, geostrophic_wind, h_convergence, + v_vorticity) from metpy.constants import g, omega, Re from metpy.testing import assert_almost_equal, assert_array_equal from metpy.units import concatenate, units diff --git a/metpy/calc/tests/test_thermo.py b/metpy/calc/tests/test_thermo.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tests/test_thermo.py +++ b/metpy/calc/tests/test_thermo.py @@ -4,7 +4,10 @@ import numpy as np -from metpy.calc.thermo import * # noqa: F403 +from metpy.calc import (dewpoint, dewpoint_rh, dry_lapse, equivalent_potential_temperature, + lcl, lfc, mixing_ratio, moist_lapse, parcel_profile, + potential_temperature, saturation_mixing_ratio, + saturation_vapor_pressure, vapor_pressure) from metpy.testing import assert_almost_equal, assert_array_almost_equal from metpy.units import units diff --git a/metpy/calc/tests/test_tools.py b/metpy/calc/tests/test_tools.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tests/test_tools.py +++ b/metpy/calc/tests/test_tools.py @@ -4,7 +4,7 @@ import numpy as np -from metpy.calc.tools import * # noqa: F403 +from metpy.calc import find_intersections, nearest_intersection_idx, resample_nn_1d from metpy.testing import assert_array_almost_equal, assert_array_equal diff --git a/metpy/calc/tests/test_turbulence.py b/metpy/calc/tests/test_turbulence.py index <HASH>..<HASH> 100644 --- a/metpy/calc/tests/test_turbulence.py +++ b/metpy/calc/tests/test_turbulence.py @@ -5,7 +5,7 @@ import numpy as np from numpy.testing import assert_almost_equal, assert_array_equal -from metpy.calc.turbulence import * # noqa: F403 +from metpy.calc.turbulence import friction_velocity, get_perturbation, kinematic_flux, tke class TestTurbulenceKineticEnergy(object): diff --git a/metpy/plots/tests/test_skewt.py b/metpy/plots/tests/test_skewt.py index <HASH>..<HASH> 100644 --- a/metpy/plots/tests/test_skewt.py +++ b/metpy/plots/tests/test_skewt.py @@ -7,7 +7,7 @@ from matplotlib.gridspec import GridSpec import numpy as np import pytest -from metpy.plots.skewt import * # noqa: F403 +from metpy.plots import Hodograph, SkewT from metpy.testing import hide_tick_labels, make_figure, test_style from metpy.units import units
MNT: Remove * imports from tests and import from top-level. This makes them better tests of the export machinery, as well as insulates the tests to any possible moves within sub-packages.
Unidata_MetPy
train