hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
3ff33b74bdc54f9cb5d3eaf528a6d00f14faa418
diff --git a/src/Mmi/Security/Auth.php b/src/Mmi/Security/Auth.php index <HASH>..<HASH> 100644 --- a/src/Mmi/Security/Auth.php +++ b/src/Mmi/Security/Auth.php @@ -296,7 +296,6 @@ class Auth { //pobieranie usera i hasła ze zmiennych środowiskowych $this->setIdentity(\Mmi\App\FrontController::getInstance()->getEnvironment()->authUser) ->setCredential(\Mmi\App\FrontController::getInstance()->getEnvironment()->authPassword); - $model = $this->_modelName; $record = $model::authenticate($this->_identity, $this->_credential); //autoryzacja poprawna @@ -306,7 +305,7 @@ class Auth { //odpowiedź 401 \Mmi\App\FrontController::getInstance()->getResponse() ->setHeader('WWW-Authenticate', 'Basic realm="' . $realm . '"') - ->setCodeForbidden() + ->setCodeUnauthorized() ->setContent($errorMessage) ->send(); exit;
fixed forbidden -> unauthorized header http basic auth
milejko_mmi
train
27a001f212d694e9a13f1d325bd5138823336cc7
diff --git a/tests/Transformers/TransformerFactoryTest.php b/tests/Transformers/TransformerFactoryTest.php index <HASH>..<HASH> 100644 --- a/tests/Transformers/TransformerFactoryTest.php +++ b/tests/Transformers/TransformerFactoryTest.php @@ -19,6 +19,7 @@ use GrahamCampbell\Throttle\Transformers\ArrayTransformer; use GrahamCampbell\Throttle\Transformers\RequestTransformer; use GrahamCampbell\Throttle\Transformers\TransformerFactory; use Illuminate\Http\Request; +use InvalidArgumentException; use Mockery; /** @@ -51,27 +52,23 @@ class TransformerFactoryTest extends AbstractTestCase $this->assertInstanceOf(Data::class, $transformer->transform($array, 123, 321)); } - /** - * @expectedException \InvalidArgumentException - * @expectedExceptionMessage The data array does not provide the required ip and route information. - */ public function testEmptyArray() { $factory = new TransformerFactory(); - $transformer = $factory->make([]); - $this->assertInstanceOf(ArrayTransformer::class, $transformer); + $this->expectException(InvalidArgumentException::class); + $this->expectExceptionMessage('The data array does not provide the required ip and route information.'); - $this->assertInstanceOf(Data::class, $transformer->transform([])); + $transformer = $factory->make([]); } - /** - * @expectedException \InvalidArgumentException - * @expectedExceptionMessage An array, or an instance of Illuminate\Http\Request was expected. - */ public function testError() { $factory = new TransformerFactory(); + + $this->expectException(InvalidArgumentException::class); + $this->expectExceptionMessage('An array, or an instance of Illuminate\Http\Request was expected.'); + $transformer = $factory->make(123); } }
Update TransformerFactoryTest.php
GrahamCampbell_Laravel-Throttle
train
5fb925bc11800f83ddb73de9e73a2ee9a0c268f8
diff --git a/tests/frontend/org/voltdb/catalog/TestCatalogDiffs.java b/tests/frontend/org/voltdb/catalog/TestCatalogDiffs.java index <HASH>..<HASH> 100644 --- a/tests/frontend/org/voltdb/catalog/TestCatalogDiffs.java +++ b/tests/frontend/org/voltdb/catalog/TestCatalogDiffs.java @@ -559,7 +559,7 @@ public class TestCatalogDiffs extends TestCase { VoltProjectBuilder builder = new VoltProjectBuilder(); builder.addLiteralSchema("\nCREATE TABLE A (C1 BIGINT NOT NULL, C2 BIGINT NOT NULL);"); - builder.addProcedures(org.voltdb.catalog.ProcedureA.class); + builder.addStmtProcedure("the_requisite_procedure", "select * from A;"); builder.compile(testDir + File.separator + "addpart1.jar"); Catalog catOriginal = catalogForJar(testDir + File.separator + "addpart1.jar");
Fix partitioning error in testcase. Unused procedure in testcase was mispartitioned (the testcase attempts to change a replicated table to partitioned table via a catalog change.)
VoltDB_voltdb
train
35c66b02b5c9f044caeb65b85ef524ac16213639
diff --git a/src/cli.js b/src/cli.js index <HASH>..<HASH> 100755 --- a/src/cli.js +++ b/src/cli.js @@ -65,7 +65,7 @@ if (require.main === module) { ) .option( '-e, --electron-version <value>', - "electron version to package, without the 'v', see https://github.com/atom/electron/releases", + "electron version to package, without the 'v', see https://github.com/electron/electron/releases", ) .option( '--no-overwrite', @@ -73,7 +73,7 @@ if (require.main === module) { ) .option( '-c, --conceal', - 'packages the source code within your app into an archive, defaults to false, see http://electron.atom.io/docs/v0.36.0/tutorial/application-packaging/', + 'packages the source code within your app into an archive, defaults to false, see https://electronjs.org/docs/tutorial/application-packaging', ) .option( '--counter',
Update documentation URLs in the CLI help message (#<I>) This updates URLs in the command-line help to avoid redirects.
jiahaog_nativefier
train
0ca0a805d67830e12a3d3676fac0e4920390c16a
diff --git a/api/src/main/java/org/spout/renderer/api/Camera.java b/api/src/main/java/org/spout/renderer/api/Camera.java index <HASH>..<HASH> 100644 --- a/api/src/main/java/org/spout/renderer/api/Camera.java +++ b/api/src/main/java/org/spout/renderer/api/Camera.java @@ -46,7 +46,7 @@ public class Camera { * * @param projection The projection matrix */ - private Camera(Matrix4f projection) { + public Camera(Matrix4f projection) { this.projection = projection; }
Make the Camera constructor public again We don’t need to restrain camera creation to predefined projection matrices, and this makes more sense than adding a static method instead.
flow_caustic
train
ed522a0e3a5ddfbdda01c426802b088cce2b4191
diff --git a/mike/mkdocs.py b/mike/mkdocs.py index <HASH>..<HASH> 100644 --- a/mike/mkdocs.py +++ b/mike/mkdocs.py @@ -6,7 +6,7 @@ from ruamel import yaml def site_dir(config_file): with open(config_file) as f: - config = yaml.safe_load(f) + config = yaml.load(f) site = config.get('site_dir', 'site') return os.path.join(os.path.dirname(config_file), site)
Don't use `safe_load` when loading YAML to allow users to use stuff like `!!python...` This is less-secure, but since MkDocs already does it, it doesn't actually open up any new attack surface, as far as I can tell.
jimporter_mike
train
12b42dd8a992dfc23851a02cbc36a73594faeb5e
diff --git a/src/index.test.js b/src/index.test.js index <HASH>..<HASH> 100644 --- a/src/index.test.js +++ b/src/index.test.js @@ -1,3 +1,4 @@ +/* eslint-disable no-template-curly-in-string */ import createBabylonOptions from "babylon-options" import pluginTester from "babel-plugin-tester"
Convince eslint that test code is fine.
sebastian-software_babel-plugin-smart-webpack-import
train
e66114ad71b1f45dc011704010f049ea37746d45
diff --git a/lib/websocket.js b/lib/websocket.js index <HASH>..<HASH> 100644 --- a/lib/websocket.js +++ b/lib/websocket.js @@ -73,7 +73,7 @@ function WSConnection() { ended = false; try { - streamData = stanza.parse(Stream, wrap(data)); + streamData = stanza.parse(Stream, data + self.streamEnd); } catch (e) { try { streamData = stanza.parse(Stream, data);
Propery handle stream parsing error conditions
legastero_stanza.io
train
0de013184a35baa6e92ad47f41e47f26d59f03b5
diff --git a/src/languages/time-ago-language-it_IT.js b/src/languages/time-ago-language-it_IT.js index <HASH>..<HASH> 100644 --- a/src/languages/time-ago-language-it_IT.js +++ b/src/languages/time-ago-language-it_IT.js @@ -9,7 +9,7 @@ angular.module('yaru22.angular-timeago').config(function(timeAgoSettings) { seconds: 'meno di un minuto', minute: 'circa un minuto', minutes: '%d minuti', - hour: 'circa un\' ora', + hour: 'circa un\'ora', hours: 'circa %d ore', day: 'un giorno', days: '%d giorni',
Fixed punctuation for hour field in italian
yaru22_angular-timeago
train
f7fbe9a6502ea1390d0de4ff958c4c6f66e57681
diff --git a/DataFixtures/ORM/LoadMediaData.php b/DataFixtures/ORM/LoadMediaData.php index <HASH>..<HASH> 100644 --- a/DataFixtures/ORM/LoadMediaData.php +++ b/DataFixtures/ORM/LoadMediaData.php @@ -50,7 +50,7 @@ class LoadMediaData extends AbstractDataFixture */ public function load(ObjectManager $manager) { - $rootPath = $this->container->get('kernel')->getRootDir() . '/../web/themes/wellcommerce/assets/prod/'; + $rootPath = $this->container->get('kernel')->getRootDir() . '/../web/themes/wellcommerce-default-theme/assets/prod/'; $uploader = $this->container->get('media.manager.admin'); $uploadPath = $uploader->getUploadRootDir('images'); $filesystem = $this->container->get('filesystem');
Changed default theme name (cherry picked from commit <I>af0f<I>fabad<I>c<I>c<I>afea<I>b<I>)
WellCommerce_CouponBundle
train
34c887f70dcd387251503a386d475638acbf3e6e
diff --git a/spam_lists/service_models.py b/spam_lists/service_models.py index <HASH>..<HASH> 100644 --- a/spam_lists/service_models.py +++ b/spam_lists/service_models.py @@ -6,7 +6,7 @@ for remote and local spam listing services ''' from __future__ import unicode_literals -#pylint: disable-msg=redefined-builtin +#pylint: disable=redefined-builtin from builtins import zip, map, str, range, object from dns import name from dns.resolver import NXDOMAIN, query diff --git a/test/unit/test_structures.py b/test/unit/test_structures.py index <HASH>..<HASH> 100644 --- a/test/unit/test_structures.py +++ b/test/unit/test_structures.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -#pylint: disable-msg=redefined-builtin +#pylint: disable=redefined-builtin from builtins import str, range, object from dns import reversename from nose_parameterized import parameterized @@ -13,7 +13,7 @@ SimpleClassificationCodeMap, SumClassificationCodeMap, create_host from test.compat import unittest, Mock -#pylint: disable-msg=too-few-public-methods +#pylint: disable=too-few-public-methods class ClassificationCodeMapTestMixin(object): ''' A base class for tests for classification code map classes diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py index <HASH>..<HASH> 100644 --- a/test/unit/test_utils.py +++ b/test/unit/test_utils.py @@ -4,7 +4,7 @@ from __future__ import unicode_literals from collections import defaultdict from random import shuffle -#pylint: disable-msg=redefined-builtin +#pylint: disable=redefined-builtin from builtins import next, range from nose_parameterized import parameterized from requests.exceptions import ConnectionError, InvalidSchema, InvalidURL, \ diff --git a/test/unit/test_validation.py b/test/unit/test_validation.py index <HASH>..<HASH> 100644 --- a/test/unit/test_validation.py +++ b/test/unit/test_validation.py @@ -4,7 +4,7 @@ from __future__ import unicode_literals import unittest -#pylint: 
disable-msg=redefined-builtin +#pylint: disable=redefined-builtin from builtins import object from nose_parameterized import parameterized
Replace deprecated pylint annotations The "disable-msg" annotation is being replaced with "disable"
piotr-rusin_spam-lists
train
f22516e4207e0cf5b5467164b1660ac1a55488eb
diff --git a/test/spec/directives/ivh-treeview.js b/test/spec/directives/ivh-treeview.js index <HASH>..<HASH> 100644 --- a/test/spec/directives/ivh-treeview.js +++ b/test/spec/directives/ivh-treeview.js @@ -136,7 +136,7 @@ describe('Directive ivhTreeview', function() { scope.bag1[1].children.push({label: 'five panel baseball'}); scope.$apply(); expect($el.find('[title="five panel baseball"]').length).toBe(1); - expect($el.find('[title="baseball"]').hasClass('ivh-treeview-node-leaf')).toBe(false); + expect($el.find('[title="baseball"]').parent().hasClass('ivh-treeview-node-leaf')).toBe(false); }); it('should update when child nodes are added (re-assignment)', function() { @@ -144,7 +144,7 @@ describe('Directive ivhTreeview', function() { scope.bag1[1].children = [{label: 'five panel baseball'}]; scope.$apply(); expect($el.find('[title="five panel baseball"]').length).toBe(1); - expect($el.find('[title="baseball"]').hasClass('ivh-treeview-node-leaf')).toBe(false); + expect($el.find('[title="baseball"]').parent().hasClass('ivh-treeview-node-leaf')).toBe(false); }); it('should allow an options object for overrides', function() {
test: Check for leaf class on correct elements
iVantage_angular-ivh-treeview
train
e0d117ce80c91c6aa22eda69545a79270325ca92
diff --git a/internal/merger/merger.go b/internal/merger/merger.go index <HASH>..<HASH> 100644 --- a/internal/merger/merger.go +++ b/internal/merger/merger.go @@ -99,7 +99,6 @@ func init() { "go_proto_library", }, attrs: []string{ - "compilers", "proto", }, }, {
internal/merger: make "compilers" attribute unmergeable (#<I>) Gazelle won't modify or delete this attribute if it's set in existing rules. Fixes #<I>
bazelbuild_bazel-gazelle
train
5630a767668aafb755052b4183184ea289620031
diff --git a/integration/cluster.go b/integration/cluster.go index <HASH>..<HASH> 100644 --- a/integration/cluster.go +++ b/integration/cluster.go @@ -837,6 +837,7 @@ func NewClusterV3(t *testing.T, cfg *ClusterConfig) *ClusterV3 { clus := &ClusterV3{ cluster: NewClusterByConfig(t, cfg), } + clus.Launch(t) for _, m := range clus.Members { client, err := NewClientV3(m) if err != nil { @@ -844,7 +845,6 @@ func NewClusterV3(t *testing.T, cfg *ClusterConfig) *ClusterV3 { } clus.clients = append(clus.clients, client) } - clus.Launch(t) return clus }
integration: NewClusterV3 should launch cluster before creating clients
etcd-io_etcd
train
9f9758598a82fb0e2ba4f70b22d093cd7c7a2921
diff --git a/core/src/utilities/helpers/routing-helpers.js b/core/src/utilities/helpers/routing-helpers.js index <HASH>..<HASH> 100644 --- a/core/src/utilities/helpers/routing-helpers.js +++ b/core/src/utilities/helpers/routing-helpers.js @@ -361,8 +361,9 @@ class RoutingHelpersClass { getIntentObject(intentLink) { const intentParams = intentLink.split('?intent=')[1]; if (intentParams) { - const elements = intentParams.split('-'); - if (elements.length === 2) { + const firstDash = intentParams.indexOf('-'); + if (firstDash > 0) { + const elements = [intentParams.slice(0, firstDash), intentParams.slice(firstDash + 1)]; // avoids usage of '-' in semantic object and action const semanticObject = elements[0]; const actionAndParams = elements[1].split('?');
hotfix intent param with dash (#<I>)
kyma-project_luigi
train
9713d128cdd9f9e78a3e1778a76620ae8802760b
diff --git a/src/GraphQLController.php b/src/GraphQLController.php index <HASH>..<HASH> 100644 --- a/src/GraphQLController.php +++ b/src/GraphQLController.php @@ -105,7 +105,7 @@ class GraphQLController extends Controller 'query' => $query, 'parsedQuery' => $parsedQuery, ] = $this->handleAutomaticPersistQueries($schemaName, $params); - } catch (AutomaticPersistedQueriesError $e) { + } catch (AutomaticPersistedQueriesError | Error $e) { return $graphql ->decorateExecutionResult(new ExecutionResult(null, [$e])) ->toArray($debug); diff --git a/tests/Unit/AutomatedPersistedQueriesTest.php b/tests/Unit/AutomatedPersistedQueriesTest.php index <HASH>..<HASH> 100644 --- a/tests/Unit/AutomatedPersistedQueriesTest.php +++ b/tests/Unit/AutomatedPersistedQueriesTest.php @@ -447,4 +447,41 @@ class AutomatedPersistedQueriesTest extends TestCase ]; self::assertEquals($expected, $content); } + + public function testPersistedQueryParseError(): void + { + $query = '{ parse(error) }'; + + $response = $this->call('GET', '/graphql', [ + 'query' => $query, + 'extensions' => [ + 'persistedQuery' => [ + 'version' => 1, + 'sha256Hash' => hash('sha256', $query), + ], + ], + ]); + + self::assertEquals(200, $response->getStatusCode()); + + $content = $response->json(); + + $expected = [ + 'errors' => [ + [ + 'message' => 'Syntax Error: Expected :, found )', + 'extensions' => [ + 'category' => 'graphql', + ], + 'locations' => [ + [ + 'line' => 1, + 'column' => 14, + ], + ], + ], + ], + ]; + self::assertEquals($expected, $content); + } }
apq: gracefully handle GraphQL parse errors
rebing_graphql-laravel
train
0f6d4eac6de4c95178826aa009f779fc9dd0de18
diff --git a/phoebe/frontend/bundle.py b/phoebe/frontend/bundle.py index <HASH>..<HASH> 100644 --- a/phoebe/frontend/bundle.py +++ b/phoebe/frontend/bundle.py @@ -626,7 +626,7 @@ class Bundle(object): """ return self.get_system().clear_synthetic() - def set_time(self, time, label=None, server=None): + def set_time(self, time, label=None, server=None, **kwargs): """ Set the time of a system, taking compute options into account. diff --git a/phoebe/frontend/gui/phoebe_dialogs.py b/phoebe/frontend/gui/phoebe_dialogs.py index <HASH>..<HASH> 100644 --- a/phoebe/frontend/gui/phoebe_dialogs.py +++ b/phoebe/frontend/gui/phoebe_dialogs.py @@ -130,6 +130,14 @@ class CreatePopPrefs(QDialog, gui.Ui_popPrefs_Dialog): # then disable certain items self.p_panel_fitting.setEnabled(False) self.p_panel_versions.setEnabled(False) + self.p_panel_system.setEnabled(False) # maybe enable for release if ready? + + # fitting options + self.label_13.setVisible(False) + self.fo_edit_combo.setVisible(False) + self.fo_delete.setVisible(False) + self.fo_add.setVisible(False) + self.fo_psedit.setVisible(False) def set_gui_from_prefs(self,prefs=None,init=False): if prefs is None: @@ -347,7 +355,7 @@ class CreatePopPrefs(QDialog, gui.Ui_popPrefs_Dialog): typ = 'compute' elif w == self.fo_add: typ = 'fitting' - elif w == self.sx_serveredit_add: + elif w == self.sx_serveredit_add or w == self.serverlist_add: typ = 'server' label = 'new %s' % typ diff --git a/phoebe/frontend/gui/phoebe_gui.py b/phoebe/frontend/gui/phoebe_gui.py index <HASH>..<HASH> 100644 --- a/phoebe/frontend/gui/phoebe_gui.py +++ b/phoebe/frontend/gui/phoebe_gui.py @@ -310,6 +310,7 @@ class PhoebeGUI(QMainWindow, gui.Ui_PHOEBE_MainWindow): self.mp_splash_triplePushButton.setEnabled(False) self.tb_view_rpAction.setEnabled(False) self.tb_view_versionsAction.setEnabled(False) + self.tb_view_systemAction.setEnabled(False) # maybe enable for release if ready? 
# Set system to None - this will then result in a call to on_new_bundle # any additional setup should be done there diff --git a/phoebe/frontend/gui/phoebe_widgets.py b/phoebe/frontend/gui/phoebe_widgets.py index <HASH>..<HASH> 100644 --- a/phoebe/frontend/gui/phoebe_widgets.py +++ b/phoebe/frontend/gui/phoebe_widgets.py @@ -1861,7 +1861,7 @@ class ParameterTreeWidget(GeneralParameterTreeWidget): QObject.connect(pop.constraintHelp, SIGNAL("clicked()"), self.on_help_clicked) QObject.connect(pop.presetHelp, SIGNAL("clicked()"), self.on_help_clicked) - if hasattr(par, 'adjust'): + if hasattr(par, 'adjust') and 'nofit' not in self.style: pop.check.setCheckState(check.checkState()) else: pop.check.setVisible(False)
various minor gui and bundle bug fixes
phoebe-project_phoebe2
train
b93a2fce49911e2a43a3d481bef6269f93b2b250
diff --git a/spec/serf/errors/policy_failure_spec.rb b/spec/serf/errors/policy_failure_spec.rb index <HASH>..<HASH> 100644 --- a/spec/serf/errors/policy_failure_spec.rb +++ b/spec/serf/errors/policy_failure_spec.rb @@ -4,8 +4,8 @@ require 'serf/errors/policy_failure' describe Serf::Errors::PolicyFailure do - it 'is kind of RuntimeError' do - subject.kind_of? RuntimeError - end + it { + should be_a_kind_of(RuntimeError) + } end
Fix Spec bug on PolicyFailure. Details: * I didn't properly make a proper rspec 'should' assertion on the subject. The test passed no matter what. * Now I made a good should assertion.
byu_serf
train
52cc59939ff88035138be76f07f349ad10cf5ce2
diff --git a/src/main/java/org/dasein/cloud/openstack/nova/os/AbstractMethod.java b/src/main/java/org/dasein/cloud/openstack/nova/os/AbstractMethod.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dasein/cloud/openstack/nova/os/AbstractMethod.java +++ b/src/main/java/org/dasein/cloud/openstack/nova/os/AbstractMethod.java @@ -1165,7 +1165,6 @@ public abstract class AbstractMethod { throw new NovaException(items); } else { - std.info("Expected OK for GET request, got " + code); String data = null; try { diff --git a/src/main/java/org/dasein/cloud/openstack/nova/os/NovaException.java b/src/main/java/org/dasein/cloud/openstack/nova/os/NovaException.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dasein/cloud/openstack/nova/os/NovaException.java +++ b/src/main/java/org/dasein/cloud/openstack/nova/os/NovaException.java @@ -64,17 +64,15 @@ public class NovaException extends CloudException { if (items.message.equals("unknown")) { String[] names = JSONObject.getNames(ob); for (String key : names) { - if (key.contains("Error") || key.contains("Fault")) { - try { - JSONObject msg = ob.getJSONObject(key); - if (msg.has("message") && !msg.isNull("message")) { - items.message = msg.getString("message"); - } - } - catch (JSONException e) { - items.message = ob.getString(key); + try { + JSONObject msg = ob.getJSONObject(key); + if (msg.has("message") && !msg.isNull("message")) { + items.message = msg.getString("message"); } } + catch (JSONException e) { + items.message = ob.getString(key); + } } } if( ob.has("details") ) { diff --git a/src/main/java/org/dasein/cloud/openstack/nova/os/compute/CinderVolume.java b/src/main/java/org/dasein/cloud/openstack/nova/os/compute/CinderVolume.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dasein/cloud/openstack/nova/os/compute/CinderVolume.java +++ b/src/main/java/org/dasein/cloud/openstack/nova/os/compute/CinderVolume.java @@ -270,11 +270,11 @@ public class CinderVolume extends AbstractVolumeSupport { 
list.add("xvdj"); } else { - list.add("/dev/sdf"); - list.add("/dev/sdg"); - list.add("/dev/sdh"); - list.add("/dev/sdi"); - list.add("/dev/sdj"); + list.add("/dev/vdf"); + list.add("/dev/vdg"); + list.add("/dev/vdh"); + list.add("/dev/vdi"); + list.add("/dev/vdj"); } return list; }
general tidy up loosely related to bugzid <I> bubble all cloud errors up to console remove info log message on <I> OK fix volume device ids to match those used by cloud
dasein-cloud_dasein-cloud-openstack
train
57feeb269bdade6bdb0cb887e1d520a0c18d41f5
diff --git a/source/php/Module/Posts/Posts.php b/source/php/Module/Posts/Posts.php index <HASH>..<HASH> 100644 --- a/source/php/Module/Posts/Posts.php +++ b/source/php/Module/Posts/Posts.php @@ -160,9 +160,9 @@ class Posts extends \Modularity\Module 'key' => 'group_' . md5('mod_posts_taxonomy_display'), 'title' => __('Taxonomy display', 'municipio'), 'fields' => array(), - 'location' => array ( - array ( - array ( + 'location' => array( + array( + array( 'param' => 'post_type', 'operator' => '==', 'value' => 'mod-posts', @@ -331,12 +331,28 @@ class Posts extends \Modularity\Module */ public function saveColumnFields($postId) { - if (!isset($_POST['modularity-mod-posts-expandable-list'])) { - delete_post_meta($postId, 'modularity-mod-posts-expandable-list'); + + //Meta key + $metaKey = "modularity-mod-posts-expandable-list"; + + //Bail early if autosave + if (defined('DOING_AUTOSAVE') && DOING_AUTOSAVE) { + return false; + } + + //Bail early if not a post request + if (!isset($_POST) ||(is_array($_POST) && empty($_POST)) ||!is_array($_POST)) { + return false; + } + + //Delete if not posted data + if (!isset($_POST[$metaKey])) { + delete_post_meta($postId, $metaKey); return; } - update_post_meta($postId, 'modularity-mod-posts-expandable-list', $_POST['modularity-mod-posts-expandable-list']); + //Save meta data + update_post_meta($postId, $metaKey, $_POST[$metaKey]); } /** @@ -400,7 +416,7 @@ class Posts extends \Modularity\Module public function columnFieldsMetaBoxContent($post, $args) { $fields = $args['args'][0]; - $fieldValues = get_post_meta( $post->ID, 'modularity-mod-posts-expandable-list', true); + $fieldValues = get_post_meta($post->ID, 'modularity-mod-posts-expandable-list', true); foreach ($fields as $field) { $fieldSlug = sanitize_title($field); @@ -424,7 +440,6 @@ class Posts extends \Modularity\Module $columns = array(); if (is_array($posts)) { - foreach ($posts as $post) { $values = get_field('posts_list_column_titles', $post); @@ -434,7 +449,6 @@ class 
Posts extends \Modularity\Module } } } - } return $columns; @@ -491,7 +505,6 @@ class Posts extends \Modularity\Module } return $posts; - } /** @@ -639,7 +652,9 @@ class Posts extends \Modularity\Module case 'manual': $getPostsArgs['post__in'] = $fields->posts_data_posts; - if ($orderby == 'false') $getPostsArgs['orderby'] = 'post__in'; + if ($orderby == 'false') { + $getPostsArgs['orderby'] = 'post__in'; + } break; }
Do some test on save_post before deleting or updating.
helsingborg-stad_Modularity
train
23a4814979a0f79112f4eeabe4a842377494c0c8
diff --git a/pysat/tests/test_meta.py b/pysat/tests/test_meta.py index <HASH>..<HASH> 100644 --- a/pysat/tests/test_meta.py +++ b/pysat/tests/test_meta.py @@ -1213,6 +1213,21 @@ class TestBasics(object): assert ('NEW2' == self.meta.var_case_name('NEW2')) return + def test_var_case_name_list_input(self): + """Test `meta.var_case_name` preserves the required output case.""" + + self.meta['new'] = {'units': 'hey', 'long_name': 'boo'} + self.meta['NEW2'] = {'units': 'hey2', 'long_name': 'boo2'} + + output = self.meta.var_case_name(['new2', 'nEw2', 'neW2', 'NEW2']) + target = ['NEW2'] * len(output) + assert np.all(target == output) + + output = self.meta.var_case_name(['new', 'nEw', 'neW', 'NEW']) + target = ['new'] * len(output) + assert np.all(target == output) + return + def test_get_attribute_name_case_preservation(self): """Test that meta labels and values preserve the input case."""
TST: Added test for list input to `var_case_name`
rstoneback_pysat
train
6726062e4801370505dfe7f5d0f58573fa7d4810
diff --git a/openpnm/algorithms/GenericTransport.py b/openpnm/algorithms/GenericTransport.py index <HASH>..<HASH> 100644 --- a/openpnm/algorithms/GenericTransport.py +++ b/openpnm/algorithms/GenericTransport.py @@ -279,22 +279,19 @@ class GenericTransport(GenericAlgorithm): mode = self._parse_mode(mode, allowed=['merge', 'overwrite'], single=True) self._set_BC(pores=pores, bctype='value', bcvalues=values, mode=mode) - def set_rate_BC(self, pores, values, mode='merge'): + def set_rate_BC(self, pores, rates, mode='merge'): r""" Apply constant rate boundary conditons to the specified locations. - This is similar to a Neumann boundary condition, but is - slightly different since it's the conductance multiplied by the - gradient, while Neumann conditions specify just the gradient. - Parameters ---------- pores : array_like The pore indices where the condition should be applied - values : scalar or array_like - The values of rate to apply in each pore. If a scalar is supplied - it is assigned to all locations, and if a vector is applied it - must be the same size as the indices given in ``pores``. + rates : scalar or array_like + The rates to apply in each pore. If a scalar is supplied + that rate is divided evenly among all locations, and if a vector + is supplied it must be the same size as the indices given in + ``pores`. mode : string, optional Controls how the boundary conditions are applied. Options are: @@ -311,8 +308,10 @@ class GenericTransport(GenericAlgorithm): The definition of ``quantity`` is specified in the algorithm's ``settings``, e.g. ``alg.settings['quantity'] = 'pore.pressure'``. 
""" + rates = np.array(rates) + bctype = 'total_rate' if rates.size == 1 else 'rate' mode = self._parse_mode(mode, allowed=['merge', 'overwrite'], single=True) - self._set_BC(pores=pores, bctype='rate', bcvalues=values, mode=mode) + self._set_BC(pores=pores, bctype=bctype, bcvalues=rates, mode=mode) @docstr.get_sectionsf( base='GenericTransport._set_BC', sections=['Parameters', 'Notes']) @@ -335,11 +334,16 @@ class GenericTransport(GenericAlgorithm): +-------------+--------------------------------------------------+ | 'rate' | Specify the flow rate into each location | +-------------+--------------------------------------------------+ + | 'total_rate'| Specify the total flow rate to be divided evenly | + | | among all locations | + +-------------+--------------------------------------------------+ bcvalues : int or array_like The boundary value to apply, such as concentration or rate. If - a single value is given, it's assumed to apply to all locations. - Different values can be applied to all pores in the form of an - array of the same length as ``pores``. + a single value is given, it's assumed to apply to all locations + unless the 'total_rate' bc_type is supplied whereby a single value + corresponds to a total rate to be divded evenly among all pores. + Otherwise, different values can be applied to all pores in the form + of an array of the same length as ``pores``. mode : string, optional Controls how the boundary conditions are applied. 
Options are: @@ -360,7 +364,8 @@ class GenericTransport(GenericAlgorithm): """ # Hijack the parse_mode function to verify bctype argument - bctype = self._parse_mode(bctype, allowed=['value', 'rate'], + bctype = self._parse_mode(bctype, + allowed=['value', 'rate', 'total_rate'], single=True) mode = self._parse_mode(mode, allowed=['merge', 'overwrite'], single=True) @@ -383,6 +388,8 @@ class GenericTransport(GenericAlgorithm): self['pore.bc_' + bctype] = np.nan # Store boundary values + if bctype == 'total_rate': + values = values/pores.size self['pore.bc_' + bctype][pores] = values def remove_BC(self, pores=None, bctype='all'):
added total_rate feature to set_rate_BC method
PMEAL_OpenPNM
train
31b644cbf39dda9da3bdc6329ba809ebf4bba8f6
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -435,7 +435,7 @@ function streamCompare(stream1, stream2, optionsOrCompare) { /** Handles stream end events. * - * @this !module:stream.Readable + * @this {!module:stream.Readable} * @private */ function endListener(state) { @@ -489,7 +489,7 @@ function streamCompare(stream1, stream2, optionsOrCompare) { * our guarantees. We call it as if it were to convey this behavior and to * avoid ESLint no-param-reassign. * - * @this !StreamState + * @this {!StreamState} * @param {*} data Data read from the stream for this StreamState. * @private */
Add braces around type for @this Necessary in closure mode.
kevinoid_stream-compare
train
4452bc6d70e0a55bbbe3fc6978ae9914f7bee9f5
diff --git a/src/EditorControls.js b/src/EditorControls.js index <HASH>..<HASH> 100644 --- a/src/EditorControls.js +++ b/src/EditorControls.js @@ -289,6 +289,26 @@ class EditorControls extends Component { } break; + case EDITOR_ACTIONS.MOVE_TO: + // checking is fromIndex and toIndex is a number because just checking + // if not there will not work as index can be 0 and that value is falsy + if (payload.path && !isNaN(payload.fromIndex) && !isNaN(payload.toIndex)) { + if (payload.path === 'data') { + const a = graphDiv.data[payload.fromIndex]; + const b = graphDiv.data[payload.toIndex]; + graphDiv.data.splice(payload.toIndex, 1, a); + graphDiv.data.splice(payload.fromIndex, 1, b); + } + if (this.props.onUpdate) { + this.props.onUpdate( + graphDiv.data.slice(), + graphDiv.layout, + graphDiv._transitionData._frames + ); + } + } + break; + default: throw new Error(this.localize('must specify an action type to handleEditorUpdate')); } diff --git a/src/components/containers/PlotlyFold.js b/src/components/containers/PlotlyFold.js index <HASH>..<HASH> 100644 --- a/src/components/containers/PlotlyFold.js +++ b/src/components/containers/PlotlyFold.js @@ -21,7 +21,7 @@ export class Fold extends Component { if (!this.foldVisible && !this.props.messageIfEmpty) { return null; } - const {deleteContainer} = this.context; + const {deleteContainer, moveContainer} = this.context; const { canDelete, children, @@ -79,7 +79,10 @@ export class Fold extends Component { onClick={e => { // prevents fold toggle to happen when clicking on moving arrow controls e.stopPropagation(); - alert('up'); + if (!moveContainer || typeof moveContainer !== 'function') { + throw new Error('moveContainer must be a function'); + } + moveContainer('up'); }} > {canMoveUp ? 
<AngleDownIcon /> : null} @@ -89,7 +92,10 @@ export class Fold extends Component { onClick={e => { // prevents fold toggle to happen when clicking on moving arrow controls e.stopPropagation(); - alert('down'); + if (!moveContainer || typeof moveContainer !== 'function') { + throw new Error('moveContainer must be a function'); + } + moveContainer('down'); }} > {canMoveDown ? <AngleDownIcon /> : null} @@ -205,6 +211,7 @@ PlotlyFold.plotly_editor_traits = { PlotlyFold.contextTypes = Object.assign( { deleteContainer: PropTypes.func, + moveContainer: PropTypes.func, }, containerConnectedContextTypes ); diff --git a/src/lib/connectTraceToPlot.js b/src/lib/connectTraceToPlot.js index <HASH>..<HASH> 100644 --- a/src/lib/connectTraceToPlot.js +++ b/src/lib/connectTraceToPlot.js @@ -19,6 +19,7 @@ export default function connectTraceToPlot(WrappedComponent) { this.deleteTrace = this.deleteTrace.bind(this); this.updateTrace = this.updateTrace.bind(this); + this.moveTrace = this.moveTrace.bind(this); this.setLocals(props, context); } @@ -40,6 +41,7 @@ export default function connectTraceToPlot(WrappedComponent) { : plotly.PlotSchema.getTraceValObject(fullTrace, nestedProperty({}, attr).parts), updateContainer: this.updateTrace, deleteContainer: this.deleteTrace, + moveContainer: this.moveTrace, container: trace, fullContainer: fullTrace, traceIndexes: this.props.traceIndexes, @@ -178,6 +180,19 @@ export default function connectTraceToPlot(WrappedComponent) { } } + moveTrace(direction) { + const traceIndex = this.props.traceIndexes[0]; + const desiredIndex = direction === 'up' ? 
traceIndex - 1 : traceIndex + 1; + this.context.onUpdate({ + type: EDITOR_ACTIONS.MOVE_TO, + payload: { + fromIndex: traceIndex, + toIndex: desiredIndex, + path: 'data', + }, + }); + } + render() { return <WrappedComponent name={this.name} icon={this.icon} {...this.props} />; } @@ -206,6 +221,7 @@ export default function connectTraceToPlot(WrappedComponent) { container: PropTypes.object, fullContainer: PropTypes.object, traceIndexes: PropTypes.array, + moveContainer: PropTypes.func, }; const {plotly_editor_traits} = WrappedComponent; diff --git a/src/lib/constants.js b/src/lib/constants.js index <HASH>..<HASH> 100644 --- a/src/lib/constants.js +++ b/src/lib/constants.js @@ -38,6 +38,7 @@ export const EDITOR_ACTIONS = { DELETE_IMAGE: 'plotly-editor-delete-image', DELETE_RANGESELECTOR: 'plotly-editor-delete-rangeselector', DELETE_TRANSFORM: 'plotly-editor-delete-transform', + MOVE_TO: 'plotly-editor-move-to', }; export const DEFAULT_FONTS = [
add ability to move folds up and down / can be extended to other accordions
plotly_react-chart-editor
train
d4b7be009c3cbae61b67a0306ff9f0d9d845bfe3
diff --git a/cmd/syncthing/main.go b/cmd/syncthing/main.go index <HASH>..<HASH> 100644 --- a/cmd/syncthing/main.go +++ b/cmd/syncthing/main.go @@ -720,7 +720,6 @@ func syncthingMain(runtimeOptions RuntimeOptions) { dbFile := locations[locDatabase] ldb, err := db.Open(dbFile) - if err != nil { l.Fatalln("Cannot open database:", err, "- Is another copy of Syncthing already running?") } @@ -746,12 +745,30 @@ func syncthingMain(runtimeOptions RuntimeOptions) { } } - if cfg.RawCopy().OriginalVersion == 15 { - // The config version 15->16 migration is about handling ignores and - // delta indexes and requires that we drop existing indexes that - // have been incorrectly ignore filtered. + // Grab the previously running version string from the database. + + miscDB := db.NewNamespacedKV(ldb, string(db.KeyTypeMiscData)) + prevVersion, _ := miscDB.String("prevVersion") + + // Strip away prerelease/beta stuff and just compare the release + // numbers. 0.14.44 to 0.14.45-banana is an upgrade, 0.14.45-banana to + // 0.14.45-pineapple is not. + + prevParts := strings.Split(prevVersion, "-") + curParts := strings.Split(Version, "-") + if prevParts[0] != curParts[0] { + if prevVersion != "" { + l.Infoln("Detected upgrade from", prevVersion, "to", Version) + } + + // Drop delta indexes in case we've changed random stuff we + // shouldn't have. ldb.DropDeltaIndexIDs() + + // Remember the new version. + miscDB.PutString("prevVersion", Version) } + if cfg.RawCopy().OriginalVersion < 19 { // Converts old symlink types to new in the entire database. ldb.ConvertSymlinkTypes() diff --git a/lib/db/leveldb.go b/lib/db/leveldb.go index <HASH>..<HASH> 100644 --- a/lib/db/leveldb.go +++ b/lib/db/leveldb.go @@ -26,6 +26,7 @@ const ( KeyTypeDeviceIdx KeyTypeIndexID KeyTypeFolderMeta + KeyTypeMiscData ) func (l VersionList) String() string {
cmd/syncthing: Reset delta indexes on upgrade
syncthing_syncthing
train
fdd9bd8f7acdb3c5b43f08e2df4e594cab5ca88a
diff --git a/lib/ronin/ui/output/handler.rb b/lib/ronin/ui/output/handler.rb index <HASH>..<HASH> 100644 --- a/lib/ronin/ui/output/handler.rb +++ b/lib/ronin/ui/output/handler.rb @@ -24,6 +24,13 @@ module Ronin module Output module Handler # + # Prints the given _messages_. + # + def self.puts(*messages) + STDOUT.puts(*messages) + end + + # # Prints the given _messages_ as info diagnostics. # def self.print_info(*messages) diff --git a/lib/ronin/ui/output/output.rb b/lib/ronin/ui/output/output.rb index <HASH>..<HASH> 100644 --- a/lib/ronin/ui/output/output.rb +++ b/lib/ronin/ui/output/output.rb @@ -72,6 +72,13 @@ module Ronin protected # + # Prints the given _messages_. + # + def puts(*messages) + Output.handler.puts(*messages) + end + + # # Prints the given _messages_ as info diagnostics. # def print_info(*messages)
Added Output#puts and Handler.puts.
ronin-ruby_ronin
train
524d21da14ca8f36e7afa8bd3c5a74612bd35985
diff --git a/lib/rack/timeout.rb b/lib/rack/timeout.rb index <HASH>..<HASH> 100644 --- a/lib/rack/timeout.rb +++ b/lib/rack/timeout.rb @@ -1,5 +1,6 @@ # encoding: utf-8 require 'timeout' +require 'securerandom' module Rack class Timeout @@ -7,6 +8,11 @@ module Rack class RequestTooOldError < Error; end class RequestAbortedError < Error; end + RequestData = Struct.new(:id, :age, :timeout, :duration, :state) + ENV_INFO_KEY = 'rack-timeout.info' + FINAL_STATES = [:dropped, :aborted, :completed] + MAX_REQUEST_AGE = 30 # seconds + @timeout = 15 class << self attr_accessor :timeout @@ -16,47 +22,42 @@ module Rack @app = app end - MAX_REQUEST_AGE = 30 # seconds def call(env) + info = env[ENV_INFO_KEY] ||= RequestData.new + info.id ||= env['HTTP_HEROKU_REQUEST_ID'] || SecureRandom.hex request_start = env['HTTP_X_REQUEST_START'] # unix timestamp in ms request_start = Time.at(request_start.to_i / 1000) if request_start - request_age = Time.now - request_start if request_start - time_left = MAX_REQUEST_AGE - request_age if request_age - timeout = [self.class.timeout, time_left].compact.min - - env['rack-timeout.request-age'] = request_age - env['rack-timeout.timeout'] = timeout if timeout > 0 + info.age = Time.now - request_start if request_start + time_left = MAX_REQUEST_AGE - info.age if info.age + info.timeout = [self.class.timeout, time_left].compact.select { |n| n >= 0 }.min - if timeout <= 0 - Rack::Timeout.set_state_and_log! env, :dropped + if time_left && time_left <= 0 + Rack::Timeout.set_state_and_log! info, :dropped raise RequestTooOldError end - Rack::Timeout.set_state_and_log! env, :ready - ::Timeout.timeout(timeout, RequestAbortedError) do - ready_time = Time.now - response = @app.call(env) - env['rack-timeout.duration'] = Time.now - ready_time - Rack::Timeout.set_state_and_log! env, :completed + Rack::Timeout.set_state_and_log! 
info, :ready + ::Timeout.timeout(info.timeout, RequestAbortedError) do + ready_time = Time.now + response = @app.call(env) + info.duration = Time.now - ready_time + Rack::Timeout.set_state_and_log! info, :completed response end end - FINAL_STATES = [:dropped, :aborted, :completed] - def self.set_state_and_log!(env, state) - env["rack-timeout.state"] = state unless FINAL_STATES.include? env["rack-timeout.state"] - - id, state = env.values_at(*%w[ HTTP_HEROKU_REQUEST_ID rack-timeout.state ]) - age, timeout, duration = env.values_at(*%w[ rack-timeout.request-age rack-timeout.timeout rack-timeout.duration ]) - .map { |s| "%.fms" % (s * 1000) if s } - - s = "rack-timeout:" - s << " id=" << id if id - s << " age=" << age if age - s << " timeout=" << timeout if timeout - s << " duration=" << duration if duration - s << " state=" << state.to_s if state + def self.set_state_and_log!(info, state) + return if FINAL_STATES.include? info.state + info.state = state + ms = ->(s) { "%.fms" % (s * 1000) } + s = 'rack-timeout:' + s << ' id=' << info.id if info.id + s << ' age=' << ms[info.age] if info.age + s << ' timeout=' << ms[info.timeout] if info.timeout + s << ' duration=' << ms[info.duration] if info.duration + s << ' state=' << info.state.to_s if info.state s << "\n" + $stderr << s end @@ -68,7 +69,7 @@ module Rack def call(env) @app.call(env) rescue Rack::Timeout::RequestAbortedError - Rack::Timeout.set_state_and_log!(env, :aborted) + Rack::Timeout.set_state_and_log!(env[ENV_INFO_KEY], :aborted) raise end end
A bunch more refactoring; notable changes: - don't log a new line if the state is not altered - generate our own request id when HTTP_HEROKU_REQUEST_ID is not present - use a RequestData struct to contain our env data instead of various rack-timeout.key-name keys in env
heroku_rack-timeout
train
fd3af4b13a837aa45f804ab1901624ddb3a422f3
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -39,6 +39,12 @@ module.exports = function(grunt) { jasmine_node_watch: { command: './node_modules/jasmine-node/bin/jasmine-node --color --autotest tests/' } + }, + + githooks: { + all: { + 'pre-commit': 'jshint shell:jasmine_node' + } } }); @@ -50,6 +56,7 @@ module.exports = function(grunt) { require('load-grunt-tasks')(grunt); // Register tasks. + grunt.registerTask('setup', ['githooks']); grunt.registerTask('test', ['shell:jasmine_node']); grunt.registerTask('test:watch', ['shell:jasmine_node_watch']); grunt.registerTask('default', ['test:watch']);
Configured grunt-githooks
rmariuzzo_grunt-fb-flo
train
849897444d1ad4f714e9a65f127ffa06ba2a6ac6
diff --git a/internetarchive/cli/ia_search.py b/internetarchive/cli/ia_search.py index <HASH>..<HASH> 100644 --- a/internetarchive/cli/ia_search.py +++ b/internetarchive/cli/ia_search.py @@ -33,7 +33,7 @@ options: -i, --itemlist Output identifiers only. -f, --field=<field>... Metadata fields to return. -n, --num-found Print the number of results to stdout. - -t, --timeout=<seconds> Set the timeout in seconds [default: 24]. + -t, --timeout=<seconds> Set the timeout in seconds [default: 300]. """ from __future__ import absolute_import, print_function, unicode_literals import sys diff --git a/internetarchive/search.py b/internetarchive/search.py index <HASH>..<HASH> 100644 --- a/internetarchive/search.py +++ b/internetarchive/search.py @@ -92,7 +92,7 @@ class Search(object): # Set timeout. if 'timeout' not in self.request_kwargs: - self.request_kwargs['timeout'] = 24 + self.request_kwargs['timeout'] = 300 # Set retries. self.session.mount_http_adapter(max_retries=self.max_retries)
Increased search timeout to <I> seconds
jjjake_internetarchive
train
82cdf2447978b90c88e77573ac6d561ef851e051
diff --git a/lib/rules/globals.js b/lib/rules/globals.js index <HASH>..<HASH> 100644 --- a/lib/rules/globals.js +++ b/lib/rules/globals.js @@ -10,13 +10,13 @@ // ----------------------------------------------------------------------------- import {Variable} from 'escope' -import {CLIENT, SERVER, UNIVERSAL} from '../util/environment' +import {NON_METEOR, UNIVERSAL, CLIENT, SERVER} from '../util/environment' import globalsExportedByPackages from '../util/data/globalsExportedByPackages' import getExecutorsFromComments from '../util/executors/getExecutorsFromComments' module.exports = getMeta => context => { - const {isLintedEnv, env} = getMeta(context.getFilename()) + const {env} = getMeta(context.getFilename()) // --------------------------------------------------------------------------- // Helpers @@ -42,7 +42,7 @@ module.exports = getMeta => context => { // Public // --------------------------------------------------------------------------- - if (!isLintedEnv) { + if (env === NON_METEOR) { return {} } diff --git a/tests/lib/rules/globals.js b/tests/lib/rules/globals.js index <HASH>..<HASH> 100644 --- a/tests/lib/rules/globals.js +++ b/tests/lib/rules/globals.js @@ -9,7 +9,7 @@ // Requirements // ----------------------------------------------------------------------------- -import {SERVER, PACKAGE} from '../../../dist/util/environment' +import {SERVER, PACKAGE, NON_METEOR} from '../../../dist/util/environment' const rule = require('../../../dist/rules/globals') const RuleTester = require('eslint').RuleTester @@ -19,14 +19,14 @@ const RuleTester = require('eslint').RuleTester // ----------------------------------------------------------------------------- const ruleTester = new RuleTester() -ruleTester.run('globals', rule(() => ({env: SERVER, isLintedEnv: true})), { +ruleTester.run('globals', rule(() => ({env: SERVER})), { valid: ['Session.set("hi", true)'], invalid: [] }) -ruleTester.run('globals', rule(() => ({env: PACKAGE, isLintedEnv: true})), { 
+ruleTester.run('globals', rule(() => ({env: PACKAGE})), { valid: [ ` @@ -46,7 +46,7 @@ ruleTester.run('globals', rule(() => ({env: PACKAGE, isLintedEnv: true})), { }) -ruleTester.run('globals', rule(() => ({env: SERVER, isLintedEnv: false})), { +ruleTester.run('globals', rule(() => ({env: NON_METEOR})), { valid: ['Session.set("hi", true)'], invalid: [] })
fix(globals): Set globals in envs other than client and server
dferber90_eslint-plugin-meteor
train
22436555a7d8c5fa207f1f5500451d8e96986315
diff --git a/helper/schema/resource_data_test.go b/helper/schema/resource_data_test.go index <HASH>..<HASH> 100644 --- a/helper/schema/resource_data_test.go +++ b/helper/schema/resource_data_test.go @@ -2258,6 +2258,37 @@ func TestResourceDataState(t *testing.T) { Attributes: map[string]string{}, }, }, + + // #22 + { + Schema: map[string]*Schema{ + "foo": &Schema{ + Type: TypeString, + Optional: true, + Computed: true, + }, + }, + + State: nil, + + Diff: &terraform.InstanceDiff{ + Attributes: map[string]*terraform.ResourceAttrDiff{ + "foo": &terraform.ResourceAttrDiff{ + NewComputed: true, + }, + }, + }, + + Set: map[string]interface{}{ + "foo": "bar", + }, + + Result: &terraform.InstanceState{ + Attributes: map[string]string{ + "foo": "bar", + }, + }, + }, } for i, tc := range cases {
helper/schema: test setting computed value and retrieving it via state
hashicorp_terraform
train
af0891365becb9ae73a7cb2450f75901c490b358
diff --git a/contribs/gmf/src/services/permalink.js b/contribs/gmf/src/services/permalink.js index <HASH>..<HASH> 100644 --- a/contribs/gmf/src/services/permalink.js +++ b/contribs/gmf/src/services/permalink.js @@ -1,6 +1,7 @@ goog.provide('gmf.Permalink'); goog.require('gmf'); +goog.require('ngeo'); goog.require('ngeo.AutoProjection'); goog.require('gmf.Themes'); goog.require('gmf.TreeManager'); @@ -51,6 +52,19 @@ gmf.module.value('gmfPermalinkOptions', /** @type {gmfx.PermalinkOptions} */ ({})); +/** Configure the ngeo state manager */ +(function() { + var regexp = []; + for (const key in gmf.PermalinkParamPrefix) { + regexp.push(new RegExp(gmf.PermalinkParamPrefix[key] + '.*')); + } + for (const key in gmf.PermalinkParam) { + regexp.push(new RegExp(gmf.PermalinkParamPrefix[key])); + } + ngeo.module.value('ngeoUsedKeyRegexp', regexp); +})(); + + /** * The Permalink service for GMF, which uses the `ngeo.StateManager` to manage * the GMF application state. Here's the list of states are are managed: diff --git a/src/services/statemanager.js b/src/services/statemanager.js index <HASH>..<HASH> 100644 --- a/src/services/statemanager.js +++ b/src/services/statemanager.js @@ -6,15 +6,19 @@ goog.require('ngeo'); goog.require('ngeo.Location'); +ngeo.module.value('ngeoUsedKeyRegexp', [new RegExp('.*')]); + + /** * Provides a service for managing the application state. * The application state is written to both the URL and the local storage. * @constructor * @struct - * @param {ngeo.Location} ngeoLocation ngeo location service. + * @param {!ngeo.Location} ngeoLocation ngeo location service. + * @param {!Array.<!RegExp>} ngeoUsedKeyRegexp regexp used to identify the used keys. * @ngInject */ -ngeo.StateManager = function(ngeoLocation) { +ngeo.StateManager = function(ngeoLocation, ngeoUsedKeyRegexp) { /** * Object representing the application's initial state. 
@@ -23,7 +27,7 @@ ngeo.StateManager = function(ngeoLocation) { this.initialState = {}; /** - * @type {ngeo.Location} + * @type {!ngeo.Location} */ this.ngeoLocation = ngeoLocation; @@ -32,6 +36,11 @@ ngeo.StateManager = function(ngeoLocation) { */ this.localStorage = new goog.storage.mechanism.HTML5LocalStorage(); + /** + * @type {!Array.<!RegExp>} + */ + this.usedKeyRegexp = ngeoUsedKeyRegexp; + /** * @type {Array.<string>} @@ -43,7 +52,7 @@ ngeo.StateManager = function(ngeoLocation) { // is no state in the location URL. var paramKeys = ngeoLocation.getParamKeys(); - var i, key, theme; + var i, theme; var themeRegex = new RegExp(/\/theme\/([^\?\/]*)/); var urlPath = ngeoLocation.getPath(); var locationInitState = {}; @@ -53,21 +62,35 @@ ngeo.StateManager = function(ngeoLocation) { if (this.localStorage.isAvailable()) { var count = this.localStorage.getCount(); for (i = 0; i < count; ++i) { - key = this.localStorage.key(i); + var key = this.localStorage.key(i); goog.asserts.assert(key !== null); - this.initialState[key] = this.getItemFromLocalStorage_(key); - //Do not copy excluded parameters in the URL - if (this.excludedKeyListForURL.indexOf(key) < 0) { - locationInitState[key] = this.initialState[key]; + for (const keyRegexp of this.usedKeyRegexp) { + if (key.match(keyRegexp)) { + var value = this.localStorage.get(key); + goog.asserts.assert(value !== null); + this.initialState[key] = value; + + //Do not copy excluded parameters in the URL + if (this.excludedKeyListForURL.indexOf(key) < 0) { + locationInitState[key] = this.initialState[key]; + } + break; + } } } this.ngeoLocation.updateParams(locationInitState); } } else { - for (i = 0; i < paramKeys.length; ++i) { - key = paramKeys[i]; - this.initialState[key] = this.getItemFromLocation_(key); + for (const key of paramKeys) { + for (const keyRegexp of this.usedKeyRegexp) { + if (key.match(keyRegexp)) { + var value = this.ngeoLocation.getParam(key); + goog.asserts.assert(value !== null); + this.initialState[key] 
= value; + break; + } + } } //Retrieve selected theme in url path theme = urlPath.match(themeRegex);
Don't add unknown keys in the permalink
camptocamp_ngeo
train
97139fb44d3c87b68567cd4a9c7291bf8b672420
diff --git a/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/MultiTenancyDecisionTaskTest.java b/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/MultiTenancyDecisionTaskTest.java index <HASH>..<HASH> 100644 --- a/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/MultiTenancyDecisionTaskTest.java +++ b/engine/src/test/java/org/camunda/bpm/engine/test/api/multitenancy/MultiTenancyDecisionTaskTest.java @@ -18,9 +18,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import org.camunda.bpm.engine.ProcessEngineException; -import org.camunda.bpm.engine.history.HistoricDecisionInstanceQuery; import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase; -import org.camunda.bpm.engine.repository.DecisionDefinition; import org.camunda.bpm.engine.runtime.ProcessInstance; import org.camunda.bpm.model.bpmn.Bpmn; import org.camunda.bpm.model.bpmn.BpmnModelInstance; @@ -104,7 +102,7 @@ public class MultiTenancyDecisionTaskTest extends PluggableProcessEngineTestCase deploymentForTenant(TENANT_ONE, DMN_FILE, process); - deploymentForTenant(TENANT_TWO, DMN_FILE_VERSION_TWO, process); + deploymentForTenant(TENANT_TWO, DMN_FILE, process); deploymentForTenant(TENANT_TWO, DMN_FILE_VERSION_TWO); ProcessInstance processInstanceOne = runtimeService.createProcessInstanceByKey("process") @@ -117,17 +115,6 @@ public class MultiTenancyDecisionTaskTest extends PluggableProcessEngineTestCase assertThat((String)runtimeService.getVariable(processInstanceOne.getId(), "decisionVar"), is("A")); assertThat((String)runtimeService.getVariable(processInstanceTwo.getId(), "decisionVar"), is("C")); - - // check whether DMN_FILE_VERSION_TWO version 2 is really used - DecisionDefinition latestDecisionDefinitionTenantTwo = repositoryService.createDecisionDefinitionQuery() - .tenantIdIn(TENANT_TWO).decisionDefinitionKey("decision").latestVersion().singleResult(); - - HistoricDecisionInstanceQuery 
decisionInstanceQuery = historyService.createHistoricDecisionInstanceQuery() - .tenantIdIn(TENANT_TWO).decisionDefinitionId(latestDecisionDefinitionTenantTwo.getId()).includeOutputs(); - - assertThat(decisionInstanceQuery.singleResult().getOutputs().size(), is(1)); - assertThat((String)decisionInstanceQuery.singleResult().getOutputs().iterator().next().getValue(), is("C")); - } public void testEvaluateDecisionTaskWithVersionBinding() {
fix(test): make test case runnable for all history levels related to #CAM-<I>
camunda_camunda-bpm-platform
train
dea9035d9b72d2939a1e7904f98b7472c7e2fb47
diff --git a/src/Surfnet/StepupMiddlewareClient/Identity/Dto/RaListingSearchQuery.php b/src/Surfnet/StepupMiddlewareClient/Identity/Dto/RaListingSearchQuery.php index <HASH>..<HASH> 100644 --- a/src/Surfnet/StepupMiddlewareClient/Identity/Dto/RaListingSearchQuery.php +++ b/src/Surfnet/StepupMiddlewareClient/Identity/Dto/RaListingSearchQuery.php @@ -58,19 +58,6 @@ final class RaListingSearchQuery implements HttpQuery } /** - * @param string $institution - * @return RaListingSearchQuery - */ - public function setInstitution($institution) - { - $this->assertNonEmptyString($institution, 'institution'); - - $this->institution = $institution; - - return $this; - } - - /** * @param string $orderBy * @return RaListingSearchQuery */
Revert ability to set Institution on RaListingSearchQuery
OpenConext_Stepup-Middleware-clientbundle
train
85a148ecb0e4046b296351ac9cb0b0293894a5ee
diff --git a/Kwf/Controller/Action/Cli/Web/ProcessControlController.php b/Kwf/Controller/Action/Cli/Web/ProcessControlController.php index <HASH>..<HASH> 100644 --- a/Kwf/Controller/Action/Cli/Web/ProcessControlController.php +++ b/Kwf/Controller/Action/Cli/Web/ProcessControlController.php @@ -209,11 +209,11 @@ class Kwf_Controller_Action_Cli_Web_ProcessControlController extends Kwf_Control $killed[] = $p['pid']; } else { if (!$this->_getParam('silent')) echo "kill $p[pid] $p[cmd] $p[args]\n"; - posix_kill($p['pid'], SIGTERM); + system("kill $p[pid]"); $killed[] = $p['pid']; foreach ($p['childPIds'] as $pid) { if (!$this->_getParam('silent')) echo " kill child process $pid\n"; - posix_kill($pid, SIGTERM); + system("kill $pid"); $killed[] = $pid; } }
don't use posix_kill to have fewer dependencies on loaded PHP modules
koala-framework_koala-framework
train
eb84b79d9e4fcc7271c2e6dd7a7902dec16dc560
diff --git a/internals/states.py b/internals/states.py index <HASH>..<HASH> 100644 --- a/internals/states.py +++ b/internals/states.py @@ -1344,7 +1344,7 @@ class HSMMIntNegBinVariantSubHMMsStates(HSMMStatesIntegerNegativeBinomialVariant # TODO something with temperature self._remove_substates_from_subHMMs() alphan = self.messages_forwards_normalized() - self.hmm_sample_backwards_normalized(alphan) + self.sample_backwards_normalized(alphan) def _map_states(self): # NOTE: "big" stateseq includes substates and duration pseudostates
Use sparse backwards sampling; checked that the dense matrix is never instantiated
mattjj_pyhsmm
train
c65081e4bdea6302f783e4d621a5b2fbb1a9592c
diff --git a/app/models/post.rb b/app/models/post.rb index <HASH>..<HASH> 100644 --- a/app/models/post.rb +++ b/app/models/post.rb @@ -51,7 +51,7 @@ class Post < ActiveRecord::Base end def find_all_associated_media - find_media_from_body << self.featured_media + find_media_from_body.push(self.featured_media).compact.uniq end def find_media_from_body diff --git a/db/schema.rb b/db/schema.rb index <HASH>..<HASH> 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -136,7 +136,8 @@ ActiveRecord::Schema.define(version: 20140514042944) do add_index "taggings", ["tag_id", "taggable_id", "taggable_type", "context", "tagger_id", "tagger_type"], name: "taggings_idx", unique: true, using: :btree create_table "tags", force: true do |t| - t.string "name" + t.string "name" + t.integer "taggings_count", default: 0 end add_index "tags", ["name"], name: "index_tags_on_name", unique: true, using: :btree
Compact/ensure uniqueness of associated media array
cortex-cms_cortex
train
82b48dff77fda170a3a7e11f9287bc53ce95f830
diff --git a/simpleclient/src/main/java/io/prometheus/client/CollectorRegistry.java b/simpleclient/src/main/java/io/prometheus/client/CollectorRegistry.java index <HASH>..<HASH> 100644 --- a/simpleclient/src/main/java/io/prometheus/client/CollectorRegistry.java +++ b/simpleclient/src/main/java/io/prometheus/client/CollectorRegistry.java @@ -189,10 +189,19 @@ public class CollectorRegistry { } private Collector.MetricFamilySamples filter(Collector.MetricFamilySamples next) { - if (includedNames.isEmpty() || includedNames.contains(next.name)) { + if (includedNames.isEmpty()) { return next; } else { - return null; + Iterator<Collector.MetricFamilySamples.Sample> it = next.samples.iterator(); + while (it.hasNext()) { + if (!includedNames.contains(it.next().name)) { + it.remove(); + } + } + if (next.samples.size() == 0) { + return null; + } + return next; } } diff --git a/simpleclient/src/test/java/io/prometheus/client/CollectorRegistryTest.java b/simpleclient/src/test/java/io/prometheus/client/CollectorRegistryTest.java index <HASH>..<HASH> 100644 --- a/simpleclient/src/test/java/io/prometheus/client/CollectorRegistryTest.java +++ b/simpleclient/src/test/java/io/prometheus/client/CollectorRegistryTest.java @@ -80,15 +80,20 @@ public class CollectorRegistryTest { Collector ec = new EmptyCollector().register(registry); SkippedCollector sr = new SkippedCollector().register(registry); PartiallyFilterCollector pfr = new PartiallyFilterCollector().register(registry); - HashSet<String> names = new HashSet<String>(); + HashSet<String> metrics = new HashSet<String>(); + HashSet<String> series = new HashSet<String>(); for (Collector.MetricFamilySamples metricFamilySamples : Collections.list(registry.filteredMetricFamilySamples( - new HashSet<String>(Arrays.asList("", "s", "c", "part_filter_a", "part_filter_c"))))) { - names.add(metricFamilySamples.name); + new HashSet<String>(Arrays.asList("", "s_sum", "c", "part_filter_a", "part_filter_c"))))) { + 
metrics.add(metricFamilySamples.name); + for (Collector.MetricFamilySamples.Sample sample : metricFamilySamples.samples) { + series.add(sample.name); + } } assertEquals(1, sr.collectCallCount); assertEquals(2, pfr.collectCallCount); - assertEquals(new HashSet<String>(Arrays.asList("s", "c", "part_filter_a", "part_filter_c")), names); + assertEquals(new HashSet<String>(Arrays.asList("s", "c", "part_filter_a", "part_filter_c")), metrics); + assertEquals(new HashSet<String>(Arrays.asList("s_sum", "c", "part_filter_a", "part_filter_c")), series); } @Test
Filter based on time series, not metric.
prometheus_client_java
train
603bb6c8786137e877d2f2eb2e52c4d019f9d4c6
diff --git a/system/src/Grav/Framework/Flex/FlexObject.php b/system/src/Grav/Framework/Flex/FlexObject.php index <HASH>..<HASH> 100644 --- a/system/src/Grav/Framework/Flex/FlexObject.php +++ b/system/src/Grav/Framework/Flex/FlexObject.php @@ -618,12 +618,24 @@ class FlexObject implements FlexObjectInterface, FlexAuthorizeInterface $this->triggerEvent('onBeforeSave'); $storage = $this->getFlexDirectory()->getStorage(); - - $key = $this->getStorageKey() ?: '@@' . spl_object_hash($this); $meta = $this->getMetaData(); /** @var string|null $origKey */ $origKey = $meta['storage_key'] ?? null; + $storageKey = $this->getStorageKey() ?: '@@' . spl_object_hash($this); + + if (method_exists($storage, 'parseKey')) { + if (null !== $origKey) { + $origParts =$storage->parseKey($origKey); + $origKey = $origParts['key']; + + } + $keyParts = $storage->parseKey($storageKey); + $key = $keyParts['key']; + } else { + $key = $storageKey; + } + if (null !== $origKey && $key !== $origKey) { if (!empty($meta['copy'])) { $storage->copyRow($origKey, $key); @@ -632,7 +644,7 @@ class FlexObject implements FlexObjectInterface, FlexAuthorizeInterface } } - $result = $storage->replaceRows([$key => $this->prepareStorage()]); + $result = $storage->replaceRows([$storageKey => $this->prepareStorage()]); $value = reset($result); $meta = $value['__META'] ?? null; @@ -652,7 +664,7 @@ class FlexObject implements FlexObjectInterface, FlexAuthorizeInterface // Make sure that the object exists before continuing (just in case). 
if (!$this->exists()) { - throw new \RuntimeException('Saving failed: Object does not exist!'); + throw new \RuntimeException('Save failed: Object does not exist!'); } if (method_exists($this, 'saveUpdatedMedia')) { diff --git a/system/src/Grav/Framework/Flex/Storage/FolderStorage.php b/system/src/Grav/Framework/Flex/Storage/FolderStorage.php index <HASH>..<HASH> 100644 --- a/system/src/Grav/Framework/Flex/Storage/FolderStorage.php +++ b/system/src/Grav/Framework/Flex/Storage/FolderStorage.php @@ -254,10 +254,11 @@ class FolderStorage extends AbstractFilesystemStorage if (null === $key || $key === '') { $path = $this->dataFolder; } else { + $parts = $this->parseKey($key, false); $options = [ $this->dataFolder, // {FOLDER} - $key, // {KEY} - \mb_substr($key, 0, 2), // {KEY:2} + $parts['key'], // {KEY} + $parts['key:2'], // {KEY:2} '***', // {FILE} '***' // {EXT} ]; @@ -285,11 +286,12 @@ class FolderStorage extends AbstractFilesystemStorage */ public function getPathFromKey(string $key): string { + $parts = $this->parseKey($key); $options = [ $this->dataFolder, // {FOLDER} - $key, // {KEY} - \mb_substr($key, 0, 2), // {KEY:2} - $this->dataFile, // {FILE} + $parts['key'], // {KEY} + $parts['key:2'], // {KEY:2} + $parts['file'], // {FILE} $this->dataExt // {EXT} ]; @@ -297,6 +299,24 @@ class FolderStorage extends AbstractFilesystemStorage } /** + * @param string $key + * @param bool $variations + * @return array + */ + public function parseKey(string $key, bool $variations = true): array + { + $keys = [ + 'key' => $key, + 'key:2' => \mb_substr($key, 0, 2), + ]; + if ($variations) { + $keys['file'] = $this->dataFile; + } + + return $keys; + } + + /** * Get key from the filesystem path. 
* * @param string $path diff --git a/system/src/Grav/Framework/Flex/Storage/SimpleStorage.php b/system/src/Grav/Framework/Flex/Storage/SimpleStorage.php index <HASH>..<HASH> 100644 --- a/system/src/Grav/Framework/Flex/Storage/SimpleStorage.php +++ b/system/src/Grav/Framework/Flex/Storage/SimpleStorage.php @@ -316,7 +316,21 @@ class SimpleStorage extends AbstractFilesystemStorage */ public function getMediaPath(string $key = null): string { - return sprintf('%s/%s/%s', $this->dataFolder, basename($this->dataPattern, $this->dataFormatter->getDefaultFileExtension()), $key); + $parts = $this->parseKey($key); + + return sprintf('%s/%s/%s', $this->dataFolder, basename($this->dataPattern, $this->dataFormatter->getDefaultFileExtension()), $parts['key']); + } + + /** + * @param string $key + * @param bool $variations + * @return array + */ + public function parseKey(string $key, bool $variations = true): array + { + return [ + 'key' => $key, + ]; } protected function save(): void
Make it possible to save multiple variations of the Flex Object
getgrav_grav
train
7cb9d08b7f73cc95271758f098c9230eb71f987b
diff --git a/packages/maniajs-jukebox/src/jukebox.js b/packages/maniajs-jukebox/src/jukebox.js index <HASH>..<HASH> 100644 --- a/packages/maniajs-jukebox/src/jukebox.js +++ b/packages/maniajs-jukebox/src/jukebox.js @@ -59,6 +59,88 @@ module.exports.default = class Jukebox { }); } + /** + * Remove map from jukebox. + * @param {string} map Map UID. + * @return {boolean} + */ + remove (map) { + let count = this.jukebox.length; + this.jukebox = this.jukebox.filter((jukeMap) => { + return jukeMap.uid !== map; + }); + return this.jukebox.length !== count; + } + + /** + * Clear Jukebox. + * @return {boolean} + */ + clear () { + this.jukebox = []; + return true; + } + + /** + * List jukebox. + * @param {Player} player + * @param params + */ + list (player, params) { + let cols = [ + { + name: 'Name', + field: 'name', + width: 100, + level: 0 + }, + { + name: 'Author', + field: 'author', + width: 40, + level: 0 + }, + { + name: 'Jukeboxed by', + field: 'jukeAuthor', + width: 40, + level: 0 + }, + { + name: '', + field: 'canRemove', + width: 5, + button: true, + event: 'remove' + } + ]; + let data = this.getListData(player); + let list = this.app.ui.list('Maps on the server', player.login, cols, data); + list.display(); + + list.on('remove', (entry) => { + if (this.remove (entry.entry.uid)) { + this.plugin.server.send().chat(`$c70$<$fff${player.nickname}$>$c70 removed map $c70$<$fff${entry.entry.name}$>$c70 from the jukebox!`).exec(); + list.close(); + this.list(player, []); + } + }); + } + + getListData(player) { + let data = []; + this.jukebox.forEach((juke) => { + let row = { + uid: juke.uid, + name: juke.name, + author: juke.author, + jukeAuthor: juke.jukeAuthor + }; + if (juke.jukeAuthor === player.nickname || player.level > 1) row.canRemove = true; + data.push(row); + }); + return data; + } /** * Check if map is already in jukebox. 
diff --git a/packages/maniajs-jukebox/src/plugin.js b/packages/maniajs-jukebox/src/plugin.js index <HASH>..<HASH> 100644 --- a/packages/maniajs-jukebox/src/plugin.js +++ b/packages/maniajs-jukebox/src/plugin.js @@ -46,7 +46,29 @@ module.exports.default = class extends Plugin { this.maplist.display(player, params); }); + // List command. + this.server.command.on('jukebox', 0, (playerObject, params) => { + if (! params.length) { + return this.server.send().chat('$fffUsage: /jukebox [$eeelist, clear$fff]', {destination: playerObject.login}).exec(); + } + let player = this.players.list[playerObject.login]; + switch (params.shift()) { + case 'list': + this.jukebox.list(player, params); + break; + case 'clear': + if (playerObject.level > 2) { + this.jukebox.clear(); + this.server.send().chat(`$c70$<$fff${playerObject.nickname}$>$c70 cleared the jukebox!`).exec(); + } else { + this.server.send().chat('$fffYou don\'t have the right permission to use this command!', {destination: playerObject.login}).exec(); + } + break; + default: + return this.server.send().chat('$fffUsage: /jukebox [$eeelist, clear$fff]', {destination: playerObject.login}).exec(); + } + }); this.server.on('match.end', (params) => { this.jukebox.endmap(params);
Adding remove, list and clear commands.
ManiaJS_plugins
train
48459e9082e9477ca783ae920388671363e09761
diff --git a/src/Symfony/Component/Config/Definition/ArrayNode.php b/src/Symfony/Component/Config/Definition/ArrayNode.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Config/Definition/ArrayNode.php +++ b/src/Symfony/Component/Config/Definition/ArrayNode.php @@ -28,7 +28,7 @@ class ArrayNode extends BaseNode implements PrototypeNodeInterface protected $children; protected $prototype; protected $keyAttribute; - protected $keyAttributeIsRemoved; + protected $removeKeyAttribute; protected $allowFalse; protected $allowNewKeys; protected $addIfNotSet; @@ -49,7 +49,7 @@ class ArrayNode extends BaseNode implements PrototypeNodeInterface $this->children = array(); $this->xmlRemappings = array(); - $this->keyAttributeIsRemoved = true; + $this->removeKeyAttribute = true; $this->allowFalse = false; $this->addIfNotSet = false; $this->allowNewKeys = true; @@ -115,9 +115,9 @@ class ArrayNode extends BaseNode implements PrototypeNodeInterface * * @param Boolean $remove Whether or not the key attribute should be removed. */ - public function setKeyAttributeIsRemoved($remove) + public function setRemoveKeyAttribute($remove) { - $this->keyAttributeIsRemoved = $remove; + $this->removeKeyAttribute = $remove; } /** @@ -394,7 +394,7 @@ class ArrayNode extends BaseNode implements PrototypeNodeInterface $k = $v[$this->keyAttribute]; // remove the key attribute if configured to - if ($this->keyAttributeIsRemoved) { + if ($this->removeKeyAttribute) { unset($v[$this->keyAttribute]); } } @@ -497,7 +497,7 @@ class ArrayNode extends BaseNode implements PrototypeNodeInterface } /** - * Set whether or not to this array should just prevent child values from + * Set whether or not this array should just prevent child values from * keys that have no corresponding child nodes. 
* * If true (default), an exception will be thrown if unrecognized options diff --git a/src/Symfony/Component/Config/Definition/Builder/TreeBuilder.php b/src/Symfony/Component/Config/Definition/Builder/TreeBuilder.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Config/Definition/Builder/TreeBuilder.php +++ b/src/Symfony/Component/Config/Definition/Builder/TreeBuilder.php @@ -167,7 +167,7 @@ class TreeBuilder if (null !== $node->key) { $configNode->setKeyAttribute($node->key); - $configNode->setKeyAttributeIsRemoved($node->removeKeyItem); + $configNode->setRemoveKeyAttribute($node->removeKeyItem); } if (true === $node->atLeastOne) { diff --git a/tests/Symfony/Tests/Component/Config/Definition/ArrayNodeTest.php b/tests/Symfony/Tests/Component/Config/Definition/ArrayNodeTest.php index <HASH>..<HASH> 100644 --- a/tests/Symfony/Tests/Component/Config/Definition/ArrayNodeTest.php +++ b/tests/Symfony/Tests/Component/Config/Definition/ArrayNodeTest.php @@ -129,7 +129,7 @@ class ArrayNodeTest extends \PHPUnit_Framework_TestCase { $node = new ArrayNode('root'); $node->setKeyAttribute('id'); - $node->setKeyAttributeIsRemoved(false); + $node->setRemoveKeyAttribute(false); $prototype = new ArrayNode(null); $prototype->setPreventExtraKeys(false); // just so it allows anything
[Config] Renaming the key attribute removal property for consistency per Johannes' recommendation. Also fixing a PHPDoc typo per Stof.
symfony_symfony
train
4ca40d7f3d50f242949a929fb54b42baeb56ed14
diff --git a/src/ElephantOnCouch/Couch.php b/src/ElephantOnCouch/Couch.php index <HASH>..<HASH> 100755 --- a/src/ElephantOnCouch/Couch.php +++ b/src/ElephantOnCouch/Couch.php @@ -972,7 +972,7 @@ final class Couch { $this->validateAndEncodeDbName($name); if ($name != $this->dbName) { - $this->send(new Request(Request::PUT_METHOD, "/".rawurlencode($name)."/")); + $this->send(new Request(Request::PUT_METHOD, "/".$name."/")); if ($autoSelect) $this->dbName = $name; @@ -1490,7 +1490,7 @@ final class Couch { $this->validateDocPath($path); $this->validateAndEncodeDocId($docId); - $path = "/".$this->dbName."/".$path.rawurlencode($docId); + $path = "/".$this->dbName."/".$path.$docId; $request = new Request(Request::DELETE_METHOD, $path); $request->setQueryParam("rev", (string)$rev);
sometimes I was rawurlencode() two times
dedalozzo_eoc-client
train
fdcdf87910f63d610ceb72ee7d75c14668a3cabf
diff --git a/PHPCompatibility/Sniff.php b/PHPCompatibility/Sniff.php index <HASH>..<HASH> 100644 --- a/PHPCompatibility/Sniff.php +++ b/PHPCompatibility/Sniff.php @@ -1081,10 +1081,21 @@ abstract class Sniff implements \PHP_CodeSniffer_Sniff 'T_ANON_CLASS' => true, 'T_TRAIT' => true, ); - if ($this->validDirectScope($phpcsFile, $stackPtr, $validScopes) === true) { + + $scopePtr = $this->validDirectScope($phpcsFile, $stackPtr, $validScopes); + if ($scopePtr !== false) { // Make sure it's not a method parameter. if (empty($tokens[$stackPtr]['nested_parenthesis']) === true) { return true; + } else { + $parenthesis = array_keys($tokens[$stackPtr]['nested_parenthesis']); + $deepestOpen = array_pop($parenthesis); + if ($deepestOpen < $scopePtr + || isset($tokens[$deepestOpen]['parenthesis_owner']) === false + || $tokens[$tokens[$deepestOpen]['parenthesis_owner']]['code'] !== T_FUNCTION + ) { + return true; + } } } @@ -1115,7 +1126,7 @@ abstract class Sniff implements \PHP_CodeSniffer_Sniff 'T_ANON_CLASS' => true, 'T_INTERFACE' => true, ); - if ($this->validDirectScope($phpcsFile, $stackPtr, $validScopes) === true) { + if ($this->validDirectScope($phpcsFile, $stackPtr, $validScopes) !== false) { return true; } @@ -1137,7 +1148,7 @@ abstract class Sniff implements \PHP_CodeSniffer_Sniff * format to allow for newer token types. * Value is irrelevant. * - * @return bool + * @return int|bool StackPtr to the scope if valid, false otherwise. 
*/ protected function validDirectScope(\PHP_CodeSniffer_File $phpcsFile, $stackPtr, $validScopes) { @@ -1158,7 +1169,7 @@ abstract class Sniff implements \PHP_CodeSniffer_Sniff } if (isset($validScopes[$tokens[$ptr]['type']]) === true) { - return true; + return $ptr; } return false; diff --git a/PHPCompatibility/Sniffs/FunctionNameRestrictions/RemovedMagicAutoloadSniff.php b/PHPCompatibility/Sniffs/FunctionNameRestrictions/RemovedMagicAutoloadSniff.php index <HASH>..<HASH> 100644 --- a/PHPCompatibility/Sniffs/FunctionNameRestrictions/RemovedMagicAutoloadSniff.php +++ b/PHPCompatibility/Sniffs/FunctionNameRestrictions/RemovedMagicAutoloadSniff.php @@ -66,7 +66,7 @@ class RemovedMagicAutoloadSniff extends Sniff return; } - if ($this->validDirectScope($phpcsFile, $stackPtr, $this->checkForScopes) === true) { + if ($this->validDirectScope($phpcsFile, $stackPtr, $this->checkForScopes) !== false) { return; } diff --git a/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.inc b/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.inc index <HASH>..<HASH> 100644 --- a/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.inc +++ b/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.inc @@ -95,3 +95,21 @@ class MyClass { /* Case 32 */ $varF = 'string'; } + +$a = ( $foo == $bar ? 
new stdClass() : + new class() { + /* Case 33 */ + public $var = true; + + /* Case 34 */ + public function something($var = false) {} + } +); + +function_call( 'param', new class { + /* Case 35 */ + public $year = 2017; + + /* Case 36 */ + public function __construct( $open, $post_id ) {} +}, 10, 2 ); diff --git a/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.php b/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.php index <HASH>..<HASH> 100644 --- a/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.php +++ b/PHPCompatibility/Util/Tests/Core/IsClassPropertyUnitTest.php @@ -114,6 +114,10 @@ class IsClassPropertyUnitTest extends CoreMethodTestFrame array('/* Case 30 */', true), array('/* Case 31 */', true), array('/* Case 32 */', true), + array('/* Case 33 */', true), + array('/* Case 34 */', false), + array('/* Case 35 */', true), + array('/* Case 36 */', false), ); } }
Sniff::isClassProperty: bug fix When a class would be nested in parenthesis, the method did not recognize class properties properly. To check for this without code duplication, the return of the `Sniff::validDirectScope()` method has changed. It used to always return a boolean. Now it will return `false` if not in a valid direct scope and the `$stackPtr` to the scope if it is. Includes unit test.
PHPCompatibility_PHPCompatibility
train
c324c56b582c6a2b60c2962e3fe66ea11cd05a86
diff --git a/src/presets/ssr-auth-aes128.js b/src/presets/ssr-auth-aes128.js index <HASH>..<HASH> 100644 --- a/src/presets/ssr-auth-aes128.js +++ b/src/presets/ssr-auth-aes128.js @@ -17,7 +17,7 @@ const MAX_TIME_DIFF = 30; // seconds /** * @description - * shadowsocksr "auth_aes128" base class implementation. + * shadowsocksr "auth_aes128_xxx" implementation. * * @protocol * @@ -102,8 +102,6 @@ export default class SsrAuthAes128Preset extends IPreset { _adBuf = null; - _requestPending = null; - static onInit() { SsrAuthAes128Preset.userKey = EVP_BytesToKey(__KEY__, 16, 16); SsrAuthAes128Preset.clientId = crypto.randomBytes(4); @@ -119,7 +117,6 @@ export default class SsrAuthAes128Preset extends IPreset { onDestroy() { this._adBuf.clear(); this._adBuf = null; - this._requestPending = null; } createHmac(buffer, key = SsrAuthAes128Preset.userKey) { @@ -151,7 +148,7 @@ export default class SsrAuthAes128Preset extends IPreset { client_id = crypto.randomBytes(4); SsrAuthAes128Preset.connectionId = connection_id; } else { - SsrAuthAes128Preset.connectionId = connection_id++; + connection_id = ++SsrAuthAes128Preset.connectionId; } const random_bytes_len = getRandomInt(0, buffer.length > 400 ? 
512 : 1024); @@ -231,22 +228,22 @@ export default class SsrAuthAes128Preset extends IPreset { const part12_hmac_key = Buffer.concat([iv, userKey]); // part 1 - const part1_random = buffer.slice(0, 1); const part1_hmac = buffer.slice(1, 7); - const part1_hmac_calc = this.createHmac(part1_random, part12_hmac_key).slice(0, 6); + const part1_hmac_calc = this.createHmac(buffer.slice(0, 1), part12_hmac_key).slice(0, 6); if (!part1_hmac_calc.equals(part1_hmac)) { return fail(`unexpected hmac in part 1, dump=${dumpHex(buffer)}`); } // part 2 - const uid = buffer.slice(7, 11); - const cbc_enc_header = buffer.slice(11, 27); const part2_hmac = buffer.slice(27, 31); - const part2_hmac_calc = this.createHmac(Buffer.concat([uid, cbc_enc_header]), part12_hmac_key).slice(0, 4); + const part2_hmac_calc = this.createHmac(buffer.slice(7, 27), part12_hmac_key).slice(0, 4); if (!part2_hmac_calc.equals(part2_hmac)) { return fail(`unexpected hmac in part 2, dump=${dumpHex(buffer)}`); } + // const uid = buffer.slice(7, 11); + const cbc_enc_header = buffer.slice(11, 27); + const decipher_key = EVP_BytesToKey(userKey.toString('base64') + this._salt, 16, 16); const decipher = crypto.createDecipheriv('aes-128-cbc', decipher_key, Buffer.alloc(16)); const header = decipher.update(Buffer.concat([ @@ -279,15 +276,14 @@ export default class SsrAuthAes128Preset extends IPreset { } const payload = buffer.slice(31 + random_bytes_len, pack_len - 4); - const extra_data = buffer.slice(pack_len); + const extra_chunk = buffer.slice(pack_len); this._isHeaderRecv = true; - if (extra_data.length > 0) { - this._requestPending = payload; - this._adBuf.put(extra_data, {next, fail}); - } else { - next(payload); + next(payload); + + if (extra_chunk.length > 0) { + this._adBuf.put(extra_chunk, {next, fail}); } } else { this._adBuf.put(buffer, {next, fail}); @@ -335,12 +331,7 @@ export default class SsrAuthAes128Preset extends IPreset { this._decodeChunkId += 1; const random_bytes_len = chunk[4] < 0xff ? 
chunk[4] : chunk.readUInt16LE(5); const payload = chunk.slice(4 + random_bytes_len, -4); - if (this._requestPending !== null) { - next(Buffer.concat([this._requestPending, payload])); - this._requestPending = null; - } else { - next(payload); - } + next(payload); } // udp
presets: minor fix for ssr-auth-aes<I>
blinksocks_blinksocks
train
6abee2008b314a65553202b15d9a333d171e3433
diff --git a/daemon/cluster/noderunner.go b/daemon/cluster/noderunner.go index <HASH>..<HASH> 100644 --- a/daemon/cluster/noderunner.go +++ b/daemon/cluster/noderunner.go @@ -124,8 +124,11 @@ func (n *nodeRunner) start(conf nodeStartConfig) error { n.cluster.config.Backend, n.cluster.config.PluginBackend, n.cluster.config.ImageBackend), - HeartbeatTick: 1, - ElectionTick: 3, + HeartbeatTick: 1, + // Recommended value in etcd/raft is 10 x (HeartbeatTick). + // Lower values were seen to have caused instability because of + // frequent leader elections when running on flakey networks. + ElectionTick: 10, UnlockKey: conf.lockKey, AutoLockManagers: conf.autolock, PluginGetter: n.cluster.config.Backend.PluginGetter(),
Increase raft ElectionTick to <I>xHeartbeatTick
moby_moby
train
686704676e2d0e9a65a6777e72af1cf559f9aa30
diff --git a/marshmallow/fields.py b/marshmallow/fields.py index <HASH>..<HASH> 100644 --- a/marshmallow/fields.py +++ b/marshmallow/fields.py @@ -578,6 +578,12 @@ def get_args(func): return inspect.getargspec(func).args +def _callable(obj): + if not callable(obj): + raise MarshallingError('{0!r} is not callable.'.format(obj)) + return obj + + class Method(Raw): """A field that takes the value returned by a Serializer method. @@ -612,7 +618,7 @@ class Function(Raw): def __init__(self, func, **kwargs): super(Function, self).__init__(**kwargs) - self.func = func + self.func = _callable(func) @validated def output(self, key, obj): diff --git a/tests/test_marshmallow.py b/tests/test_marshmallow.py index <HASH>..<HASH> 100644 --- a/tests/test_marshmallow.py +++ b/tests/test_marshmallow.py @@ -850,8 +850,7 @@ class TestFields(unittest.TestCase): assert_equal("FOO", field.output("key", self.user)) def test_function_with_uncallable_param(self): - field = fields.Function("uncallable") - assert_raises(MarshallingError, lambda: field.output("key", self.user)) + assert_raises(MarshallingError, lambda: fields.Function("uncallable")) def test_datetime_field(self): field = fields.DateTime() @@ -1143,10 +1142,6 @@ class TestContext(unittest.TestCase): noncollab = User('Foo') assert_false(UserContextSerializer(noncollab, context=context).data['is_collab']) - def test_context_bad_signature(self): - assert 0, 'finish me' - - if __name__ == '__main__': unittest.main()
Function field initialization fails early if argument is not callable.
marshmallow-code_marshmallow
train
9c836c0d3daedca05e8caecfbd81cccf2bb94378
diff --git a/src/Core/Config/Configurable.php b/src/Core/Config/Configurable.php index <HASH>..<HASH> 100644 --- a/src/Core/Config/Configurable.php +++ b/src/Core/Config/Configurable.php @@ -25,6 +25,7 @@ trait Configurable /** * Get inherited config value * + * @deprecated 5.0 Use ->config()->get() instead * @param string $name * @return mixed */ @@ -48,6 +49,7 @@ trait Configurable /** * Update the config value for a given property * + * @deprecated 5.0 Use ->config()->set() instead * @param string $name * @param mixed $value * @return $this
Added missing @deprecated tags This PR just adds a couple of @deprecated tags where they were missing, where IDE's like PHPStorm immediately alert the user that it's deprecated.
silverstripe_silverstripe-framework
train
5a7d06ae247ca423e385af45403a3913b0d66875
diff --git a/lib/queryko/filters/base.rb b/lib/queryko/filters/base.rb index <HASH>..<HASH> 100644 --- a/lib/queryko/filters/base.rb +++ b/lib/queryko/filters/base.rb @@ -1,12 +1,13 @@ module Queryko module Filters class Base - attr_reader :table_name, :column_name, :feature + attr_reader :table_name, :column_name, :feature, :as def initialize(options = {}, feature) @table_name = options.fetch(:table_name) @column_name = options.fetch(:column_name) @feature = feature + @as = options[:as] end end end diff --git a/spec/lib/queryko/filters/base_spec.rb b/spec/lib/queryko/filters/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/queryko/filters/base_spec.rb +++ b/spec/lib/queryko/filters/base_spec.rb @@ -5,7 +5,8 @@ RSpec.describe Queryko::Filters::Base do let(:options) do { table_name: 'users', - column_name: 'name' + column_name: 'name', + as: 'fullname' } end let(:filter_class) do @@ -20,4 +21,5 @@ RSpec.describe Queryko::Filters::Base do it { expect(filter.table_name).to eq('users') } it { expect(filter.column_name).to eq('name') } + it { expect(filter.as).to eq('fullname') } end
Add 'as' property for Filter Base class
neume_queryko
train
9616ee3627ae2ec1cd4a713d967b18b8db11600a
diff --git a/zappa/cli.py b/zappa/cli.py index <HASH>..<HASH> 100644 --- a/zappa/cli.py +++ b/zappa/cli.py @@ -284,6 +284,10 @@ class ZappaCLI(object): help=('When invoking remotely, invoke this python as a string,' ' not as a modular path.') ) + invoke_parser.add_argument( + '--no-color', action='store_true', + help=("Don't color the output") + ) invoke_parser.add_argument('command_rest') ## @@ -297,6 +301,10 @@ class ZappaCLI(object): "required if --all is specified") manage_parser.add_argument('--all', action='store_true', help=all_help) manage_parser.add_argument('command_rest', nargs='+', help=rest_help) + manage_parser.add_argument( + '--no-color', action='store_true', + help=("Don't color the output") + ) ## # Rollback @@ -506,7 +514,11 @@ class ZappaCLI(object): print("Please enter the function to invoke.") return - self.invoke(self.vargs['command_rest'], raw_python=self.vargs['raw']) + self.invoke( + self.vargs['command_rest'], + raw_python=self.vargs['raw'], + no_color=self.vargs['no_color'], + ) elif command == 'manage': # pragma: no cover if not self.vargs.get('command_rest'): @@ -524,7 +536,11 @@ class ZappaCLI(object): else: command = command_tail[0] # ex: zappa manage dev showmigrations admin - self.invoke(command, command="manage") + self.invoke( + command, + command="manage", + no_color=self.vargs['no_color'], + ) elif command == 'tail': # pragma: no cover self.tail( @@ -1071,8 +1087,7 @@ class ZappaCLI(object): removed_arns = self.zappa.remove_async_sns_topic(self.lambda_name) click.echo('SNS Topic removed: %s' % ', '.join(removed_arns)) - - def invoke(self, function_name, raw_python=False, command=None): + def invoke(self, function_name, raw_python=False, command=None, no_color=False): """ Invoke a remote function. 
""" @@ -1097,7 +1112,13 @@ class ZappaCLI(object): ) if 'LogResult' in response: - print(base64.b64decode(response['LogResult'])) + if no_color: + print(base64.b64decode(response['LogResult'])) + else: + decoded = base64.b64decode(response['LogResult']).decode() + formated = self.format_invoke_command(decoded) + colorized = self.colorize_invoke_command(formated) + print(colorized) else: print(response)
Add --no-color to invoke and manage cli, print colored invoke and manage commands by default
Miserlou_Zappa
train
31964d03e8c0d3df690980f29509ebc7c3230586
diff --git a/lib/autokey/scripting/engine.py b/lib/autokey/scripting/engine.py index <HASH>..<HASH> 100644 --- a/lib/autokey/scripting/engine.py +++ b/lib/autokey/scripting/engine.py @@ -95,6 +95,7 @@ Folders created within temporary folders must themselves be set temporary") new_folder.temporary = True return new_folder + def create_phrase(self, folder, name: str, contents: str, abbreviations: Union[str, List[str]]=None, hotkey: Tuple[List[Union[model.Key, str]], Union[model.Key, str]]=None, @@ -177,15 +178,51 @@ Folders created within temporary folders must themselves be set temporary") It can be used for _really_ advanced use cases, where further customizations are desired. Use at your own risk. No guarantees are made about the object’s structure. Read the AutoKey source code for details. """ - # Start with some simple input type-checking. - if type(folder) is not model.Folder: + # Start with input type-checking. + if not isinstance(folder, model.Folder): raise ValueError("Expected a folder, not {}".format( type(folder)) ) - if type(name) is not str: + if not isinstance(name, str): raise ValueError("Expected name to be str, not {}".format( + type(name)) + ) + if not isinstance(contents, str): + raise ValueError("Expected contents to be str, not {}".format( type(contents)) ) + # TODO This doesn't validate if the list contains non-strings. + if abbreviations is not None and \ + type(abbreviations) is not str and \ + type(abbreviations) is not list: + raise ValueError("Expected abbreviations to be str or List[str], not {}".format( + type(abbreviations)) + ) + # I can't figure out how to validate hotkey. 
+ # if hotkey is not None and type(hotkey) is not Tuple[List[Union[model.Key, str]], Union[model.Key, str]]: + # raise ValueError("Expected hotkey to be Tuple[List[Union[model.Key, str]], Union[model.Key, str]], not {}".format( + # type(hotkey)) + # ) + if send_mode is not None and not isinstance(send_mode, model.SendMode): + raise ValueError("Expected send_mode to be model.SendMode, not {}".format( + type(send_mode)) + ) + if window_filter is not None and not isinstance(window_filter, str): + raise ValueError("Expected window_filter to be str, not {}".format( + type(window_filter)) + ) + if not isinstance(show_in_system_tray, bool): + raise ValueError("Expected show_in_system_tray to be bool, not {}".format( + type(show_in_system_tray)) + ) + if not isinstance(always_prompt, bool): + raise ValueError("Expected always_prompt to be bool, not {}".format( + type(always_prompt)) + ) + if not isinstance(temporary, bool): + raise ValueError("Expected temporary to be bool, not {}".format( + type(temporary)) + ) # TODO: The validation should be done by some controller functions in the model base classes. if abbreviations: if isinstance(abbreviations, str): @@ -217,14 +254,9 @@ Phrases created within temporary folders must themselves be explicitly set tempo p.set_hotkey(*hotkey) if window_filter: p.set_window_titles(window_filter) - # XXX: Could these next 3 be refactored to just set p.<val> to the - # boolean, rather than the if? - if show_in_system_tray: - p.show_in_tray_menu = True - if always_prompt: - p.prompt = True - if temporary: - p.temporary = True + p.show_in_tray_menu = show_in_system_tray + p.prompt = always_prompt + p.temporary = temporary folder.add_item(p) # Don't save a json if it is a temporary hotkey. 
Won't persist across diff --git a/tests/scripting_api/test_engine.py b/tests/scripting_api/test_engine.py index <HASH>..<HASH> 100644 --- a/tests/scripting_api/test_engine.py +++ b/tests/scripting_api/test_engine.py @@ -41,15 +41,34 @@ def create_engine() -> typing.Tuple[Engine, autokey.model.Folder]: return engine, test_folder + def test_engine_create_phrase_invalid_input_types_raises_value_error(): engine, folder = create_engine() with patch("autokey.model.Phrase.persist"): assert_that( - calling(engine.create_phrase).with_args("Not a folder", "contents", "abreviation",), + calling(engine.create_phrase).with_args("Not a folder", "name", "contents",), raises(ValueError), "Folder is not checked for type=model.Folder") assert_that( - calling(engine.create_phrase).with_args(folder, folder, "abreviation",), + calling(engine.create_phrase).with_args(folder, folder, + "contents"), raises(ValueError), "name is not checked for type=str") + assert_that( + calling(engine.create_phrase).with_args(folder, "name", + folder), + raises(ValueError), "contents is not checked for type=str") + assert_that( + calling(engine.create_phrase).with_args(folder, "name", + "contents", folder), + raises(ValueError), "abbreviations is not checked for type=str") + assert_that( + calling(engine.create_phrase).with_args(folder, "name", + "contents", ["t1", "t2"]), + not_(raises(ValueError)), "abbreviations is not checked for type=list") + # assert_that( + # calling(engine.create_phrase).with_args(folder, "name", + # "contents", ["t1", folder]), + # not_(raises(ValueError)), "abbreviations is not checked for type=list[str]") + def test_engine_create_phrase_adds_phrase_to_parent(): engine, folder = create_engine()
Type validate all input to create_phrase
autokey_autokey
train
3bbfb167716e0f0116a5debf548c470e63a241a2
diff --git a/core/src/main/java/org/infinispan/statetransfer/PushConfirmationsMap.java b/core/src/main/java/org/infinispan/statetransfer/PushConfirmationsMap.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/infinispan/statetransfer/PushConfirmationsMap.java +++ b/core/src/main/java/org/infinispan/statetransfer/PushConfirmationsMap.java @@ -105,6 +105,8 @@ class PushConfirmationsMap { lastViewId = viewId; membersCount = members.size(); actualConfirmationsCount = 0; + pushConfirmations.clear(); + joinConfirmations.clear(); } finally { lock.unlock(); } diff --git a/core/src/test/java/org/infinispan/statetransfer/StateTransferFunctionalTest.java b/core/src/test/java/org/infinispan/statetransfer/StateTransferFunctionalTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/org/infinispan/statetransfer/StateTransferFunctionalTest.java +++ b/core/src/test/java/org/infinispan/statetransfer/StateTransferFunctionalTest.java @@ -177,6 +177,7 @@ public class StateTransferFunctionalTest extends MultipleCacheManagersTest { logTestEnd(m); } + @Test(enabled = false, description = "Assymetric caches are *really* not supported in this version") public void testInitialStateTransferCacheNotPresent(Method m) throws Exception { testCount++; logTestStart(m); @@ -274,6 +275,7 @@ public class StateTransferFunctionalTest extends MultipleCacheManagersTest { logTestEnd(m); } + @Test(enabled = false, description = "The new state transfer doesn't work with cache or cache manager restarts (yet)") public void testInitialStateTransferAfterRestart(Method m) throws Exception { testCount++; logTestStart(m);
ISPN-<I> - I disabled some tests that restarted caches in replicated mode, the new state transfer doesn't support restarts. We'd need to solve the whole asymmetric caches issue first.
infinispan_infinispan
train
688be1bc4b96f06da6141e99404817de3a261708
diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -2,6 +2,7 @@ var gp = require('./'); var assert = require('assert'); +var isWin32 = require('os').platform() === 'win32'; describe('glob-parent', function() { it('should strip glob magic to return parent path', function() { @@ -73,13 +74,20 @@ describe('glob-parent', function() { assert.equal(gp('path/\\[bar]'), 'path/[bar]'); assert.equal(gp('[bar]'), '.'); assert.equal(gp('[bar]/'), '.'); - assert.equal(gp('\\[bar]'), '[bar]'); - assert.equal(gp('[bar\\]'), '.'); + assert.equal(gp('./\\[bar]'), './[bar]'); + assert.equal(gp('\\[bar]/'), '[bar]'); + assert.equal(gp('[bar\\]/'), '.'); assert.equal(gp('path/foo \\[bar]/'), 'path/foo [bar]'); assert.equal(gp('path/\\{foo,bar}/'), 'path/{foo,bar}'); assert.equal(gp('\\{foo,bar}/'), '{foo,bar}'); - assert.equal(gp('\\{foo,bar\\}'), '{foo,bar}'); - assert.equal(gp('{foo,bar\\}'), '.'); + assert.equal(gp('\\{foo,bar\\}/'), '{foo,bar}'); + assert.equal(gp('{foo,bar\\}/'), '.'); + if (!isWin32) { + assert.equal(gp('\\[bar]'), '[bar]'); + assert.equal(gp('[bar\\]'), '.'); + assert.equal(gp('\\{foo,bar\\}'), '{foo,bar}'); + assert.equal(gp('{foo,bar\\}'), '.'); + } }); it('should respect glob enclosures with embedded separators', function() { @@ -163,7 +171,7 @@ describe('glob2base test patterns', function() { }); }); -if (require('os').platform() === 'win32') { +if (isWin32) { describe('technically invalid windows globs', function() { it('should manage simple globs with backslash path separator', function() { assert.equal(gp('C:\\path\\*.js'), 'C:/path')
test: update escaping tests for windows
gulpjs_glob-parent
train
7eb24087ff163c91541318b802e574f20004f63e
diff --git a/tests/Phug/CasesTest.php b/tests/Phug/CasesTest.php index <HASH>..<HASH> 100644 --- a/tests/Phug/CasesTest.php +++ b/tests/Phug/CasesTest.php @@ -28,8 +28,7 @@ class CasesTest extends AbstractRendererTest public function testRender($expected, $actual, $message) { $debug = $this->renderer->getOption('debug'); - // Debug mode to heavy for HHVM - $this->renderer->setOption('debug', !defined('HHVM_VERSION')); + $this->renderer->setOption('debug', true); $render = $this->renderer->renderFile($actual); $this->renderer->setOption('debug', $debug);
Re-enable debug for HHVM cases
phug-php_renderer
train
b4d064d6f278764dff21cacada4f8d771e5e822a
diff --git a/workalendar/tests/test_europe.py b/workalendar/tests/test_europe.py index <HASH>..<HASH> 100644 --- a/workalendar/tests/test_europe.py +++ b/workalendar/tests/test_europe.py @@ -379,6 +379,7 @@ class BelgiumTest(GenericCalendarTest): self.assertIn(date(2015, 11, 1), holidays) self.assertIn(date(2015, 12, 25), holidays) + class GermanyTest(GenericCalendarTest): cal_class = Germany
Add blank line to comply with PEP8
peopledoc_workalendar
train
f7ef197d3030b34d3a269440dab64c8e52540651
diff --git a/library/index.php b/library/index.php index <HASH>..<HASH> 100644 --- a/library/index.php +++ b/library/index.php @@ -6,7 +6,8 @@ class index extends Codup\main function index () { - $this->view->text = "here's some text!"; + + $this->view->text = "Text from default controller: ".__file__; } function action () diff --git a/library/views/index/index.php b/library/views/index/index.php index <HASH>..<HASH> 100644 --- a/library/views/index/index.php +++ b/library/views/index/index.php @@ -1,5 +1,11 @@ +Template file: +<?php + +echo __file__; -<button id="click">click</button> +?> +<br/> +Examples: <a href="/dev" title="Defaults to controler index and action index">Dev</a> | <a href="/dev/index/action" title="controller index action action">Dev:action</a> | <a href="/dev/index!" title="See errors bellow">invalid</a><br/> <?php echo $this->text; ?>
added links to the test module on the default first page as examples showing also some paths
ghousseyn_phiber
train
cb0ca22fbac8f430480ca9fc19208ed11f895592
diff --git a/saltcloud/clouds/ec2.py b/saltcloud/clouds/ec2.py index <HASH>..<HASH> 100644 --- a/saltcloud/clouds/ec2.py +++ b/saltcloud/clouds/ec2.py @@ -101,7 +101,7 @@ def __virtual__(): ) ) - global avail_images, avail_sizes, script, destroy, list_nodes + global avail_images, avail_sizes, script, destroy global list_nodes_full, list_nodes_select # open a connection in a specific region @@ -111,7 +111,6 @@ def __virtual__(): avail_images = namespaced_function(avail_images, globals(), (conn,)) avail_sizes = namespaced_function(avail_sizes, globals(), (conn,)) script = namespaced_function(script, globals(), (conn,)) - list_nodes = namespaced_function(list_nodes, globals(), (conn,)) list_nodes_full = namespaced_function(list_nodes_full, globals(), (conn,)) list_nodes_select = namespaced_function(list_nodes_select, globals(), (conn,)) @@ -535,16 +534,17 @@ def get_tags(name): ''' Retrieve tags for a node ''' - location = get_location() - conn = get_conn(location=location) - node = get_node(conn, name) - try: - log.info('Retrieving tags from {0}'.format(name)) - data = conn.ex_describe_tags(resource=node) - log.info(data) - except Exception as exc: - log.error('Failed to retrieve tags from {0}'.format(name)) - log.error(exc) + instances = get_instance(name=name) + if not instances: + kwargs = {'instance': name} + instances = get_instance(kwargs=kwargs) + instance_id = instances[0]['instancesSet']['item']['instanceId'] + params = {'Action': 'DescribeTags', + 'Filter.1.Name': 'resource-id', + 'Filter.1.Value': instance_id} + result = query(params, setname='tagSet') + log.info(result) + return result def del_tags(name, kwargs): @@ -605,10 +605,10 @@ def destroy(name): Wrap core libcloudfuncs destroy method, adding check for termination protection ''' - instances = show_instance(name=name) + instances = get_instance(name=name) if not instances: kwargs = {'instance': name} - instances = show_instance(kwargs=kwargs) + instances = get_instance(kwargs=kwargs) instance_id = 
instances[0]['instancesSet']['item']['instanceId'] params = {'Action': 'TerminateInstances', @@ -618,17 +618,17 @@ def destroy(name): pprint.pprint(result) -def showimage(name, kwargs): +def show_image(name, kwargs): ''' Show the details from EC2 concerning an AMI ''' params = {'ImageId.1': kwargs['image'], 'Action': 'DescribeImages'} - import pprint - pprint.pprint(query(params)) + result = query(params) + log.info(result) -def show_instance(name=None, kwargs=None): +def get_instance(name=None, kwargs=None): ''' Show the details from EC2 concerning an AMI ''' @@ -646,3 +646,35 @@ def show_instance(name=None, kwargs=None): else: return instances + +def show_instance(name=None, kwargs=None): + ''' + Show the details from EC2 concerning an AMI + ''' + result = get_instance(name, kwargs) + log.info(result) + import pprint + pprint.pprint(result) + return result + + +def list_nodes(name=None): + ''' + Return a list of the VMs that are on the provider + ''' + ret = {} + instances = get_instance() + for instance in instances: + name = instance['instancesSet']['item']['tagSet']['item']['value'] + ret[name] = { + 'id': instance['instancesSet']['item']['instanceId'], + 'image': instance['instancesSet']['item']['imageId'], + 'size': instance['instancesSet']['item']['instanceType'], + 'state': instance['instancesSet']['item']['instanceState']['name'] + } + if 'privateIpAddress' in instance['instancesSet']['item']: + ret[name]['private_ips'] = [instance['instancesSet']['item']['privateIpAddress']] + if 'ipAddress' in instance['instancesSet']['item']: + ret[name]['public_ips'] = [instance['instancesSet']['item']['ipAddress']] + return ret +
Replace list_nodes with a direct API version
saltstack_salt
train
8330b63649453a14afce49b575f65e954781c9d7
diff --git a/src/Parser/RegularParser.php b/src/Parser/RegularParser.php index <HASH>..<HASH> 100644 --- a/src/Parser/RegularParser.php +++ b/src/Parser/RegularParser.php @@ -99,7 +99,7 @@ final class RegularParser implements ParserInterface return $closingName; } - if(false === $content) { + if(false === $content || $closingName !== $name) { $this->backtrack(false); $text = $this->backtrack(false); diff --git a/tests/ParserTest.php b/tests/ParserTest.php index <HASH>..<HASH> 100644 --- a/tests/ParserTest.php +++ b/tests/ParserTest.php @@ -173,6 +173,9 @@ final class ParserTest extends \PHPUnit_Framework_TestCase new ParsedShortcode(new Shortcode('b', array(), ' [a][a][a] '), '[b] [a][a][a] [/b]', 0), new ParsedShortcode(new Shortcode('b', array(), ' [a][a][a] '), '[b] [a][a][a] [/b]', 19), )), + array($s, '[name]random[/other]', array( + new ParsedShortcode(new Shortcode('name', array(), null), '[name]', 0), + )), ); /**
fixed RegularParser issue with matching top-level just-closed shortcodes
thunderer_Shortcode
train
12aeac3f932269e7dd85c3cef013f7e7cc705342
diff --git a/lib/Rails/Cache/Store/MemCachedStore.php b/lib/Rails/Cache/Store/MemCachedStore.php index <HASH>..<HASH> 100755 --- a/lib/Rails/Cache/Store/MemCachedStore.php +++ b/lib/Rails/Cache/Store/MemCachedStore.php @@ -36,14 +36,12 @@ class MemCachedStore extends AbstractStore # There was some kind of error. } } else { - return unserialize($value); + return $value; } } public function write($key, $val, array $params) { - $val = serialize($val); - if (isset($params['expires_in'])) { if (!ctype_digit((string)$params['expires_in'])) $expires_in = strtotime('+' . $params['expires_in']); @@ -69,4 +67,4 @@ class MemCachedStore extends AbstractStore return false; } } -} \ No newline at end of file +}
removed serialization in memcached
railsphp_railsphp
train
95dcff84f5d290b5bd428815e696a2e6bf2cce5e
diff --git a/accessories/aircon.js b/accessories/aircon.js index <HASH>..<HASH> 100644 --- a/accessories/aircon.js +++ b/accessories/aircon.js @@ -139,7 +139,7 @@ class AirConAccessory extends BroadlinkRMAccessory { // Some devices don't include a thermometer if (pseudoDeviceTemperature !== undefined) return; - if (!autoHeatTemperature && !autoCoolTemperature) return; + if ((!autoHeatTemperature && !autoCoolTemperature) || !this.isAutoSwitchOn()) return; this.getCurrentTemperature((err, temperature) => { this.thermostatService.setCharacteristic(Characteristic.CurrentTemperature, temperature); @@ -165,7 +165,7 @@ class AirConAccessory extends BroadlinkRMAccessory { return; } - if (!autoHeatTemperature && !autoCoolTemperature) return; + if ((!autoHeatTemperature && !autoCoolTemperature) || !this.isAutoSwitchOn()) return; if (autoHeatTemperature && temperature < autoHeatTemperature) { this.state.runningAutomatically = true; @@ -200,6 +200,23 @@ class AirConAccessory extends BroadlinkRMAccessory { this.autoOnTimeout = undefined } + isAutoSwitchOn () { + return this.autoSwitchAccessory && this.autoSwitchAccessory.state && this.autoSwitchAccessory.state.switchState; + } + + updateAccessories (accessories) { + const { config, log } = this; + const { autoSwitch } = config; + + if (!autoSwitch) return; + + const autoSwitchAccessories = accessories.filter(accessory => accessory.name === autoSwitch); + + if (autoSwitchAccessories.length === 0) return log(`${name} No accessory could be found with the name "${autoSwitch}". 
Please update the "autoSwitch" value or add a matching switch accessory.`); + + this.autoSwitchAccessory = autoSwitchAccessories[0]; + } + // Thermostat sendTemperature (temperature, previousTemperature) { const { config, data, host, log, name, state } = this; diff --git a/config-sample.json b/config-sample.json index <HASH>..<HASH> 100644 --- a/config-sample.json +++ b/config-sample.json @@ -123,6 +123,27 @@ } }, { + "name":"Air Conditioner Advanced", + "type":"air-conditioner", + "autoSchedule": [ + { "days": [ "Monday", "Tuesday", "Wednesday", "Thursday", "Friday" ], "startTime": 6, "endTime": 8 }, + { "days": [ "Monday", "Tuesday", "Wednesday", "Thursday", "Friday" ], "startTime": 16, "endTime": 22 }, + { "days": [ "Saturday", "Sunday" ], "startTime": 6, "endTime": 22 }, + ], + "autoSwitch": "A/C Auto Switch", + "data":{ + "off":"2600500000012...", + "temperature30":{ + "pseudo-mode":"heat", + "data":"2600500000012..." + }, + "temperature16":{ + "pseudo-mode":"cool", + "data":"2600500000012..." + } + } + }, + { "name": "Fan", "type": "fan", "data": { diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -73,6 +73,12 @@ class BroadlinkRMPlatform { accessories.push(homeKitAccessory); }) + accessories.forEach((accessory) => { + if (typeof accessory !== Accessory.AirCon) return; + + accessory.updateAccessories(accessories) + }) + callback(accessories); } }
Support for using a separate switch to enable/disable use of autoHeatTemperature/autoCoolTemperature for the air-conditioner accessory.
lprhodes_homebridge-broadlink-rm
train
0556a4ae9e8e6a8f0a682956f50890a23f3c98be
diff --git a/thinc/layers/tensorflow_wrapper.py b/thinc/layers/tensorflow_wrapper.py index <HASH>..<HASH> 100644 --- a/thinc/layers/tensorflow_wrapper.py +++ b/thinc/layers/tensorflow_wrapper.py @@ -1,6 +1,6 @@ from typing import Callable, Tuple, Any from ..model import Model -from ..shims import TensorFlowShim +from ..shims import TensorflowShim from ..util import xp2tensorflow, tensorflow2xp from ..types import Array @@ -11,7 +11,7 @@ except ImportError: has_tensorflow = False -def TensorFlowWrapper(tensorflow_model: Any) -> Model: +def TensorflowWrapper(tensorflow_model: Any) -> Model: """Wrap a TensorFlow model, so that it has the same API as Thinc models. To optimize the model, you'll need to create a Tensorflow optimizer and call optimizer.apply_gradients after each batch @@ -19,7 +19,7 @@ def TensorFlowWrapper(tensorflow_model: Any) -> Model: assert has_tensorflow, "Tensorflow not found!" assert isinstance(tensorflow_model, tf.keras.models.Model), \ "tensorflow_model must be an instance of tf.keras.models.Model" - return Model("tensorflow", forward, shims=[TensorFlowShim(tensorflow_model)]) + return Model("tensorflow", forward, shims=[TensorflowShim(tensorflow_model)]) def forward(model: Model, X: Array, is_train: bool) -> Tuple[Array, Callable]:
Fix tensorflow naming
explosion_thinc
train
05463978db316ba869fb31823f8449f421b9f38f
diff --git a/src/client.js b/src/client.js index <HASH>..<HASH> 100644 --- a/src/client.js +++ b/src/client.js @@ -184,9 +184,6 @@ HQClient.prototype.rpcproxy = function(){ var result = self.router.search(route); - console.log('-------------------------------------------'); - console.dir(result); - /* we have no routes diff --git a/src/mesh.js b/src/mesh.js index <HASH>..<HASH> 100644 --- a/src/mesh.js +++ b/src/mesh.js @@ -130,13 +130,6 @@ Mesh.prototype.addworker = function(worker){ Mesh.prototype.removeworker = function(worker){ var self = this; - console.log('-------------------------------------------'); - console.log('-------------------------------------------'); - console.log('removing worker'); - - console.dir(worker); - console.dir(this.available); - if(!this.available[worker.id]){ return; } diff --git a/src/router.js b/src/router.js index <HASH>..<HASH> 100644 --- a/src/router.js +++ b/src/router.js @@ -135,10 +135,10 @@ Router.prototype.search = function(route){ var parts = route.split('/'); while(!workerids && parts.length>0){ parts.pop(); - workerids = self.state.routes[parts.join('/')]; + workerids = self.state.routes[parts.join('/') || '/']; } - var finalroute = parts.join('/'); + var finalroute = parts.join('/') || '/'; return mapids(workerids, finalroute); } @@ -212,7 +212,7 @@ Router.prototype.heartbeat = function(packet){ self.state.lastseen[worker.id] = new Date().getTime(); } -Router.prototype.addroute = function(route, worker){ +Router.prototype.addroute = Router.prototype.add = function(route, worker){ this.cache = {}; @@ -245,9 +245,16 @@ Router.prototype.removeworker = function(remworker){ this.cache = {}; var worker = this.state.workers[remworker.id]; + _.each(this.state.routes, function(workers, route){ if(workers[remworker.id]){ - delete(workers[remworker.idj]); + var newworkers = {}; + for(var wid in workers){ + if(wid!=remworker.id){ + newworkers[wid] = workers[wid]; + } + } + self.state.routes[route] = newworkers; 
self.emit('removed', route, remworker); self.emit('removed.' + route, route, remworker); } @@ -255,6 +262,7 @@ Router.prototype.removeworker = function(remworker){ delete(this.state.workers[remworker.id]); delete(this.state.lastseen[remworker.id]); + return this; } \ No newline at end of file diff --git a/test/router.test.js b/test/router.test.js index <HASH>..<HASH> 100644 --- a/test/router.test.js +++ b/test/router.test.js @@ -8,24 +8,25 @@ describe('router', function(){ var router = telegraft.router(); - router.add('warehouse:/hello', { + router.add('/hello', { id:10 }) - router.add('warehouse:/hello', { + router.add('/hello', { id:11 }) - router.add('warehouse:/', { + router.add('/', { id:12 }) - var results1 = router.search('warehouse:/hello/123'); + var results1 = router.search('/hello/123'); results1.workers.length.should.equal(2); results1.workers[0].id.should.equal(10); results1.workers[1].id.should.equal(11); - var results2 = router.search('warehouse:/other'); + var results2 = router.search('/other'); + results2.workers.length.should.equal(1); results2.workers[0].id.should.equal(12); @@ -37,33 +38,34 @@ describe('router', function(){ var router = telegraft.router(); - router.on('added.warehouse:/hello', function(){ + router.on('added./hello', function(){ counter++; }) - router.on('removed.warehouse:/hello', function(){ + router.on('removed./hello', function(){ counter++; }) - router.add('warehouse:/hello', { + router.add('/hello', { id:10 }) - router.add('warehouse:/hello', { + router.add('/hello', { id:11 }) - router.add('warehouse:/', { + router.add('/', { id:12 }) - router.remove('warehouse:/hello', { + router.removeworker({ id:11 }) + counter.should.equal(3); - var results = router.search('warehouse:/hello'); + var results = router.search('/hello'); results.workers.length.should.equal(1); })
fixed routing error for slash and got tests to work again
binocarlos_telegraft
train
b6b174fe64b8762cf6f59b277dfc8073e2ae0ce3
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -32,10 +32,11 @@ var picos = { } }; -var getPicoByECI = function(eci){ - return _.find(picos, function(pico){ +var getPicoByECI = function(eci, callback){ + var pico = _.find(picos, function(pico){ return _.includes(pico.channels, eci); }); + callback(undefined, pico); }; var jsonResp = function(res, data){ @@ -56,32 +57,33 @@ router.set('/sky/event/:eci/:eid/:domain/:type', function(req, res, route){ type: route.params.type, attrs: route.data }; - var pico = getPicoByECI(event.eci); - - selectRulesToEval(pico, rulesets, event, function(err, to_eval){ + getPicoByECI(event.eci, function(err, pico){ if(err) return errResp(res, err); - - λ.map(to_eval, function(e, callback){ - - var ctx = { - pico: pico, - db: db, - vars: {}, - event: event, - meta: { - rule_name: e.rule_name, - txn_id: 'TODO',//TODO transactions - rid: e.rid, - eid: event.eid - } - }; - - evalRule(e.rule, ctx, callback); - - }, function(err, directives){ + selectRulesToEval(pico, rulesets, event, function(err, to_eval){ if(err) return errResp(res, err); - jsonResp(res, { - directives: directives + + λ.map(to_eval, function(e, callback){ + + var ctx = { + pico: pico, + db: db, + vars: {}, + event: event, + meta: { + rule_name: e.rule_name, + txn_id: 'TODO',//TODO transactions + rid: e.rid, + eid: event.eid + } + }; + + evalRule(e.rule, ctx, callback); + + }, function(err, directives){ + if(err) return errResp(res, err); + jsonResp(res, { + directives: directives + }); }); }); }); @@ -93,25 +95,27 @@ router.set('/sky/cloud/:rid/:function', function(req, res, route){ var args = _.omit(route.data, '_eci'); var fn_name = route.params['function']; - var pico = getPicoByECI(eci); - if(!pico){ - return errResp(res, new Error('Bad eci')); - } - if(!_.includes(pico.rulesets, rid)){ - return errResp(res, new Error('Pico does not have that rid')); - } + getPicoByECI(eci, function(err, pico){ + if(err) 
return errResp(res, err); + if(!pico){ + return errResp(res, new Error('Bad eci')); + } + if(!_.includes(pico.rulesets, rid)){ + return errResp(res, new Error('Pico does not have that rid')); + } - var ctx = { - pico: pico, - db: db, - rid: rid, - fn_name: fn_name, - args: args - }; + var ctx = { + pico: pico, + db: db, + rid: rid, + fn_name: fn_name, + args: args + }; - queryRulesetFn(ctx, rulesets, function(err, data){ - if(err) return errResp(res, err); - jsonResp(res, data); + queryRulesetFn(ctx, rulesets, function(err, data){ + if(err) return errResp(res, err); + jsonResp(res, data); + }); }); });
getPicoByECI is now async
Picolab_pico-engine
train
e5a3876814e0844e90142daae72c690c5c27765a
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup, find_packages setup(name='sqltemplate', - version='0.5.1', + version='0.5.1-2', description='Core library for database querying tools ' 'based on templates', classifiers=[ @@ -16,7 +16,10 @@ setup(name='sqltemplate', author='Marcin Nowak', author_email='[email protected]', url='https://github.com/marcinn/sqltemplate', - install_requires=['flatdict>=1.2.0,<2.0.0', 'sqlparse>=0.1.19,<0.2'], + install_requires=['flatdict>=1.2.0,<2.0.0'], + extras_require={ + 'prettysql': 'sqlparse>=0.1.19,<1.0', + }, keywords='python sql template', packages=find_packages('.'), include_package_data=True,
move sqlparse as an extra requirement
marcinn_sqltemplate
train
2e8825366ea096b8d84db7309a7955e02534e0bb
diff --git a/fireplace/player.py b/fireplace/player.py index <HASH>..<HASH> 100644 --- a/fireplace/player.py +++ b/fireplace/player.py @@ -21,6 +21,7 @@ class Player(Entity, TargetableByAuras): outgoing_healing_adjustment = slot_property("outgoing_healing_adjustment") shadowform = slot_property("shadowform") spellpower_double = slot_property("spellpower_double", sum) + spellpower_adjustment = slot_property("spellpower", sum) type = CardType.PLAYER def __init__(self, name): @@ -78,7 +79,9 @@ class Player(Entity, TargetableByAuras): @property def spellpower(self): - return sum(minion.spellpower for minion in self.field) + aura_power = self.controller.spellpower_adjustment + minion_power = sum(minion.spellpower for minion in self.field) + return aura_power + minion_power @property def characters(self):
Take Player spellpower into account in Player.spellpower
jleclanche_fireplace
train
10504f1706cf5992b59b7b71e608989247add403
diff --git a/packages/net/env/browser/iframe.js b/packages/net/env/browser/iframe.js index <HASH>..<HASH> 100644 --- a/packages/net/env/browser/iframe.js +++ b/packages/net/env/browser/iframe.js @@ -3,6 +3,36 @@ import net.interfaces; from util.browser import $; +function findFrame() { + var target = window; + if (top == target) { return ''; } + + var path = [], + search = function(win) { + for (var i = 0, len = win.length; i < len; ++i) { + if (win[i] == target || search(win[i])) { + path.unshift(i); + return true; + } + } + } + + search(top); + return path.join('-'); +} + +function findTarget(target) { + try { + var path = target.split('-'), + target = top; + for (var i = 0, j; j = path[i]; ++i) { target = target[j]; } + return target && target.postMessage ? target : null; + } catch(e) { + logger.error(e, 'Could not find iframe target:', target, '(possibly a security error)'); + return null; + } +} + exports.Listener = Class(net.interfaces.Listener, function(supr) { var ID = 0; @@ -85,6 +115,8 @@ exports.Listener = Class(net.interfaces.Listener, function(supr) { this.onResize(); } + this.findFrame = function() { return findFrame(); } + this.getDOM = function() { return this._serverContent; } @@ -194,6 +226,7 @@ exports.Listener = Class(net.interfaces.Listener, function(supr) { var name = evt.source.name; var target = this._clients[name]; var data = eval('(' + evt.data + ')'); + switch (data.type) { case 'open': this._clients[name] = new exports.Transport(evt.source); @@ -214,8 +247,16 @@ exports.Listener = Class(net.interfaces.Listener, function(supr) { exports.Connector = Class(net.interfaces.Connector, function() { this.connect = function() { + var target; + if (this._opts.target) { + target = findTarget(this._opts.target); + } else { + target = top; + } + + var self = findFrame(); $.onEvent(window, 'message', bind(this, '_onMessage')); - window.parent.postMessage(JSON.stringify({type:"open"}), '*'); + target.postMessage('{"type":"open"}', '*'); } 
this._onMessage = function(evt) {
add support to run iframe protocol over sibling iframes using findFrame and findTarget
gameclosure_js.io
train
6d931c0b6b2fcee383ecdd793022beef993cc19d
diff --git a/lib/openstax_utilities.rb b/lib/openstax_utilities.rb index <HASH>..<HASH> 100644 --- a/lib/openstax_utilities.rb +++ b/lib/openstax_utilities.rb @@ -63,7 +63,7 @@ module OpenStax @standard_date_format = "%b %d, %Y" @standard_datetime_format = "%b %d, %Y %l:%M %p %Z" @standard_time_format = "%l:%M %p %Z" - @status_authenticate = -> {} + @status_authenticate = -> { head :forbidden } super end end
Deny access to the status page by default
openstax_openstax_utilities
train
85b4e423cc23d7ba316d44b665b42a71f1324670
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,12 @@ jobs: include: - - name: "20.04 pycodestyle and native run (py3.8)" + - name: "20.04 flake8 and native run (py3.8)" os: linux dist: focal language: shell install: sudo apt-get update; sudo apt install pycodestyle python3-pexpect; script: - - "pycodestyle sos tests bin/*" + - "flake8 sos tests bin/*" - "sudo ./tests/simple.sh" - name: "18.04 pycodestyle and native run (py3.6)" os: linux diff --git a/sos/component.py b/sos/component.py index <HASH>..<HASH> 100644 --- a/sos/component.py +++ b/sos/component.py @@ -18,6 +18,7 @@ import sys from argparse import SUPPRESS from datetime import datetime from shutil import rmtree +from pathlib import Path from sos import __version__ from sos.archive import TarFileArchive from sos.options import SoSOptions diff --git a/tests/archive_tests.py b/tests/archive_tests.py index <HASH>..<HASH> 100644 --- a/tests/archive_tests.py +++ b/tests/archive_tests.py @@ -8,7 +8,6 @@ import unittest import os import tarfile -import zipfile import tempfile import shutil diff --git a/tests/cleaner_tests.py b/tests/cleaner_tests.py index <HASH>..<HASH> 100644 --- a/tests/cleaner_tests.py +++ b/tests/cleaner_tests.py @@ -62,7 +62,6 @@ class CleanerMapTests(unittest.TestCase): self.assertTrue(_test.ip in _net.network) def test_ip_map_get_same_with_or_without_cidr(self): - _net = self.ip_map.get('192.168.4.0/24') _hostwsub = self.ip_map.get('192.168.4.1/24') _hostnosub = self.ip_map.get('192.168.4.1') self.assertEqual(_hostwsub.split('/')[0], _hostnosub) diff --git a/tests/plugin_tests.py b/tests/plugin_tests.py index <HASH>..<HASH> 100644 --- a/tests/plugin_tests.py +++ b/tests/plugin_tests.py @@ -15,7 +15,6 @@ from io import StringIO from sos.report.plugins import Plugin, regex_findall, _mangle_command from sos.archive import TarFileArchive from sos.policies import LinuxPolicy, InitSystem -import sos.policies PATH = 
os.path.dirname(__file__) @@ -196,7 +195,7 @@ class PluginTests(unittest.TestCase): }) self.assertEquals(p.get_description(), "<no description available>") - def test_plugin_no_descrip(self): + def test_plugin_has_descrip(self): p = NamedMockPlugin({ 'sysroot': self.sysroot, 'policy': LinuxPolicy(init=InitSystem(), probe_runtime=False), @@ -339,7 +338,6 @@ class AddCopySpecTests(unittest.TestCase): 'cmdlineopts': MockOptions(), 'policy': LinuxPolicy(init=InitSystem(), probe_runtime=False), 'sysroot': os.getcwd(), - 'cmdlineopts': MockOptions(), 'devices': {} }) self.mp.archive = MockArchive() @@ -383,8 +381,8 @@ class AddCopySpecTests(unittest.TestCase): def test_glob_file_limit_no_limit(self): self.mp.sysroot = '/' tmpdir = tempfile.mkdtemp() - fn = create_file(2, dir=tmpdir) - fn2 = create_file(2, dir=tmpdir) + create_file(2, dir=tmpdir) + create_file(2, dir=tmpdir) self.mp.add_copy_spec(tmpdir + "/*") self.assertEquals(len(self.mp.copy_paths), 2) shutil.rmtree(tmpdir) @@ -392,8 +390,8 @@ class AddCopySpecTests(unittest.TestCase): def test_glob_file_over_limit(self): self.mp.sysroot = '/' tmpdir = tempfile.mkdtemp() - fn = create_file(2, dir=tmpdir) - fn2 = create_file(2, dir=tmpdir) + create_file(2, dir=tmpdir) + create_file(2, dir=tmpdir) self.mp.add_copy_spec(tmpdir + "/*", 1) self.assertEquals(len(self.mp.copy_strings), 1) content, fname = self.mp.copy_strings[0] diff --git a/tests/report_tests.py b/tests/report_tests.py index <HASH>..<HASH> 100644 --- a/tests/report_tests.py +++ b/tests/report_tests.py @@ -6,7 +6,6 @@ # # See the LICENSE file in the source distribution for further information. import unittest -import os try: import json diff --git a/tests/sosreport_pexpect.py b/tests/sosreport_pexpect.py index <HASH>..<HASH> 100644 --- a/tests/sosreport_pexpect.py +++ b/tests/sosreport_pexpect.py @@ -8,7 +8,6 @@ import unittest import pexpect -from re import search, escape from os import kill from signal import SIGINT
[general] flake8 first pass
sosreport_sos
train
5ac2bb6c011a3d682dde6ef9b8589682a2177e03
diff --git a/src/components/RefinementList/RefinementList.js b/src/components/RefinementList/RefinementList.js index <HASH>..<HASH> 100644 --- a/src/components/RefinementList/RefinementList.js +++ b/src/components/RefinementList/RefinementList.js @@ -130,8 +130,10 @@ class RefinementList extends React.Component { const limit = this.state.isShowMoreOpen ? this.props.limitMax : this.props.limitMin; let displayedFacetValues = this.props.facetValues.slice(0, limit); const displayShowMore = this.props.showMore === true && + // "Show more" this.props.facetValues.length > displayedFacetValues.length || - this.state.isShowMoreOpen === true; + // "Show less", but hide it if the result set changed + this.state.isShowMoreOpen && displayedFacetValues.length > this.props.limitMin; const showMoreBtn = displayShowMore ? <Template diff --git a/src/components/RefinementList/__tests__/RefinementList-test.js b/src/components/RefinementList/__tests__/RefinementList-test.js index <HASH>..<HASH> 100644 --- a/src/components/RefinementList/__tests__/RefinementList-test.js +++ b/src/components/RefinementList/__tests__/RefinementList-test.js @@ -240,6 +240,28 @@ describe('RefinementList', () => { expect(actual.length).toEqual(0); }); + it('no showMore when: state = open -> values change -> values <= limitMin ', () => { + // Given + let props = { + facetValues: [ + {name: 'foo'}, + {name: 'bar'}, + {name: 'baz'} + ], + showMore: true, + limitMin: 2, + limitMax: 5 + }; + + // When + let root = shallowRender(props); + root.instance().handleClickShowMore(); + root.setProps({facetValues: props.facetValues.slice(2)}); + + // Then + expect(root.find({templateKey: 'show-more-active'}).length).toEqual(0); + }); + it('does not add a showMore link when the facet values length is equal to the minLimit', () => { // Given let props = {
fix(showMore): hide "show less" when nothing to hide
algolia_instantsearch.js
train
fc3922fd58eb9f10ace5cf5e1acfc6c423f60846
diff --git a/pyhaversion/pypi.py b/pyhaversion/pypi.py index <HASH>..<HASH> 100644 --- a/pyhaversion/pypi.py +++ b/pyhaversion/pypi.py @@ -2,6 +2,8 @@ from dataclasses import dataclass from aiohttp.client import ClientTimeout +from aiohttp.hdrs import IF_NONE_MATCH + from awesomeversion import AwesomeVersion from .base import HaVersionBase @@ -12,7 +14,7 @@ from .consts import ( DEFAULT_HEADERS, HaVersionChannel, ) -from .exceptions import HaVersionInputException +from .exceptions import HaVersionInputException, HaVersionNotModifiedException URL = "https://pypi.org/pypi/homeassistant/json" @@ -28,11 +30,20 @@ class HaVersionPypi(HaVersionBase): async def fetch(self, **kwargs): """Logic to fetch new version data.""" + headers = DEFAULT_HEADERS + if (etag := kwargs.get("etag")) is not None: + headers[IF_NONE_MATCH] = f'W/"{etag}"' + request = await self.session.get( url=URL, - headers=DEFAULT_HEADERS, + headers=headers, timeout=ClientTimeout(total=self.timeout), ) + self._etag = request.headers.get("etag") + + if request.status == 304: + raise HaVersionNotModifiedException + self._data = await request.json() def parse(self): diff --git a/tests/test_haio.py b/tests/test_haio.py index <HASH>..<HASH> 100644 --- a/tests/test_haio.py +++ b/tests/test_haio.py @@ -27,7 +27,7 @@ async def test_haio(aresponses): @pytest.mark.asyncio async def test_etag(aresponses): - """Test hassio etag.""" + """Test haio etag.""" aresponses.add( "www.home-assistant.io", "/version.json", diff --git a/tests/test_pypi.py b/tests/test_pypi.py index <HASH>..<HASH> 100644 --- a/tests/test_pypi.py +++ b/tests/test_pypi.py @@ -4,9 +4,14 @@ from unittest.mock import patch import aiohttp import pytest -from pyhaversion import HaVersion -from pyhaversion.consts import HaVersionChannel, HaVersionSource -from pyhaversion.exceptions import HaVersionInputException +from pyhaversion import ( + HaVersion, + HaVersionInputException, + HaVersionNotModifiedException, + HaVersionChannel, + HaVersionSource, +) + 
from tests.common import fixture from .const import BETA_VERSION, HEADERS, STABLE_VERSION, STABLE_VERSION_BETA_WEEK @@ -70,3 +75,31 @@ async def test_stable_version_beta_week(aresponses): async def test_input_exception(HaVersion): with pytest.raises(HaVersionInputException): HaVersion(source=HaVersionSource.PYPI) + + [email protected] +async def test_etag(aresponses): + """Test pypi etag.""" + aresponses.add( + "pypi.org", + "/pypi/homeassistant/json", + "get", + aresponses.Response( + text=fixture("pypi/default", False), + status=200, + headers={**HEADERS, "etag": "test"}, + ), + ) + aresponses.add( + "pypi.org", + "/pypi/homeassistant/json", + "get", + aresponses.Response(status=304, headers=HEADERS), + ) + async with aiohttp.ClientSession() as session: + haversion = HaVersion(session=session, source=HaVersionSource.PYPI) + await haversion.get_version(etag=haversion.etag) + assert haversion.version == STABLE_VERSION + + with pytest.raises(HaVersionNotModifiedException): + await haversion.get_version(etag=haversion.etag)
Add Etag support for HaVersionPypi (#<I>)
ludeeus_pyhaversion
train
809d0310a67356b95e4cdc8aef738ef1fc1837a2
diff --git a/lib/apidiesel/handlers/mock_response.rb b/lib/apidiesel/handlers/mock_response.rb index <HASH>..<HASH> 100644 --- a/lib/apidiesel/handlers/mock_response.rb +++ b/lib/apidiesel/handlers/mock_response.rb @@ -3,10 +3,12 @@ module Apidiesel module MockResponse class RequestHandler def run(request, api_config) - return request unless request.action.respond_to?(:mock_response) + action = request.action - file_name = request.action.mock_response[:file] - parser = request.action.mock_response[:parser] + return request unless action.respond_to?(:mock_response) && action.mock_response + + file_name = action.mock_response[:file] + parser = action.mock_response[:parser] file = File.read(file_name) request.response_body = if parser
Checks for mock_response being nil
janfoeh_apidiesel
train
a873e83e2d0c0182fda37c61315dae7348cd625f
diff --git a/lib/composable_operations/composed_operation.rb b/lib/composable_operations/composed_operation.rb index <HASH>..<HASH> 100644 --- a/lib/composable_operations/composed_operation.rb +++ b/lib/composable_operations/composed_operation.rb @@ -46,7 +46,11 @@ module ComposableOperations def execute self.class.operations.inject(input) do |data, operation| - operation = operation.create(self, *data) + operation = if data.respond_to?(:to_ary) + operation.create(self, *data) + else + operation.create(self, data) + end operation.perform if operation.failed?
ComposedOperation input splatting Input splatting is now only done for objects that have a natural array representation – meaning objects that respond to #to_ary
t6d_composable_operations
train
e54c508f2717378a7760af48369572bd0b4243f4
diff --git a/src/ZF2EntityAudit/Mapping/Driver/AuditDriver.php b/src/ZF2EntityAudit/Mapping/Driver/AuditDriver.php index <HASH>..<HASH> 100644 --- a/src/ZF2EntityAudit/Mapping/Driver/AuditDriver.php +++ b/src/ZF2EntityAudit/Mapping/Driver/AuditDriver.php @@ -11,6 +11,7 @@ use Zend\Code\Generator\MethodGenerator; use Zend\Code\Generator\PropertyGenerator; use Doctrine\ORM\Mapping\Builder\ClassMetadataBuilder; +use Doctrine\ORM\Mapping\Builder\AssociationBuilder; final class AuditDriver implements MappingDriver { @@ -28,7 +29,7 @@ final class AuditDriver implements MappingDriver $auditManager = $serviceManager->get('auditManager'); $config = $auditManager->getConfiguration(); - $cmf = $entityManager->getMetadataFactory(); + $metadataFactory = $entityManager->getMetadataFactory(); // Revision is managed here rather than a separate namespace and driver if ($className == 'ZF2EntityAudit\\Entity\\Revision') { @@ -37,7 +38,12 @@ final class AuditDriver implements MappingDriver $builder->addField('comment', 'text'); $builder->addField('timestamp', 'datetime'); - $builder->addManyToOne('user', \ZF2EntityAudit\Module::getZfcUserEntity()); + // Add assoication between ZfcUser and Revision + $zfcUserMetadata = $metadataFactory->getMetadataFor(\ZF2EntityAudit\Module::getZfcUserEntity()); + $builder + ->createManyToOne('user', $zfcUserMetadata->getName()) + ->addJoinColumn('user_id', $zfcUserMetadata->getSingleIdentifierColumnName()) + ->build(); $metadata->setTableName($config->getRevisionTableName()); return; @@ -47,17 +53,19 @@ final class AuditDriver implements MappingDriver $metadataClassName = $metadata->getName(); $metadataClass = new $metadataClassName(); - if (!$cmf->hasMetadataFor($metadataClass->getAuditedEntityClass())) + // Verify the metadata for the target class has been loaded + if (!$metadataFactory->hasMetadataFor($metadataClass->getAuditedEntityClass())) throw new \Exception('Metadata is not loaded for ' . $metadataClass->getAuditedEntityClass() . 
' Is the auditing module last to load? It should be...'); - $auditedClassMetadata = $cmf->getMetadataFor($metadataClass->getAuditedEntityClass()); + $auditedClassMetadata = $metadataFactory->getMetadataFor($metadataClass->getAuditedEntityClass()); $builder = new ClassMetadataBuilder($metadata); $builder->addManyToOne($config->getRevisionFieldName(), 'ZF2EntityAudit\Entity\Revision'); $identifiers = array($config->getRevisionFieldName()); + // Add fields from target to audit entity foreach ($auditedClassMetadata->getFieldNames() as $fieldName) { $builder->addField($fieldName, $auditedClassMetadata->getTypeOfField($fieldName)); if ($auditedClassMetadata->isIdentifier($fieldName)) $identifiers[] = $fieldName;
Creation of Revision table correctly creates relation to ZfcUser table
API-Skeletons_zf-doctrine-audit
train
9390674a05edf53cf2e08084f0f602b323788191
diff --git a/builtin/providers/aws/resource_aws_elb.go b/builtin/providers/aws/resource_aws_elb.go index <HASH>..<HASH> 100644 --- a/builtin/providers/aws/resource_aws_elb.go +++ b/builtin/providers/aws/resource_aws_elb.go @@ -9,6 +9,7 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/service/ec2" "github.com/aws/aws-sdk-go/service/elb" "github.com/hashicorp/terraform/helper/hashcode" "github.com/hashicorp/terraform/helper/resource" @@ -74,6 +75,11 @@ func resourceAwsElb() *schema.Resource { Computed: true, }, + "source_security_group_id": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + }, + "subnets": &schema.Schema{ Type: schema.TypeSet, Elem: &schema.Schema{Type: schema.TypeString}, @@ -300,6 +306,18 @@ func resourceAwsElbRead(d *schema.ResourceData, meta interface{}) error { d.Set("security_groups", lb.SecurityGroups) if lb.SourceSecurityGroup != nil { d.Set("source_security_group", lb.SourceSecurityGroup.GroupName) + + // Manually look up the ELB Security Group ID, since it's not provided + var elbVpc string + if lb.VPCId != nil { + elbVpc = *lb.VPCId + } + sgId, err := sourceSGIdByName(meta, *lb.SourceSecurityGroup.GroupName, elbVpc) + if err != nil { + log.Printf("[WARN] Error looking up ELB Security Group ID: %s", err) + } else { + d.Set("source_security_group_id", sgId) + } } d.Set("subnets", lb.Subnets) d.Set("idle_timeout", lbAttrs.ConnectionSettings.IdleTimeout) @@ -594,3 +612,52 @@ func validateElbName(v interface{}, k string) (ws []string, errors []error) { return } + +func sourceSGIdByName(meta interface{}, sg, vpcId string) (string, error) { + conn := meta.(*AWSClient).ec2conn + var filters []*ec2.Filter + var sgFilterName, sgFilterVPCID *ec2.Filter + sgFilterName = &ec2.Filter{ + Name: aws.String("group-name"), + Values: []*string{aws.String(sg)}, + } + + if vpcId != "" { + sgFilterVPCID = &ec2.Filter{ + Name: aws.String("vpc-id"), + Values: []*string{aws.String(vpcId)}, + 
} + } + + filters = append(filters, sgFilterName) + + if sgFilterVPCID != nil { + filters = append(filters, sgFilterVPCID) + } + + req := &ec2.DescribeSecurityGroupsInput{ + Filters: filters, + } + resp, err := conn.DescribeSecurityGroups(req) + if err != nil { + if ec2err, ok := err.(awserr.Error); ok { + if ec2err.Code() == "InvalidSecurityGroupID.NotFound" || + ec2err.Code() == "InvalidGroup.NotFound" { + resp = nil + err = nil + } + } + + if err != nil { + log.Printf("Error on ELB SG look up: %s", err) + return "", err + } + } + + if resp == nil || len(resp.SecurityGroups) == 0 { + return "", fmt.Errorf("No security groups found for name %s and vpc id %s", sg, vpcId) + } + + group := resp.SecurityGroups[0] + return *group.GroupId, nil +}
providers/aws: Provide source security group id for ELBs
hashicorp_terraform
train
77c5a25c394be2c9cf66da95eb09f2d12670bf7b
diff --git a/lib/rugged/repository.rb b/lib/rugged/repository.rb index <HASH>..<HASH> 100644 --- a/lib/rugged/repository.rb +++ b/lib/rugged/repository.rb @@ -140,11 +140,7 @@ module Rugged when Rugged::Object target = sha_or_ref.oid else - if (ref = Rugged::Reference.lookup(self, sha_or_ref)) - target = ref.resolve.target - else - target = Rugged::Commit.lookup(self, sha_or_ref) - end + target = rev_parse_oid(sha_or_ref) end Branch.create(self, name, target)
Use Repository#rev_parse_oid in Repository#create_branch This is the purpose-built API to lookup an arbitrary treeish, and since git_lookup_reference() returns a new error code, it messed up the original two-stage lookup.
libgit2_rugged
train
2c8b5f5bd483a127584ee26387790fcb139b6fe0
diff --git a/authz.go b/authz.go index <HASH>..<HASH> 100644 --- a/authz.go +++ b/authz.go @@ -3,6 +3,7 @@ package influxdb import ( "errors" "fmt" + "os" "path/filepath" ) @@ -220,6 +221,13 @@ type Permission struct { // Matches returns whether or not one permission matches the other. func (p Permission) Matches(perm Permission) bool { + if _, set := os.LookupEnv("MATCHER_BEHAVIOR"); set { + return p.matchesV2(perm) + } + return p.matchesV1(perm) +} + +func (p Permission) matchesV1(perm Permission) bool { if p.Action != perm.Action { return false } @@ -232,6 +240,13 @@ func (p Permission) Matches(perm Permission) bool { return true } + if p.Resource.OrgID != nil && perm.Resource.OrgID != nil && p.Resource.ID != nil && perm.Resource.ID != nil { + if *p.Resource.OrgID != *perm.Resource.OrgID && *p.Resource.ID == *perm.Resource.ID { + fmt.Printf("Old match used: p.Resource.OrgID=%s perm.Resource.OrgID=%s p.Resource.ID=%s", + *p.Resource.OrgID, *perm.Resource.OrgID, *p.Resource.ID) + } + } + if p.Resource.OrgID != nil && p.Resource.ID == nil { pOrgID := *p.Resource.OrgID if perm.Resource.OrgID != nil { @@ -255,6 +270,46 @@ func (p Permission) Matches(perm Permission) bool { return false } +func (p Permission) matchesV2(perm Permission) bool { + if p.Action != perm.Action { + return false + } + + if p.Resource.Type != perm.Resource.Type { + return false + } + + if p.Resource.OrgID == nil && p.Resource.ID == nil { + return true + } + + if p.Resource.OrgID != nil { + if perm.Resource.OrgID != nil { + if *p.Resource.OrgID == *perm.Resource.OrgID { + if p.Resource.ID == nil { + return true + } + if perm.Resource.ID != nil { + return *p.Resource.ID == *perm.Resource.ID + } + } + return false + } + } + + if p.Resource.ID != nil { + pID := *p.Resource.ID + if perm.Resource.ID != nil { + permID := *perm.Resource.ID + if pID == permID { + return true + } + } + } + + return false +} + func (p Permission) String() string { return fmt.Sprintf("%s:%s", p.Action, p.Resource) }
chore(auth): new match behavior (#<I>) * fix(<I>): match permission orgIDs if specified * chore(auth): log old match behavior * fix(auth): log format
influxdata_influxdb
train
d262f0620c12cf205550c2a1d9f221b1b129fb74
diff --git a/src/Config/Config.php b/src/Config/Config.php index <HASH>..<HASH> 100644 --- a/src/Config/Config.php +++ b/src/Config/Config.php @@ -128,9 +128,9 @@ abstract class Config { */ public function load($source, $cached=true) { try { - if( class_exists('Orpheus\Cache\FSCache', true) ) { + if( class_exists('\Orpheus\Cache\FSCache', true) ) { // strtr fix an issue with FSCache, FSCache does not allow path, so no / and \ - $cache = new Orpheus\Cache\FSCache('config', strtr($source, '/\\', '--'), filemtime(static::getFilePath($source))); + $cache = new \Orpheus\Cache\FSCache('config', strtr($source, '/\\', '--'), filemtime(static::getFilePath($source))); if( !static::$caching || !$cached || !$cache->get($parsed) ) { $parsed = static::parse($source); $cache->set($parsed);
Config is now able to load from packages
Sowapps_orpheus-core
train
b2469283a77133fc36382281eca7ea8d4d56c7ed
diff --git a/activesupport/lib/active_support/notifications.rb b/activesupport/lib/active_support/notifications.rb index <HASH>..<HASH> 100644 --- a/activesupport/lib/active_support/notifications.rb +++ b/activesupport/lib/active_support/notifications.rb @@ -1,5 +1,3 @@ -require 'active_support/core_ext/module/delegation' - module ActiveSupport # Notifications provides an instrumentation API for Ruby. To instrument an # action in Ruby you just need to do:
Removing unnecessary require, solve 'circular require considered harmful' warning.
rails_rails
train
4eea2e3e954261200dc8ad9b724eeebbb03f75e8
diff --git a/lib/mongodb-schema.js b/lib/mongodb-schema.js index <HASH>..<HASH> 100644 --- a/lib/mongodb-schema.js +++ b/lib/mongodb-schema.js @@ -169,6 +169,8 @@ function schema(documents, options) { } else { if (Object.keys(d).length < options.data.maxCardinality) { d[val] = 1; + } else { + d[$o] = $o in d ? d[$o] + 1 : 1; } } break; @@ -330,7 +332,7 @@ function schema(documents, options) { options.flat = options.flat === null ? true : options.flat; options.data = options.data || false; options.filter = options.filter || null; - options.metavars = options.metavars || {count: '$count', type: '$type', data: '$data', array: '$array', prob: '$prob'}; + options.metavars = options.metavars || {count: '$count', type: '$type', data: '$data', array: '$array', prob: '$prob', other: '$other'}; var metavar_names = _getObjectValues(options.metavars); @@ -339,7 +341,8 @@ function schema(documents, options) { $t = options.metavars.type, $d = options.metavars.data, $a = options.metavars.array, - $p = options.metavars.prob; + $p = options.metavars.prob, + $o = options.metavars.other; // nested options.data if (options.data) { diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -96,7 +96,7 @@ describe('mongodb-schema', function() { var docs = [ { a: "foo", - b: 1, + b: [1, 2, 3], c: true, d: new Date(2014, 1, 1), e: null, @@ -231,6 +231,43 @@ describe('mongodb-schema', function() { assert.deepEqual(result.a['$type'], expected); }); + it('should let you change the meta-variable names', function () { + var result = schema_sync([ + {a: 1}, + {a: [-2, -3]} + ], { + data: true, + metavars: { + count: '#count', + type: '#type', + data: '#data', + array: '#array', + prob: '#prob' + } + }); + + var expected = { + "#count": 3, + "#type": "number", + "#data": { + "min": -3, + "max": 1 + }, + "#array": true, + "#prob": 1.5 + }; + + assert.deepEqual(result.a, expected); + }); + + it('should collect categories in $other when 
maxCardinality is reached', function () { + var result = schema_sync([ + {a: "a"}, {a: "a"}, {a: "b"}, {a: "c"}, {a: "d"}, {a: "e"}, {a: "f"} + ], {data: {maxCardinality: 3}}); + + assert.ok('$other' in result.a['$data']); + }); + }); describe ('schema', function() {
added $other and tests for metavars and maxCardinality
mongodb-js_mongodb-schema
train
915619ef361d84f966d4af24cf7363608cdff559
diff --git a/test/Role/CommandHandlerTest.php b/test/Role/CommandHandlerTest.php index <HASH>..<HASH> 100644 --- a/test/Role/CommandHandlerTest.php +++ b/test/Role/CommandHandlerTest.php @@ -7,6 +7,7 @@ use Broadway\EventHandling\EventBusInterface; use Broadway\EventStore\EventStoreInterface; use CultuurNet\UDB3\Role\Commands\AddPermission; use CultuurNet\UDB3\Role\Commands\CreateRole; +use CultuurNet\UDB3\Role\Commands\DeleteRole; use CultuurNet\UDB3\Role\Commands\RemovePermission; use CultuurNet\UDB3\Role\Commands\RenameRole; use CultuurNet\UDB3\Role\Commands\SetConstraint; @@ -16,6 +17,7 @@ use CultuurNet\UDB3\Role\Events\ConstraintUpdated; use CultuurNet\UDB3\Role\Events\PermissionAdded; use CultuurNet\UDB3\Role\Events\PermissionRemoved; use CultuurNet\UDB3\Role\Events\RoleCreated; +use CultuurNet\UDB3\Role\Events\RoleDeleted; use CultuurNet\UDB3\Role\Events\RoleRenamed; use CultuurNet\UDB3\Role\ValueObjects\Permission; use ValueObjects\Identity\UUID; @@ -83,6 +85,11 @@ class CommandHandlerTest extends CommandHandlerScenarioTestCase */ private $constraintRemoved; + /** + * @var RoleDeleted + */ + private $roleDeleted; + public function setUp() { parent::setUp(); @@ -126,6 +133,10 @@ class CommandHandlerTest extends CommandHandlerScenarioTestCase $this->constraintRemoved = new ConstraintRemoved( $this->uuid ); + + $this->roleDeleted = new RoleDeleted( + $this->uuid + ); } /** @@ -247,4 +258,18 @@ class CommandHandlerTest extends CommandHandlerScenarioTestCase )) ->then([$this->constraintRemoved]); } + + /** + * @test + */ + public function it_handles_deleteRole_by_deleting_the_role() + { + $this->scenario + ->withAggregateId($this->uuid) + ->given([$this->roleCreated]) + ->when(new DeleteRole( + $this->uuid + )) + ->then([$this->roleDeleted]); + } }
III-<I>: Add test for handleDeleteRole
cultuurnet_udb3-php
train
6f15d8c54d834cba503865f69174868edfd725e9
diff --git a/liquibase-core/src/main/java/liquibase/structure/core/StoredDatabaseLogic.java b/liquibase-core/src/main/java/liquibase/structure/core/StoredDatabaseLogic.java index <HASH>..<HASH> 100644 --- a/liquibase-core/src/main/java/liquibase/structure/core/StoredDatabaseLogic.java +++ b/liquibase-core/src/main/java/liquibase/structure/core/StoredDatabaseLogic.java @@ -2,6 +2,8 @@ package liquibase.structure.core; import liquibase.structure.AbstractDatabaseObject; import liquibase.structure.DatabaseObject; +import liquibase.util.StreamUtil; +import liquibase.util.StringUtils; import java.util.Date; @@ -51,4 +53,23 @@ public abstract class StoredDatabaseLogic<T extends StoredDatabaseLogic> extends setAttribute("body", body); return (T) this; } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + + StoredDatabaseLogic that = (StoredDatabaseLogic) obj; + + if (this.getSchema() != null && that.getSchema() != null) { + return this.getSchema().toString().equalsIgnoreCase(that.getSchema().toString()); + } + + return getName().equalsIgnoreCase(that.getName()); + } + + @Override + public int hashCode() { + return StringUtils.trimToEmpty(this.getName()).toLowerCase().hashCode(); + } }
CORE-<I> Multi-schema snapshot bugfixes Equals needs to take into account schemaName
liquibase_liquibase
train
5756d3238c8a5dab9680880945df0a48da3b7556
diff --git a/lib/sensu.rb b/lib/sensu.rb index <HASH>..<HASH> 100644 --- a/lib/sensu.rb +++ b/lib/sensu.rb @@ -2,7 +2,11 @@ module Sensu VERSION = "0.8.19" def self.write_pid(pid_file) - File.open(pid_file, 'w') { |f| f.write(Process.pid.to_s + "\n") } + begin + File.open(pid_file, 'w') { |f| f.write(Process.pid.to_s + "\n") } + rescue ::Exception => e + raise 'could not write to pid file: ' + pid_file + ': ' + e + end end def self.daemonize diff --git a/lib/sensu/api.rb b/lib/sensu/api.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/api.rb +++ b/lib/sensu/api.rb @@ -1,7 +1,5 @@ require File.join(File.dirname(__FILE__), 'config') -require File.join(File.dirname(__FILE__), '..', 'sensu') - require 'sinatra/async' require 'redis' diff --git a/lib/sensu/client.rb b/lib/sensu/client.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/client.rb +++ b/lib/sensu/client.rb @@ -1,7 +1,5 @@ require File.join(File.dirname(__FILE__), 'config') -require File.join(File.dirname(__FILE__), '..', 'sensu') - module Sensu class Client attr_accessor :options diff --git a/lib/sensu/config.rb b/lib/sensu/config.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/config.rb +++ b/lib/sensu/config.rb @@ -9,6 +9,8 @@ gem 'eventmachine', '~> 1.0.0.beta.4' require 'optparse' require 'json' require 'hashie' +require File.join(File.dirname(__FILE__), '..', 'sensu') + require 'amqp' require 'cabin' require 'cabin/outputs/em-stdlib-logger' diff --git a/lib/sensu/server.rb b/lib/sensu/server.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/server.rb +++ b/lib/sensu/server.rb @@ -2,8 +2,6 @@ require File.join(File.dirname(__FILE__), 'config') require 'redis' -require File.join(File.dirname(__FILE__), '..', 'sensu') - require File.join(File.dirname(__FILE__), 'helpers', 'redis') module Sensu
consolidated loading of sensu.rb into config.rb. wrapped write_pid to provide a little more info if the pid_file cannot be written
sensu_sensu
train
46322ffc6d6a9782be3db06a7fa23f357ac22db5
diff --git a/lib/vestal_versions.rb b/lib/vestal_versions.rb index <HASH>..<HASH> 100644 --- a/lib/vestal_versions.rb +++ b/lib/vestal_versions.rb @@ -13,6 +13,7 @@ module LaserLemon case value when Version: value when Numeric: find_by_number(value.floor) + when Symbol: respond_to?(value) ? send(value) : nil when Date, Time: last(:conditions => ['versions.created_at <= ?', value.to_time.in_time_zone]) end end @@ -32,7 +33,7 @@ module LaserLemon case value when Version: value.number when Numeric: value.floor - when Date, Time: at(value).try(:number) + when Symbol, Date, Time: at(value).try(:number) end end end
Re-added the ability to revert to a Symbol.
laserlemon_vestal_versions
train
a26b7daa5bb501539757e43e4a661e5ff974cba9
diff --git a/src/static-dependency-paths.js b/src/static-dependency-paths.js index <HASH>..<HASH> 100644 --- a/src/static-dependency-paths.js +++ b/src/static-dependency-paths.js @@ -4,6 +4,6 @@ var path = require('path'), module.exports = { chromeDriver: path.resolve(binaryDir, 'chromedriver'), - seleniumJar: path.resolve(binaryDir, 'selenium-server-standalone.jar'), + seleniumJar: process.env.SELENIUM_SERVER_STANDALONE_JAR || path.resolve(binaryDir, 'selenium-server-standalone.jar'), helperJar: path.resolve(__dirname, "java", "webdriversynchelpers", "dist", "webdriversynchelpers.jar") };
Off-topic: Allow ENV var to override selenium path Didn't feel worth branching off this branch to do it, but now users can specify their selenium path with an environment variable.
jsdevel_webdriver-sync
train
bea04e437dc7ed6079380ed704b9c8f6ec565d75
diff --git a/test/.eslintrc.js b/test/.eslintrc.js index <HASH>..<HASH> 100644 --- a/test/.eslintrc.js +++ b/test/.eslintrc.js @@ -23,7 +23,7 @@ module.exports = { 'no-unused-vars': 'off', 'no-useless-escape': 'off', - 'ghost/mocha/no-skipped-tests': 'warn', + 'ghost/mocha/no-skipped-tests': 'error', // TODO: remove these custom rules and fix problems in test files 'ghost/mocha/max-top-level-suites': 'off',
Upgraded no-skipped-test rule to error refs: <URL>
TryGhost_Ghost
train
fb8b1ad42dd22046845fd9fc53eb65481734e677
diff --git a/generators/generator-constants.js b/generators/generator-constants.js index <HASH>..<HASH> 100644 --- a/generators/generator-constants.js +++ b/generators/generator-constants.js @@ -35,7 +35,7 @@ const GRADLE_VERSION = gradleOptions.GRADLE_VERSION; const JIB_VERSION = '3.2.1'; // Libraries version -const JHIPSTER_DEPENDENCIES_VERSION = '7.7.1-SNAPSHOT'; +const JHIPSTER_DEPENDENCIES_VERSION = '7.8.0'; // The spring-boot version should match the one managed by https://mvnrepository.com/artifact/tech.jhipster/jhipster-dependencies/JHIPSTER_DEPENDENCIES_VERSION const SPRING_BOOT_VERSION = '2.6.6'; const LIQUIBASE_VERSION = '4.6.1';
Bump jhipster dependencies to <I>
jhipster_generator-jhipster
train
528bbee80cc36de947a0fd0f9293ae40972b06e6
diff --git a/lib/stealth/logger.rb b/lib/stealth/logger.rb index <HASH>..<HASH> 100644 --- a/lib/stealth/logger.rb +++ b/lib/stealth/logger.rb @@ -39,6 +39,8 @@ module Stealth :green when :previous_session, :back_to_session :yellow + when :interrupt + :magenta when :facebook, :twilio, :bandwidth :blue when :smooch
Color code interrupts in the logs
hellostealth_stealth
train
75f9e8cd2afe14094d3d781a62d1c219d3e4e825
diff --git a/patternmatcher/functions.py b/patternmatcher/functions.py index <HASH>..<HASH> 100644 --- a/patternmatcher/functions.py +++ b/patternmatcher/functions.py @@ -188,13 +188,6 @@ def substitute(expression: Expression, substitution: Substitution) -> Tuple[Unio if isinstance(expression, Variable): if expression.name in substitution: return substitution[expression.name], True - result, replaced = substitute(expression.expression, substitution) - if replaced: - if isinstance(result, list): - if len(result) != 1: - raise ValueError('Invalid substitution resulted in a variable with multiple expressions.') - result = result[0] - return Variable(expression.name, result), True elif isinstance(expression, Operation): any_replaced = False new_operands = [] diff --git a/tests/test_functions.py b/tests/test_functions.py index <HASH>..<HASH> 100644 --- a/tests/test_functions.py +++ b/tests/test_functions.py @@ -545,7 +545,6 @@ class MatchAnywhereTest(unittest.TestCase): self.assertIn(result, results, "Results differ from expected") [email protected]("Takes too long") class LogicReplaceTest(unittest.TestCase): def test_simplify(self): LAnd = Operation.new('and', Arity.variadic, 'LAnd', associative=True, one_identity=True, commutative=True)
Enabled the logic term simplification unit test. Removed some dead code.
HPAC_matchpy
train
93c18096237ee7df2f54b344f32d6282e8fcb047
diff --git a/openquake/commonlib/nrml.py b/openquake/commonlib/nrml.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/nrml.py +++ b/openquake/commonlib/nrml.py @@ -75,6 +75,7 @@ this is a job for the LiteralNode class which can be subclassed and supplemented by a dictionary of validators. """ from __future__ import print_function +import re import sys import logging from openquake.baselib.general import CallableDict @@ -115,8 +116,20 @@ class NRMLFile(object): self._file.close() +def get_tag_version(nrml_node): + """ + Extract from a node of kind `nrml` the tag and the version of the NRML + format. + """ + version, tag = re.search(r'(nrml/[\d\.]+)\}(\w+)', nrml_node.tag).groups() + return tag, version + + nodefactory = CallableDict(keyfunc=striptag) +buildmodel = CallableDict(keyfunc=get_tag_version) +# dictionary of functions with two arguments, node and fname + @nodefactory.add('sourceModel', 'simpleFaultRupture', 'complexFaultRupture', 'singlePlaneRupture', 'multiPlanesRupture') diff --git a/openquake/commonlib/tests/nrml_test.py b/openquake/commonlib/tests/nrml_test.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/tests/nrml_test.py +++ b/openquake/commonlib/tests/nrml_test.py @@ -1,6 +1,6 @@ import unittest import io -from openquake.commonlib.nrml import read, node_to_xml +from openquake.commonlib.nrml import read, node_to_xml, get_tag_version class NrmlTestCase(unittest.TestCase): @@ -33,6 +33,11 @@ class NrmlTestCase(unittest.TestCase): </nrml> """) root = read(xmlfile) + + tag, version = get_tag_version(root[0]) + self.assertEqual(tag, 'exposureModel') + self.assertEqual(version, 'nrml/0.4') + outfile = io.BytesIO() node_to_xml(root, outfile, {}) self.assertEqual(outfile.getvalue(), """\
Initial work for multi-NRML support
gem_oq-engine
train
40a8820afbab109e9e6764633ca80e1d73f41af7
diff --git a/lib/active_scaffold/data_structures/column.rb b/lib/active_scaffold/data_structures/column.rb index <HASH>..<HASH> 100644 --- a/lib/active_scaffold/data_structures/column.rb +++ b/lib/active_scaffold/data_structures/column.rb @@ -306,6 +306,7 @@ module ActiveScaffold::DataStructures @options = {:format => :i18n_number} if self.number? @form_ui = :checkbox if @column and @column.type == :boolean @form_ui = :textarea if @column and @column.type == :text + @form_ui = :number if @column and self.number? @allow_add_existing = true @form_ui = self.class.association_form_ui if @association && self.class.association_form_ui
Set form.ui = :number for number columns by default.
activescaffold_active_scaffold
train
9e450865a92e21ba1a40c494575b0205ed2c14fa
diff --git a/pinax/comments/views.py b/pinax/comments/views.py index <HASH>..<HASH> 100644 --- a/pinax/comments/views.py +++ b/pinax/comments/views.py @@ -25,9 +25,9 @@ class CommentSecureRedirectToMixin(object): if not redirect_to or " " in redirect_to or redirect_to.startswith("http"): try: if object is not None: - url = object.get_absolute_url() + redirect_to = object.get_absolute_url() elif self.object is not None: - url = self.object.content_object.get_absolute_url() + redirect_to = self.object.content_object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( "No URL to redirect to. Either provide a url or define"
Fix bug in CommentSecureRedirectToMixin
pinax_pinax-comments
train
7b5e72ebcac87686e12737673a4c0813c75dc487
diff --git a/figment/cli.py b/figment/cli.py index <HASH>..<HASH> 100644 --- a/figment/cli.py +++ b/figment/cli.py @@ -31,13 +31,13 @@ def new(args): def command(args): - zone = Zone.from_config(args.zone, args.config) + zone = Zone.from_config(args.zone, args.world) zone.enqueue_command(args.entity_id, args.command) @keyboard_interactive def prompt(args): - zone = Zone.from_config(args.zone, args.config) + zone = Zone.from_config(args.zone, args.world) command = raw_input('> ') while command and not command == 'quit': zone.enqueue_command(args.entity_id, command) @@ -46,7 +46,7 @@ def prompt(args): @keyboard_interactive def listen(args): - zone = Zone.from_config(args.zone, args.config) + zone = Zone.from_config(args.zone, args.world) for message in zone.listen(args.entity_id): print(message) @@ -57,7 +57,7 @@ def run(args): log.setLevel(logging.DEBUG) try: - zone = Zone.from_config(args.zone, args.config) + zone = Zone.from_config(args.zone, args.world) if args.ticker: zone.start_ticker() @@ -84,8 +84,8 @@ def cli(): help='name of the target zone' ) parser.add_argument( - '-c', '--config', type=str, default='config.json', - help='path to the config file' + '-w', '--world', type=str, default='.', + help='path to the world' ) subparsers = parser.add_subparsers(dest='command') diff --git a/figment/zone.py b/figment/zone.py index <HASH>..<HASH> 100644 --- a/figment/zone.py +++ b/figment/zone.py @@ -20,6 +20,7 @@ def fatal(message): class Zone(object): def __init__(self): self.id = None + self.world_path = '.' self.entities = {} self.ticking_entities = set() self.tick_interval = 1 @@ -27,13 +28,13 @@ class Zone(object): self.redis = None @classmethod - def from_config(cls, id, config_path='config.json'): + def from_config(cls, id, world_path): self = cls() self.id = id - self.working_dir = '.' 
+ self.world_path = world_path - self.load_config(config_path) + self.load_config() return self @@ -49,16 +50,19 @@ class Zone(object): def import_key(self): return 'zone:%s:imports' % self.id - def load_config(self, path): - full_path = os.path.abspath(os.path.expanduser(path)) + def load_config(self): + config_path = os.path.join( + os.path.abspath(os.path.expanduser(self.world_path)), + 'config.json' + ) try: - with open(full_path) as f: + with open(config_path) as f: config = json.loads(f.read()) except EnvironmentError: - fatal("couldn't read configuration file %s" % path) + fatal("couldn't read configuration file %s" % config_path) except ValueError as e: - fatal("error in configuration file: %s" % e.message) + fatal('error in configuration file: %s' % e.message) if not self.id in config['zones']: fatal("undefined zone '%s'" % self.id) @@ -78,7 +82,6 @@ class Zone(object): fatal("unrecognized persistence mode '%s'" % persistence['mode']) self.config = config - self.working_dir = os.path.dirname(full_path) # TODO: Read redis connection params from config self.redis = StrictRedis() @@ -87,10 +90,13 @@ class Zone(object): def snapshot_path(self): snapshot_path = self.config['persistence']['file'] try: - snapshot_path = snapshot_path % self.id + snapshot_path = snapshot_path.format(id=self.id) except TypeError: pass - return os.path.expanduser(snapshot_path) + return os.path.join( + self.world_path, + os.path.expanduser(snapshot_path) + ) def load_snapshot(self): if not os.path.exists(self.snapshot_path): @@ -124,8 +130,7 @@ class Zone(object): os._exit(os.EX_OK) def load_components(self): - # HACK: add basedir of the config file to the import path - sys.path.append(self.working_dir) + sys.path.append(self.world_path) # As a side effect, Component.ALL gets populated with Component subclasses __import__('components') diff --git a/skel/config.json b/skel/config.json index <HASH>..<HASH> 100644 --- a/skel/config.json +++ b/skel/config.json @@ -5,7 +5,7 @@ }, 
"persistence": { "mode": "snapshot", - "file": "zones/%s.json", + "file": "zones/{id}.json", "frequency": 60 }, "zones": {
Refactor handling of config specification You must now specify the path to the world directory itself rather than the config file within the world directory.
vreon_figment
train
2fb9ce7722dd9c8281ecc8167bdd35ced74ee4d9
diff --git a/src/compiler.js b/src/compiler.js index <HASH>..<HASH> 100644 --- a/src/compiler.js +++ b/src/compiler.js @@ -499,9 +499,9 @@ var Compiler = Object.extend({ // new ones if necessary lib.each(node.targets, function(target) { var name = target.value; - var id = frame.get(name); + var id = frame.lookup(name); - if (id === null) { + if (id == null) { id = this.tmpid(); // Note: This relies on js allowing scope across @@ -520,10 +520,7 @@ var Compiler = Object.extend({ var id = ids[i]; var name = target.value; - this.emitLine('frame.set("' + name + '", ' + id + ');'); - if (frame.get(name) === null) { - frame.set(name, id); - } + this.emitLine('frame.set("' + name + '", ' + id + ');'); // We are running this for every var, but it's very // uncommon to assign to multiple vars anyway
don't optimize variable lookups in (fixes #<I>)
mozilla_nunjucks
train
c394b750b43b433550e760ae194601096cf0c30f
diff --git a/lib/searchfilter.go b/lib/searchfilter.go index <HASH>..<HASH> 100644 --- a/lib/searchfilter.go +++ b/lib/searchfilter.go @@ -143,20 +143,20 @@ func CompoundFilter(fl ...interface{}) *FilterWrap { } type FilterOp struct { - TermsMap map[string][]interface{} `json:"terms,omitempty"` - TermMap map[string]interface{} `json:"term,omitempty"` - RangeMap map[string]RangeFilter `json:"range,omitempty"` - ExistsProp *PropertyPathMarker `json:"exists,omitempty"` - MissingProp *PropertyPathMarker `json:"missing,omitempty"` - AndFilters []FilterOp `json:"and,omitempty"` - OrFilters []FilterOp `json:"or,omitempty"` - NotFilters []FilterOp `json:"not,omitempty"` - Limit *LimitFilter `json:"limit,omitempty"` - Type *TypeFilter `json:"type,omitempty"` - Ids *IdFilter `json:"ids,omitempty"` - Script *ScriptFilter `json:"script,omitempty"` - GeoDist map[string]interface{} `json:"geo_distance,omitempty"` - GeoDistRange map[string]interface{} `json:"geo_distance_range,omitempty"` + TermsMap map[string]interface{} `json:"terms,omitempty"` + TermMap map[string]interface{} `json:"term,omitempty"` + RangeMap map[string]RangeFilter `json:"range,omitempty"` + ExistsProp *PropertyPathMarker `json:"exists,omitempty"` + MissingProp *PropertyPathMarker `json:"missing,omitempty"` + AndFilters []FilterOp `json:"and,omitempty"` + OrFilters []FilterOp `json:"or,omitempty"` + NotFilters []FilterOp `json:"not,omitempty"` + Limit *LimitFilter `json:"limit,omitempty"` + Type *TypeFilter `json:"type,omitempty"` + Ids *IdFilter `json:"ids,omitempty"` + Script *ScriptFilter `json:"script,omitempty"` + GeoDist map[string]interface{} `json:"geo_distance,omitempty"` + GeoDistRange map[string]interface{} `json:"geo_distance_range,omitempty"` } type PropertyPathMarker struct { @@ -266,15 +266,13 @@ func NewGeoField(field string, latitude float32, longitude float32) GeoField { // Note: you can only have one terms clause in a filter. 
Use a bool filter to combine func (f *FilterOp) Terms(field string, executionMode TermExecutionMode, values ...interface{}) *FilterOp { //You can only have one terms in a filter - f.TermsMap = make(map[string][]interface{}) + f.TermsMap = make(map[string]interface{}) if executionMode != "" { f.TermsMap["execution"] = executionMode } - for _, val := range values { - f.TermsMap[field] = append(f.TermsMap[field], val) - } + f.TermsMap[field] = values return f }
Forgot this file change for the last commit
mattbaird_elastigo
train
926ddff520ce9fe9130e3092a36aefdb042177bf
diff --git a/go/vt/mysqlctl/xtrabackupengine.go b/go/vt/mysqlctl/xtrabackupengine.go index <HASH>..<HASH> 100644 --- a/go/vt/mysqlctl/xtrabackupengine.go +++ b/go/vt/mysqlctl/xtrabackupengine.go @@ -50,7 +50,7 @@ type XtrabackupEngine struct { var ( // path where backup engine program is located - xtrabackupEnginePath = flag.String("xtrabackup_root_path", "", "directory location of the xtrabackup executable, e.g., /usr/bin") + xtrabackupEnginePath = flag.String("xtrabackup_root_path", "", "directory location of the xtrabackup and xbstream executables, e.g., /usr/bin") // flags to pass through to backup phase xtrabackupBackupFlags = flag.String("xtrabackup_backup_flags", "", "flags to pass to backup command. These should be space separated and will be added to the end of the command") // flags to pass through to prepare phase of restore @@ -580,7 +580,7 @@ func (be *XtrabackupEngine) extractFiles(ctx context.Context, logger logutil.Log case xbstream: // now extract the files by running xbstream - xbstreamProgram := xbstream + xbstreamProgram := path.Join(*xtrabackupEnginePath, xbstream) flagsToExec := []string{"-C", tempDir, "-xv"} if *xbstreamRestoreFlags != "" { flagsToExec = append(flagsToExec, strings.Fields(*xbstreamRestoreFlags)...)
use provided xtrabackup_root_path to find xbstream instead of expecting it to be present in PATH
vitessio_vitess
train
25de304485ef4f1964010b46fa0c97192c7cd9bf
diff --git a/pykube/config.py b/pykube/config.py index <HASH>..<HASH> 100644 --- a/pykube/config.py +++ b/pykube/config.py @@ -18,6 +18,46 @@ class KubeConfig(object): """ @classmethod + def from_service_account(cls): + path = "/var/run/secrets/kubernetes.io/serviceaccount" + with open(os.path.join(path, "token")) as fp: + token = fp.read() + doc = { + "clusters": [ + { + "name": "self", + "cluster": { + "server": "https://{}:{}".format( + os.environ["KUBERNETES_SERVICE_HOST"], + os.environ["KUBERNETES_SERVICE_PORT"], + ), + "certificate-authority": os.path.join(path, "ca.crt"), + }, + }, + ], + "users": [ + { + "name": "self", + "user": { + "token": token, + }, + }, + ], + "contexts": [ + { + "name": "self", + "context": { + "cluster": "self", + "user": "self", + }, + } + ], + "current-context": "self", + } + self = cls(doc) + return self + + @classmethod def from_file(cls, filename): """ Creates an instance of the KubeConfig class from a kubeconfig file.
Added KubeConfig.from_service_account
kelproject_pykube
train
ca3f12b9af741e835fc1741c2a9fdafe1a627690
diff --git a/test/extended/router/metrics.go b/test/extended/router/metrics.go index <HASH>..<HASH> 100644 --- a/test/extended/router/metrics.go +++ b/test/extended/router/metrics.go @@ -59,6 +59,10 @@ var _ = g.Describe("[Conformance][networking][router] openshift router metrics", epts, err := oc.AdminKubeClient().CoreV1().Endpoints("default").Get("router", metav1.GetOptions{}) o.Expect(err).NotTo(o.HaveOccurred()) + if len(epts.Subsets) == 0 || len(epts.Subsets[0].Addresses) == 0 { + e2e.Failf("Unable to run HAProxy router tests, the router reports no endpoints: %#v", epts) + return + } host = epts.Subsets[0].Addresses[0].IP ns = oc.KubeFramework().Namespace.Name
Router tests should not panic when router has no endpoints Fail the test instead so we can debug
openshift_origin
train
13a79c76975435cf348a5016eecca1e2339a0121
diff --git a/dipper/models/Dataset.py b/dipper/models/Dataset.py index <HASH>..<HASH> 100644 --- a/dipper/models/Dataset.py +++ b/dipper/models/Dataset.py @@ -253,7 +253,10 @@ class Dataset: def _set_version_level_triples(self): self.model.addType(self.version_level_curie, self.globaltt['Dataset']) self.graph.addTriple(self.version_level_curie, self.globaltt['title'], - self.ingest_title, True) + self.ingest_title + + " Monarch version " + + self.data_release_version, + True) if self.ingest_description is not None: self.model.addDescription(self.version_level_curie, self.ingest_description) @@ -275,7 +278,10 @@ class Dataset: self.globaltt['distribution']) self.graph.addTriple(self.distribution_level_turtle_curie, self.globaltt['title'], - self.ingest_title, True) + self.ingest_title + + " distribution " + + self.distribution_type, + True) if self.ingest_description is not None: self.model.addDescription(self.distribution_level_turtle_curie, self.ingest_description) diff --git a/tests/test_dataset.py b/tests/test_dataset.py index <HASH>..<HASH> 100755 --- a/tests/test_dataset.py +++ b/tests/test_dataset.py @@ -227,8 +227,13 @@ class DatasetTestCase(unittest.TestCase): def test_version_level_title(self): triples = list(self.dataset.graph.triples( - (self.version_level_IRI, self.iri_title, Literal(self.ingest_title)))) + (self.version_level_IRI, self.iri_title, None))) self.assertTrue(len(triples) == 1, "missing version level title triple") + self.assertEqual(triples[0][2], + Literal(self.ingest_title + + " Monarch version " + + self.data_release_version), + "version level title triple has wrong value") def test_version_level_description(self): triples = list(self.dataset.graph.triples( @@ -308,9 +313,15 @@ class DatasetTestCase(unittest.TestCase): def test_distribution_level_title(self): triples = list(self.dataset.graph.triples( - (self.distribution_level_IRI_ttl, self.iri_title, - Literal(self.ingest_title)))) - self.assertTrue(len(triples) == 1, "missing version 
level type title triple") + (self.distribution_level_IRI_ttl, self.iri_title, None))) + self.assertTrue(len(triples) == 1, + "missing distribution level type title triple") + self.assertEqual(triples[0][2], + Literal(self.ingest_title + + " distribution " + + self.distribution_type), + "distribution level title triple has wrong value") + def test_distribution_level_description(self): triples = list(self.dataset.graph.triples(
More descriptive ingest titles for version and distribution levels in dataset metadata
monarch-initiative_dipper
train
84d8c4086ea15e74b1c917e78a00648661238e0b
diff --git a/mod/forum/rate.php b/mod/forum/rate.php index <HASH>..<HASH> 100644 --- a/mod/forum/rate.php +++ b/mod/forum/rate.php @@ -36,6 +36,7 @@ continue; } + $postid = (int)$postid; $lastpostid = $postid; if ($oldrating = get_record("forum_ratings", "userid", $USER->id, "post", $postid)) {
cleaning of parameter name before use SC#<I>; merged from MOODLE_<I>_STABLE
moodle_moodle
train
45b31c4e9f86def6db9a9b7d0fddb62a27118b60
diff --git a/sample/src/main/java/io/sweers/barber/sample/MainActivity.java b/sample/src/main/java/io/sweers/barber/sample/MainActivity.java index <HASH>..<HASH> 100644 --- a/sample/src/main/java/io/sweers/barber/sample/MainActivity.java +++ b/sample/src/main/java/io/sweers/barber/sample/MainActivity.java @@ -4,13 +4,13 @@ import android.app.AlertDialog; import android.content.Intent; import android.net.Uri; import android.os.Bundle; -import android.support.v7.app.ActionBarActivity; +import android.support.v7.app.AppCompatActivity; import android.text.Html; import android.view.Menu; import android.view.MenuItem; -public class MainActivity extends ActionBarActivity { +public class MainActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) {
Update to new AppCompatActivity
ZacSweers_barber
train
7b4cb20fc47ccb7f87201ed68e837235f613363c
diff --git a/src/com/opencms/file/CmsResourceTypePage.java b/src/com/opencms/file/CmsResourceTypePage.java index <HASH>..<HASH> 100644 --- a/src/com/opencms/file/CmsResourceTypePage.java +++ b/src/com/opencms/file/CmsResourceTypePage.java @@ -1,7 +1,7 @@ /* * File : $Source: /alkacon/cvs/opencms/src/com/opencms/file/Attic/CmsResourceTypePage.java,v $ -* Date : $Date: 2001/08/07 14:03:16 $ -* Version: $Revision: 1.16 $ +* Date : $Date: 2001/08/16 09:49:02 $ +* Version: $Revision: 1.17 $ * * This library is part of OpenCms - * the Open Source Content Mananagement System @@ -45,7 +45,7 @@ import com.opencms.file.genericSql.*; * Access class for resources of the type "Page". * * @author Alexander Lucas - * @version $Revision: 1.16 $ $Date: 2001/08/07 14:03:16 $ + * @version $Revision: 1.17 $ $Date: 2001/08/16 09:49:02 $ */ public class CmsResourceTypePage implements I_CmsResourceType, Serializable, I_CmsConstants, com.opencms.workplace.I_CmsWpConstants { @@ -422,7 +422,11 @@ public class CmsResourceTypePage implements I_CmsResourceType, Serializable, I_C //CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), I_CmsConstants.C_TYPE_BODY_NAME, new Hashtable()); CmsFile bodyFile = cms.doCreateFile(bodyFolder, name, (C_DEFAULTBODY_START + new String(contents) + C_DEFAULTBODY_END).getBytes(), I_CmsConstants.C_TYPE_PLAIN_NAME, new Hashtable()); cms.doLockResource(bodyFolder + name, true); - cms.chmod(bodyFile.getAbsolutePath(), bodyFile.getAccessFlags() + C_ACCESS_INTERNAL_READ); + int flags = bodyFile.getAccessFlags(); + if ((flags & C_ACCESS_INTERNAL_READ) ==0 ) { + flags += C_ACCESS_INTERNAL_READ; + } + cms.chmod(bodyFile.getAbsolutePath(), flags); return file; }
Bugfix: A new created page with default flag internal set results now in an body-file with set internal, too.
alkacon_opencms-core
train