hash: stringlengths (40-40)
diff: stringlengths (131-114k)
message: stringlengths (7-980)
project: stringlengths (5-67)
split: stringclasses (1 value)
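Read as a dataset schema, each record pairs a 40-character commit hash with its diff, commit message, project slug, and split. A minimal sketch of iterating such a dataset, assuming it is published as a Hugging Face dataset; the identifier `commit-dataset` is hypothetical:

```python
# Hypothetical loader for a commit-message dataset with the columns above.
from datasets import load_dataset

ds = load_dataset("commit-dataset", split="train")  # dataset id is a placeholder
for record in ds.select(range(3)):
    print(record["hash"][:8], record["project"])
    print("  ", record["message"].splitlines()[0])  # commit title only
```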
47c5ac12b07e3ff9394fb290de8d4337057e37c8
diff --git a/cumulusci/__init__.py b/cumulusci/__init__.py index <HASH>..<HASH> 100644 --- a/cumulusci/__init__.py +++ b/cumulusci/__init__.py @@ -3,7 +3,7 @@ import sys __import__("pkg_resources").declare_namespace("cumulusci") -__version__ = "3.0.3.dev1" +__version__ = "3.0.3.dev2" __location__ = os.path.dirname(os.path.realpath(__file__)) diff --git a/cumulusci/salesforce_api/metadata.py b/cumulusci/salesforce_api/metadata.py index <HASH>..<HASH> 100644 --- a/cumulusci/salesforce_api/metadata.py +++ b/cumulusci/salesforce_api/metadata.py @@ -33,7 +33,6 @@ from urllib3.contrib import pyopenssl pyopenssl.extract_from_urllib3() retry_policy = Retry(backoff_factor=0.3) -http_adapter = HTTPAdapter(max_retries=retry_policy) class BaseMetadataApiCall(object): @@ -111,6 +110,7 @@ class BaseMetadataApiCall(object): session_id = self.task.org_config.access_token auth_envelope = envelope.replace("###SESSION_ID###", session_id) session = requests.Session() + http_adapter = HTTPAdapter(max_retries=retry_policy) session.mount("https://", http_adapter) response = session.post( self._build_endpoint_url(), diff --git a/setup.cfg b/setup.cfg index <HASH>..<HASH> 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [bumpversion] -current_version = 3.0.3.dev1 +current_version = 3.0.3.dev2 commit = True tag = False diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ with open("requirements_dev.txt") as dev_requirements_file: setup( name="cumulusci", - version="3.0.3.dev1", + version="3.0.3.dev2", description="Build and release tools for Salesforce developers", long_description=readme + u"\n\n" + history, long_description_content_type="text/x-rst",
Try to avoid reusing the connection pool
SFDO-Tooling_CumulusCI
train
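The CumulusCI change above moves `HTTPAdapter` construction from module scope into the request path, so every `requests.Session` mounts a fresh adapter and therefore a fresh connection pool. A minimal sketch of that pattern; the `post_with_fresh_pool` helper name is made up:

```python
# Pattern from the commit: build a new HTTPAdapter (and connection pool) per
# call instead of sharing one module-level adapter across sessions.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry_policy = Retry(backoff_factor=0.3)

def post_with_fresh_pool(url, data):  # hypothetical helper
    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=retry_policy))
    return session.post(url, data=data)
```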
9035871108828be6fe2810fa83945f4d9ff7bfbd
diff --git a/packages/ember-handlebars/lib/helpers/view.js b/packages/ember-handlebars/lib/helpers/view.js index <HASH>..<HASH> 100644 --- a/packages/ember-handlebars/lib/helpers/view.js +++ b/packages/ember-handlebars/lib/helpers/view.js @@ -40,7 +40,11 @@ function makeBindings(options) { } } else { if (hashType === 'ID') { - hash[prop + 'Binding'] = view._getBindingForStream(value); + if (prop === 'class') { + hash.classBinding = value; + } else { + hash[prop + 'Binding'] = view._getBindingForStream(value); + } delete hash[prop]; delete hashTypes[prop]; }
Ensure that {{view class=someProp}} is not streamified early.
emberjs_ember.js
train
ae0e21dae2acb9aef905d8134dbff53387d13582
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -271,4 +271,5 @@ texinfo_documents = [ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} -sphinxgallery_conf = {'doc_module' : 'numpy'} +sphinxgallery_conf = {'doc_module' : ('sphinxgallery', 'numpy'), + 'reference_url': {'sphinxgallery' : ''}} diff --git a/doc/index.rst b/doc/index.rst index <HASH>..<HASH> 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -11,13 +11,18 @@ It is extracted from the scikit-learn project and aims to be an independent general purpose extension. Contents: +--------- .. toctree:: :maxdepth: 2 getting_started advanced_configuration - auto_examples/index + reference + +Sphinx-Gallery Show: :ref:`examples-index` +'''''''''''''''''''''''''''''''''''''''''' + Indices and tables diff --git a/sphinxgallery/docs_resolv.py b/sphinxgallery/docs_resolv.py index <HASH>..<HASH> 100644 --- a/sphinxgallery/docs_resolv.py +++ b/sphinxgallery/docs_resolv.py @@ -324,12 +324,16 @@ def embed_code_links(app, exception): gallery_conf = app.config.sphinxgallery_conf # Add resolvers for the packages for which we want to show links doc_resolvers = {} - doc_resolvers[gallery_conf['doc_module']] = SphinxDocLinkResolver(app.builder.outdir, - relative=True) - for this_module, url in gallery_conf['resolver_urls'].items(): + for this_module, url in gallery_conf['reference_url'].items(): try: - doc_resolvers[this_module] = SphinxDocLinkResolver(url) + if url == '': + doc_resolvers[this_module] = SphinxDocLinkResolver( + app.builder.outdir, + relative=True) + else: + doc_resolvers[this_module] = SphinxDocLinkResolver(url) + except HTTPError as e: print("The following HTTP Error has occurred:\n") print(e.code) diff --git a/sphinxgallery/gen_gallery.py b/sphinxgallery/gen_gallery.py index <HASH>..<HASH> 100644 --- a/sphinxgallery/gen_gallery.py +++ b/sphinxgallery/gen_gallery.py @@ -19,7 +19,9 @@ def generate_gallery_rst(app): if not plot_gallery: return - gallery_conf.update(app.config.sphinxgallery_conf) + tmp_conf = app.config.sphinxgallery_conf + gallery_conf['reference_url'].update(tmp_conf.pop('reference_url')) + gallery_conf.update(tmp_conf) # this assures I can call the config in other places app.config.sphinxgallery_conf = gallery_conf @@ -38,8 +40,8 @@ def generate_gallery_rst(app): .. _examples-index: -Examples -======== +Gallery of Examples +=================== """) # Here we don't use an os.walk, but we recurse only twice: flat is @@ -56,8 +58,8 @@ gallery_conf = { 'root_dir' : '../examples', 'examples_gallery' : 'auto_examples', 'mod_generated' : 'modules/generated', - 'doc_module' : 'sphinxgallery', - 'resolver_urls' : { + 'doc_module' : (), + 'reference_url' : { 'matplotlib': 'http://matplotlib.org', 'numpy': 'http://docs.scipy.org/doc/numpy-1.9.1', 'scipy': 'http://docs.scipy.org/doc/scipy-0.15.1/reference'}
Configuration setup for documenting modules and references to external docs as backreferences from local modules used in examples
sphinx-gallery_sphinx-gallery
train
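The sphinx-gallery commit turns `doc_module` into a tuple and renames `resolver_urls` to `reference_url`, where an empty string means "resolve links against the locally built docs". A conf.py sketch of the new-style configuration, taken from the diff itself:

```python
# New-style sphinx-gallery configuration from this commit.
sphinxgallery_conf = {
    'doc_module': ('sphinxgallery', 'numpy'),
    'reference_url': {
        'sphinxgallery': '',  # '' -> resolve against the local build output
        'numpy': 'http://docs.scipy.org/doc/numpy-1.9.1',
    },
}
```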
089a945bbad94b3c720f312c04a9cab41d8dbaf1
diff --git a/lib/nuts.js b/lib/nuts.js index <HASH>..<HASH> 100644 --- a/lib/nuts.js +++ b/lib/nuts.js @@ -1,6 +1,6 @@ var _ = require('lodash'); var Q = require('q'); -var url = require('url'); +var urljoin = require('urljoin.js'); var Understudy = require('understudy'); var express = require('express'); var useragent = require('express-useragent'); @@ -230,7 +230,7 @@ Nuts.prototype.onUpdate = function(req, res, next) { var releaseNotes = notes.merge(versions.slice(0, -1), { includeTag: false }); res.status(200).send({ - "url": url.resolve(fullUrl, "/download/version/"+latest.tag+"/"+platform+"?filetype="+filetype), + "url": urljoin(fullUrl, "/../../../", "/download/version/"+latest.tag+"/"+platform+"?filetype="+filetype), "name": latest.tag, "notes": releaseNotes, "pub_date": latest.published_at.toISOString() @@ -278,7 +278,7 @@ Nuts.prototype.onUpdateWin = function(req, res, next) { // Change filename to use download proxy .map(function(entry) { - entry.filename = url.resolve(fullUrl, '/download/'+entry.semver+'/'+entry.filename); + entry.filename = urljoin(fullUrl, "/../../../../", '/download/'+entry.semver+'/'+entry.filename); return entry; })
Use urljoin instead of url.resolve to build correct download URLs Fixes #<I>
GitbookIO_nuts
train
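The nuts fix works around a general URL-joining pitfall: resolving against an absolute path discards any prefix the app is served under. Python's `urllib.parse.urljoin` behaves the same way, so as an illustrative analog (the URLs are made up):

```python
# Python analog of the bug this commit fixes: joining with an absolute path
# drops everything after the host, breaking apps served under a path prefix.
from urllib.parse import urljoin

base = "https://example.com/nuts/update/win32/1.0.0"
print(urljoin(base, "/download/1.0.1/app.exe"))
# -> https://example.com/download/1.0.1/app.exe  (the /nuts prefix is lost)
```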
18ead67603497226867190ad0dc5dcf256454824
diff --git a/phypno/widgets/channels.py b/phypno/widgets/channels.py index <HASH>..<HASH> 100644 --- a/phypno/widgets/channels.py +++ b/phypno/widgets/channels.py @@ -79,10 +79,14 @@ class Channels(QGroupBox): self.current = self.list_grp.currentText() self.l0 = QListWidget() + self.create_list(self.l0) self.l1 = QListWidget() + self.create_list(self.l1) rerefButton = QPushButton('Average Ref') rerefButton.clicked.connect(self.average_reference) + rerefButton.setToolTip('Use the average of all the channels being ' + + 'plotted as reference.') self.hpEdit = QLineEdit('None') self.lpEdit = QLineEdit('None') @@ -119,28 +123,22 @@ class Channels(QGroupBox): self.update_list_grp() def update_list_grp(self): - """Update the list containing the channels. - - TODO: this should probably update the filter settings. - - """ + """Update the list containing the channels.""" current = self.list_grp.currentText() idx = [x['name'] for x in self.groups].index(current) - self.create_list(self.l0, self.groups[idx]['chan_to_plot']) - self.create_list(self.l1, self.groups[idx]['ref_chan']) + self.highlight_list(self.l0, self.groups[idx]['chan_to_plot']) + self.highlight_list(self.l1, self.groups[idx]['ref_chan']) self.hpEdit.setText(str(self.groups[idx]['filter']['low_cut'])) self.lpEdit.setText(str(self.groups[idx]['filter']['high_cut'])) self.current = current # update index - def create_list(self, l, selected_chan): + def create_list(self, l): """Create list of channels (one for those to plot, one for ref). Parameters ---------- l : instance of QListWidget one of the two lists (chan_to_plot or ref_chan) - selected_chan : list of str - channels to indicate as selected. """ ExtendedSelection = QAbstractItemView.SelectionMode(3) @@ -148,11 +146,31 @@ class Channels(QGroupBox): for chan in self.chan_name: item = QListWidgetItem(chan) l.addItem(item) - if chan in selected_chan: + + def highlight_list(self, l, selected_chan): + """Highlight channels in the list of channels. + + Parameters + ---------- + selected_chan : list of str + channels to indicate as selected. + + """ + for row in range(l.count()): + item = l.item(row) + if item.text() in selected_chan: item.setSelected(True) else: item.setSelected(False) + def average_reference(self): + """Select in the reference all the channels in the main selection.""" + selectedItems = self.l0.selectedItems() + chan_to_plot = [] + for selected in selectedItems: + chan_to_plot.append(selected.text()) + self.highlight_list(self.l1, chan_to_plot) + def update_chan_grp(self): """Read the GUI and update the channel groups.""" selectedItems = self.l0.selectedItems() @@ -249,7 +267,3 @@ class Channels(QGroupBox): self.list_grp.removeItem(idx) self.groups.pop(idx) self.update_list_grp() - - def average_reference(self): - """Select in the reference all the channels in the main selection.""" - pass
reorganize the channels even more cleanly, and create a button to make an average reference
wonambi-python_wonambi
train
42ae3f3215e504126c049b500e5b361653798bae
diff --git a/classification/classify_sensor_data.py b/classification/classify_sensor_data.py index <HASH>..<HASH> 100644 --- a/classification/classify_sensor_data.py +++ b/classification/classify_sensor_data.py @@ -54,13 +54,17 @@ _CATEGORY_ENCODER_PARAMS = { "categoryList": range(NUM_CATEGORIES) } -_SEQ_CLASSIFIER_PARAMS = {"implementation": "py", - "clVerbosity": _VERBOSITY} +_SEQ_CLASSIFIER_PARAMS = { + "implementation": "py", + "clVerbosity": _VERBOSITY +} -_CLA_CLASSIFIER_PARAMS = {"steps": "0,1", - "implementation": "py", - "numCategories": NUM_CATEGORIES, - "clVerbosity": _VERBOSITY} +_CLA_CLASSIFIER_PARAMS = { + "steps": "0,1", + "implementation": "py", + "numCategories": NUM_CATEGORIES, + "clVerbosity": _VERBOSITY +} _KNN_CLASSIFIER_PARAMS = { "k": 1, @@ -69,7 +73,6 @@ _KNN_CLASSIFIER_PARAMS = { } - def run(net, numRecords, partitions, outFile): """ Run the network and write classification results output. @@ -84,7 +87,8 @@ def run(net, numRecords, partitions, outFile): temporalMemoryRegion = net.regions["TM"] classifierRegion = net.regions["classifier"] - phaseInfo = "-> Training SP. Index=0. LEARNING: SP is ON | TM is OFF | Classifier is OFF \n" + phaseInfo = ("-> Training SP. Index=0. LEARNING: SP is ON | TM is OFF | " + "Classifier is OFF \n") outFile.write(phaseInfo) print phaseInfo @@ -102,19 +106,23 @@ def run(net, numRecords, partitions, outFile): # by the FileRecordStream instance. actualValue = sensorRegion.getOutputData("categoryOut")[0] - outFile.write("=> INDEX=%s | actualValue=%s | anomalyScore=%s \n" % (i, actualValue, anomalyScore)) + outFile.write("=> INDEX=%s | actualValue=%s | anomalyScore=%s \n" % ( + i, actualValue, anomalyScore)) # SP has been trained. Now start training the TM too. if i == partitions[0]: temporalMemoryRegion.setParameter("learningMode", True) - phaseInfo = "-> Training TM. Index=%s. LEARNING: SP is ON | TM is ON | Classifier is OFF \n" % i + phaseInfo = ( + "-> Training TM. Index=%s. LEARNING: SP is ON | TM is ON | Classifier " + "is OFF \n" % i) outFile.write(phaseInfo) print phaseInfo # Start training the classifier as well. elif i == partitions[1]: classifierRegion.setParameter("learningMode", True) - phaseInfo = "-> Training Classifier. Index=%s. LEARNING: SP is OFF | TM is ON | Classifier is ON \n" % i + phaseInfo = ("-> Training Classifier. Index=%s. LEARNING: SP is OFF | " + "TM is ON | Classifier is ON \n" % i) outFile.write(phaseInfo) print phaseInfo @@ -123,14 +131,15 @@ def run(net, numRecords, partitions, outFile): spatialPoolerRegion.setParameter("learningMode", False) temporalMemoryRegion.setParameter("learningMode", False) classifierRegion.setParameter("learningMode", False) - phaseInfo = "-> Test. Index=%s. LEARNING: SP is OFF | TM is OFF | Classifier is OFF \n" % i + phaseInfo = ("-> Test. Index=%s. LEARNING: SP is OFF | TM is OFF | " + "Classifier is OFF \n" % i) outFile.write(phaseInfo) print phaseInfo # Evaluate the predictions on the test set. 
if i >= partitions[2]: - inferredValue = classifierRegion.getOutputData("classificationResults")[0] + inferredValue = classifierRegion.getOutputData("categoriesOut")[0] outFile.write(" inferredValue=%s \n" % inferredValue) if actualValue == inferredValue: @@ -140,8 +149,10 @@ def run(net, numRecords, partitions, outFile): predictionAccuracy = 100.0 * numCorrect / numTestRecords - results = "RESULTS: accuracy=%s | %s correctly predicted records out of %s test records \n" % ( - predictionAccuracy, numCorrect, numTestRecords) + results = ("RESULTS: accuracy=%s | %s correctly predicted records out of %s " + "test records \n" % (predictionAccuracy, + numCorrect, + numTestRecords)) outFile.write(results) print results @@ -170,7 +181,7 @@ if __name__ == "__main__": SIGNAL_AMPLITUDE, SIGNAL_MEAN, SIGNAL_PERIOD) - + outFile.write(expParams) print expParams @@ -191,8 +202,8 @@ if __name__ == "__main__": "py.RecordSensor", encoders, NUM_CATEGORIES, - "py.SequenceClassifierRegion", - _SEQ_CLASSIFIER_PARAMS) + "py.CLAClassifierRegion", + _CLA_CLASSIFIER_PARAMS) # Need to init the network before it can run. network.initialize()
fixed lines over <I> chars
numenta_htmresearch
train
939d8aa4fbb0f2b5e5e9c0e92c23f82eb1833d03
diff --git a/lib/node_modules/@stdlib/assert/is-native-function/benchmark/benchmark.js b/lib/node_modules/@stdlib/assert/is-native-function/benchmark/benchmark.js index <HASH>..<HASH> 100644 --- a/lib/node_modules/@stdlib/assert/is-native-function/benchmark/benchmark.js +++ b/lib/node_modules/@stdlib/assert/is-native-function/benchmark/benchmark.js @@ -48,7 +48,7 @@ bench( pkg, function benchmark( b ) { new RegExp( '.*' ), new Date(), function noop() {}, - Math.sqrt, + Math.sqrt, // eslint-disable-line stdlib/no-builtin-math Date, RegExp ]; diff --git a/lib/node_modules/@stdlib/assert/is-symbol/benchmark/benchmark.js b/lib/node_modules/@stdlib/assert/is-symbol/benchmark/benchmark.js index <HASH>..<HASH> 100644 --- a/lib/node_modules/@stdlib/assert/is-symbol/benchmark/benchmark.js +++ b/lib/node_modules/@stdlib/assert/is-symbol/benchmark/benchmark.js @@ -382,4 +382,3 @@ bench( pkg+'::objects,symbols:isObject', opts, function benchmark( b ) { } b.end(); }); - diff --git a/lib/node_modules/@stdlib/blas/base/daxpy/benchmark/benchmark.memory_reuse2.js b/lib/node_modules/@stdlib/blas/base/daxpy/benchmark/benchmark.memory_reuse2.js index <HASH>..<HASH> 100644 --- a/lib/node_modules/@stdlib/blas/base/daxpy/benchmark/benchmark.memory_reuse2.js +++ b/lib/node_modules/@stdlib/blas/base/daxpy/benchmark/benchmark.memory_reuse2.js @@ -286,7 +286,6 @@ function main() { f = createBenchmark2( x, y ); bench( pkg+'::memory-reuse,shared-array,native:len='+len, opts, f ); - f = createBenchmark3( x, y ); bench( pkg+'::memory-reuse,shared-array,wasm,set-value:len='+len, f );
Remove empty lines and disable lint rule
stdlib-js_stdlib
train
f76dccb4b1a5e5ac922f8a2daa0d1d7304d15b4a
diff --git a/tensorboard/plugins/hparams/hparams_plugin.py b/tensorboard/plugins/hparams/hparams_plugin.py index <HASH>..<HASH> 100644 --- a/tensorboard/plugins/hparams/hparams_plugin.py +++ b/tensorboard/plugins/hparams/hparams_plugin.py @@ -66,6 +66,9 @@ class HParamsPlugin(base_plugin.TBPlugin): @wrappers.Request.application def get_experiment_route(self, request): try: + if not self.is_active(): + raise error.HParamsError("HParams plugin is not active.") + return http_util.Respond(request, json_format.MessageToJson( self._context.experiment()), @@ -77,6 +80,8 @@ class HParamsPlugin(base_plugin.TBPlugin): @wrappers.Request.application def list_session_groups_route(self, request): try: + if not self.is_active(): + raise error.HParamsError("HParams plugin is not active.") # args.get() returns the request unquoted. request_proto = request.args.get('request') if request_proto is None: diff --git a/tensorboard/plugins/hparams/list_session_groups.py b/tensorboard/plugins/hparams/list_session_groups.py index <HASH>..<HASH> 100644 --- a/tensorboard/plugins/hparams/list_session_groups.py +++ b/tensorboard/plugins/hparams/list_session_groups.py @@ -376,6 +376,10 @@ def _list_value_to_python_list(list_value): return [_value_to_python(value) for value in list_value.values] +# WARNING: This class uses python PCRE-compatible regex which have exponential- +# time inputs, which is a security vulnerability (an attacker can make the +# server use a large amount of CPU). +# TODO(erez): Replace the regexp routines with a polynomial implementation. class _SessionGroupRegexFilter(_SessionGroupFilter): def __init__(self, regex, extractor, include_missing_values): super(_SessionGroupRegexFilter, self).__init__(extractor,
Disable endpoints when hparams is not active
tensorflow_tensorboard
train
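The tensorboard change guards each HTTP route behind `is_active()` and surfaces the failure as the plugin's own error type. A stripped-down sketch of that guard pattern, with hypothetical class names:

```python
# Sketch of the guard added in this commit: every route checks plugin state
# up front and raises the plugin's error type when inactive.
class HParamsError(Exception):
    pass

class Plugin:
    def __init__(self, active):
        self._active = active

    def is_active(self):
        return self._active

    def get_experiment_route(self, request):
        if not self.is_active():
            raise HParamsError("HParams plugin is not active.")
        return {"experiment": "..."}  # normal handling would follow here
```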
e46ef22092886e78b1a6cc9781b77f191f7c6a38
diff --git a/fleetspeak/src/e2etesting/lib/setup_components.go b/fleetspeak/src/e2etesting/lib/setup_components.go index <HASH>..<HASH> 100644 --- a/fleetspeak/src/e2etesting/lib/setup_components.go +++ b/fleetspeak/src/e2etesting/lib/setup_components.go @@ -264,6 +264,7 @@ func (cc *ComponentCmds) start(tempPath string, fsFrontendPort, fsAdminPort, msP cc.ServiceCmd = exec.Command( "python", "frr_python/frr_server.py", + fmt.Sprintf("--master_server_address=localhost:%v", msPort), fmt.Sprintf("--fleetspeak_message_listen_address=localhost:%v", fsFrontendPort), fmt.Sprintf("--fleetspeak_server=localhost:%v", fsAdminPort)) startCommand(cc.ServiceCmd) diff --git a/frr_python/frr_server.py b/frr_python/frr_server.py index <HASH>..<HASH> 100644 --- a/frr_python/frr_server.py +++ b/frr_python/frr_server.py @@ -8,6 +8,7 @@ import time import grpc from absl import app +from absl import flags from fleetspeak.server_connector.connector import InsecureGRPCServiceClient # TODO(Alexandr-TS): Add setup.py to compile fleetspeak_frr protos. @@ -16,35 +17,45 @@ from fleetspeak.src.inttesting.frr.proto.fleetspeak_frr.frr_pb2 import MessageIn from fleetspeak.src.inttesting.frr.proto.fleetspeak_frr.frr_pb2_grpc import MasterStub -# TODO(Alexandr-TS): Make master server port passed in a flag. -channel = grpc.insecure_channel('localhost:6059') -stub = MasterStub(channel) +FLAGS = flags.FLAGS -def Listener(message, context): - """Receives a message from a client, prints it and forwards to master server.""" +flags.DEFINE_string( + name="master_server_address", + default="localhost:6059", + help="Address of master server to forward clients' messages") - del context # Unused - if message.message_type != "TrafficResponse": - logging.info(f"Unknown message type: {message.message_type}") - return +class Listener: + """Connects to master server and processes messages from clients""" - response_data = TrafficResponseData() - message.data.Unpack(response_data) - logging.info( - f"RESPONSE - master_id: {response_data.master_id}, " - f"request_id: {response_data.request_id}, " - f"response_index: {response_data.response_index}, " - f"text: {response_data.data}") + def __init__(self): + channel = grpc.insecure_channel(FLAGS.master_server_address) + self.stub = MasterStub(channel) - stub.RecordTrafficResponse(MessageInfo(client_id=message.source.client_id, data=response_data)) + def __call__(self, message, context): + del context # Unused + + if message.message_type != "TrafficResponse": + logging.info(f"Unknown message type: {message.message_type}") + return + + response_data = TrafficResponseData() + message.data.Unpack(response_data) + logging.info( + f"RESPONSE - master_id: {response_data.master_id}, " + f"request_id: {response_data.request_id}, " + f"response_index: {response_data.response_index}, " + f"text: {response_data.data}") + + self.stub.RecordTrafficResponse( + MessageInfo(client_id=message.source.client_id, data=response_data)) def main(argv=None): del argv # Unused. service_client = InsecureGRPCServiceClient("FRR") - service_client.Listen(Listener) + service_client.Listen(Listener()) while True: time.sleep(1)
Add flag for master server address in frr_server.py (#<I>) * Add flag for master server address in frr_server.py * Add class for frr_server Listener
google_fleetspeak
train
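The fleetspeak commit replaces a hard-coded master-server address with an absl flag. A self-contained sketch of the flag definition it adds:

```python
# Minimal absl sketch mirroring the commit: the address becomes a flag with a
# default instead of a hard-coded constant.
from absl import app, flags

FLAGS = flags.FLAGS
flags.DEFINE_string(
    name="master_server_address",
    default="localhost:6059",
    help="Address of master server to forward clients' messages")

def main(argv):
    del argv  # Unused.
    print("connecting to", FLAGS.master_server_address)

if __name__ == "__main__":
    app.run(main)
```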
25860d20662f62852f3a547fc63f482734d79ec5
diff --git a/cathub/reaction_networks.py b/cathub/reaction_networks.py index <HASH>..<HASH> 100644 --- a/cathub/reaction_networks.py +++ b/cathub/reaction_networks.py @@ -363,12 +363,12 @@ def proton_hydroxide_free_energy(temperature, pressure, pH): """ H2 = GasMolecule('H2') H2O = GasMolecule('H2O') - G_H2 = H2.get_free_energy(temperature = temperature, pressure = pressure) - G_H2O = H2O.get_free_energy(temperature = temperature) + G_H2 = H2.get_free_energy(temperature=temperature, pressure=pressure) + G_H2O = H2O.get_free_energy(temperature=temperature) G_H = (0.5*G_H2) - ((R*temperature)/(z*F))*ln10*pH G_OH = G_H2O - G_H # Do not need Kw when water equilibrated - return G_H, G_OH + return G_H, G_OH, G_H2O def get_FEC(molecule_list,
Implemented submodule for the estimation of proton-transfer activation energies and charge transfers based on diabatic state intersections.
SUNCAT-Center_CatHub
train
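The CatHub diff evaluates the computational-hydrogen-electrode correction G_H = 0.5*G_H2 - (R*T/(z*F))*ln(10)*pH, which works out to roughly 0.059 eV per pH unit at 298 K. A standalone sketch with standard constants; the `G_H2` input below is a made-up value:

```python
# Sketch of the pH shift used in the diff. With R in J/(mol*K) and F in C/mol
# the correction comes out in volts, i.e. eV per transferred electron (z=1).
import math

R, F, z = 8.314, 96485.0, 1  # gas constant, Faraday constant, electrons

def proton_free_energy(G_H2, temperature, pH):
    return 0.5 * G_H2 - (R * temperature / (z * F)) * math.log(10) * pH

print(proton_free_energy(G_H2=-0.3, temperature=298.15, pH=7))  # ~ -0.56
```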
f77c0b17cee18b3c145ae4ee4635703989cd7439
diff --git a/lib/brainstem/presenter_collection.rb b/lib/brainstem/presenter_collection.rb index <HASH>..<HASH> 100644 --- a/lib/brainstem/presenter_collection.rb +++ b/lib/brainstem/presenter_collection.rb @@ -225,21 +225,24 @@ module Brainstem end def run_filters(scope, options) - extract_filters(options).each do |filter_name, arg| + extracted_filters = extract_filters(options, param_opts: true) + extracted_filters.each do |filter_name, filter_opts| + arg = filter_opts[0] + include_params = filter_opts[1] next if arg.nil? filter_lambda = options[:presenter].filters[filter_name][1] if filter_lambda - scope = filter_lambda.call(scope, arg) + scope = include_params ? filter_lambda.call(scope, arg, extracted_filters) : filter_lambda.call(scope, arg) else - scope = scope.send(filter_name, arg) + scope = include_params ? scope.send(filter_name, arg, extracted_filters) : scope.send(filter_name, arg) end end scope end - def extract_filters(options) + def extract_filters(options, param_opts=false) filters_hash = {} run_defaults = options.fetch(:apply_default_filters) { true } @@ -250,7 +253,12 @@ module Brainstem filter_options = filter[0] args = run_defaults && requested.nil? ? filter_options[:default] : requested - filters_hash[filter_name] = args unless args.nil? + include_params = filter_options[:include_params] + if param_opts + filters_hash[filter_name] = [args, include_params] + else + filters_hash[filter_name] = args unless args.nil? + end end filters_hash diff --git a/spec/brainstem/presenter_collection_spec.rb b/spec/brainstem/presenter_collection_spec.rb index <HASH>..<HASH> 100644 --- a/spec/brainstem/presenter_collection_spec.rb +++ b/spec/brainstem/presenter_collection_spec.rb @@ -502,6 +502,19 @@ describe Brainstem::PresenterCollection do expect(result[:workspaces].keys).to eq(%w[2 4]) end end + + context "with include_params" do + it "passes the params into the filter block" do + WorkspacePresenter.filter :filter_with_param, :include_params => true do |scope, option, params| + expect(params["owned_by"]).to be_present + expect(params["title"]).to be_present + expect(params["filter_with_param"]).to be_present + scope + end + + @presenter_collection.presenting("workspaces", :params => { :filter_with_param => "1" }) { Workspace.where(nil) } + end + end end describe "search" do
Add the option for filters to be passed the params. The use case for this is if you have a filter that might conditionally change depending on other filters you are also using. Usage: filter :cool_filter, :include_params => true do |scope, opts, params| # ... end
mavenlink_brainstem
train
8f46cf0ce86021bbd8aa6ac3785a3c24fc4be206
diff --git a/httpx/middleware/header.go b/httpx/middleware/header.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/header.go +++ b/httpx/middleware/header.go @@ -28,6 +28,8 @@ func (h *Header) ServeHTTPContext(ctx context.Context, w http.ResponseWriter, r value := e(r) ctx = httpx.WithHeader(ctx, h.key, value) + r = r.WithContext(ctx) + return h.handler.ServeHTTPContext(ctx, w, r) } diff --git a/httpx/middleware/header_test.go b/httpx/middleware/header_test.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/header_test.go +++ b/httpx/middleware/header_test.go @@ -6,6 +6,7 @@ import ( "testing" "context" + "github.com/remind101/pkg/httpx" ) @@ -24,7 +25,11 @@ func TestHeader(t *testing.T) { m := ExtractHeader( httpx.HandlerFunc(func(ctx context.Context, w http.ResponseWriter, r *http.Request) error { data := httpx.Header(ctx, tt.key) + if got, want := data, tt.val; got != want { + t.Fatalf("%s => %s; want %s", tt.key, got, want) + } + data = httpx.Header(r.Context(), tt.key) if got, want := data, tt.val; got != want { t.Fatalf("%s => %s; want %s", tt.key, got, want) } diff --git a/httpx/middleware/logger.go b/httpx/middleware/logger.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/logger.go +++ b/httpx/middleware/logger.go @@ -43,7 +43,10 @@ func LogTo(h httpx.Handler, g loggerGenerator) httpx.Handler { func InsertLogger(h httpx.Handler, g loggerGenerator) httpx.Handler { return httpx.HandlerFunc(func(ctx context.Context, w http.ResponseWriter, r *http.Request) error { l := g(ctx, r) + ctx = logger.WithLogger(ctx, l) + r = r.WithContext(ctx) + return h.ServeHTTPContext(ctx, w, r) }) } diff --git a/httpx/middleware/opentracing.go b/httpx/middleware/opentracing.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/opentracing.go +++ b/httpx/middleware/opentracing.go @@ -45,6 +45,7 @@ func (h *OpentracingTracer) ServeHTTPContext(ctx context.Context, w http.Respons defer span.Finish() ctx = opentracing.ContextWithSpan(ctx, span) + r = r.WithContext(ctx) rw := NewResponseWriter(w) reqErr := h.handler.ServeHTTPContext(ctx, rw, r) diff --git a/httpx/middleware/reporter.go b/httpx/middleware/reporter.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/reporter.go +++ b/httpx/middleware/reporter.go @@ -26,6 +26,8 @@ func (m *Reporter) ServeHTTPContext(ctx context.Context, w http.ResponseWriter, // Add the request id to reporter context. 
ctx = errors.WithInfo(ctx, "request_id", httpx.RequestID(ctx)) + r = r.WithContext(ctx) + return m.handler.ServeHTTPContext(ctx, w, r) } diff --git a/httpx/middleware/request_id.go b/httpx/middleware/request_id.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/request_id.go +++ b/httpx/middleware/request_id.go @@ -4,6 +4,7 @@ import ( "net/http" "context" + "github.com/remind101/pkg/httpx" ) @@ -39,5 +40,7 @@ func (h *RequestID) ServeHTTPContext(ctx context.Context, w http.ResponseWriter, requestID := e(r) ctx = httpx.WithRequestID(ctx, requestID) + r = r.WithContext(ctx) + return h.handler.ServeHTTPContext(ctx, w, r) } diff --git a/httpx/middleware/request_id_test.go b/httpx/middleware/request_id_test.go index <HASH>..<HASH> 100644 --- a/httpx/middleware/request_id_test.go +++ b/httpx/middleware/request_id_test.go @@ -5,8 +5,9 @@ import ( "net/http/httptest" "testing" - "github.com/remind101/pkg/httpx" "context" + + "github.com/remind101/pkg/httpx" ) func TestRequestID(t *testing.T) { @@ -23,7 +24,12 @@ func TestRequestID(t *testing.T) { m := &RequestID{ handler: httpx.HandlerFunc(func(ctx context.Context, w http.ResponseWriter, r *http.Request) error { requestID := httpx.RequestID(ctx) + if got, want := requestID, tt.id; got != want { + t.Fatalf("RequestID => %s; want %s", got, want) + } + // From request.Context() + requestID = httpx.RequestID(r.Context()) if got, want := requestID, tt.id; got != want { t.Fatalf("RequestID => %s; want %s", got, want) }
Ensure context arg is the same as request Context
remind101_pkg
train
de247283b95ab092bbf5f6271b0d98d1bc44f9c7
diff --git a/bin/nnsyslog.js b/bin/nnsyslog.js index <HASH>..<HASH> 100644 --- a/bin/nnsyslog.js +++ b/bin/nnsyslog.js @@ -31,12 +31,13 @@ libSyslog = require('../lib/syslog'), optParser, opt, syslog, config = {}; +console.log('node version: %s', process.version); /** * Uncaught exception */ process.on('uncaughtException', function (exception) { - if (exception.code === "EACCES") { + if (exception.code === "EACCES" || exception.code === "EACCESS") { // node V4/V5 compat process.exit(1); } console.error('Process uncaught exception: ', exception.message); diff --git a/lib/syslog.js b/lib/syslog.js index <HASH>..<HASH> 100644 --- a/lib/syslog.js +++ b/lib/syslog.js @@ -86,7 +86,7 @@ Syslog.prototype.start = function() { try { this._server.bind(this.port); } catch(err) { - if (err.code === "EACCES") { + if (err.code === "EACCES" || err.code === "EACCESS") { // node V4/V5 compat console.log("Not enought privilege to listen on port %d (run as root?)", this.port); throw err; }
Compat node V5 EACCESS/EACCES
sdolard_node-netasqsyslog
train
ec121562760c6308ca67cf84a3cb1490878cdad5
diff --git a/django_any/fields.py b/django_any/fields.py index <HASH>..<HASH> 100644 --- a/django_any/fields.py +++ b/django_any/fields.py @@ -87,32 +87,48 @@ def any_field(field, **kwargs): return xunit.any_int(min_value=1, max_value=9999) -@multimethod(models.DecimalField) +@multimethod(models.CharField) def any_field(field, **kwargs): """ - Decimal value + Return random value for CharField - >>> result = any_field(models.DecimalField(max_digits=5, decimal_places=2)) + >>> result = any_field(models.CharField(max_length=10)) >>> type(result) - <class 'decimal.Decimal'> + <type 'str'> """ - min_value = 0 - max_value = Decimal('%s.%s' % ('9'*(field.max_digits-field.decimal_places), - '9'*field.decimal_places)) - return xunit.any_decimal(min_value=min_value, max_value=max_value, - decimal_places = field.decimal_places) + return xunit.any_string(min_length=1, max_length=field.max_length) -@multimethod(models.CharField) +@multimethod(models.CommaSeparatedIntegerField) def any_field(field, **kwargs): """ Return random value for CharField - >>> result = any_field(models.CharField(max_length=10)) + >>> result = any_field(models.CommaSeparatedIntegerField(max_length=10)) >>> type(result) <type 'str'> + >>> [int(num) for num in result.split(',')] and 'OK' + 'OK' """ - return xunit.any_string(min_length=1, max_length=field.max_length) + nums_count = field.max_length/2 + nums = [str(xunit.any_int(min_value=0, max_value=9)) for _ in xrange(0, nums_count)] + return ",".join(nums) + + +@multimethod(models.DecimalField) +def any_field(field, **kwargs): + """ + Decimal value + + >>> result = any_field(models.DecimalField(max_digits=5, decimal_places=2)) + >>> type(result) + <class 'decimal.Decimal'> + """ + min_value = 0 + max_value = Decimal('%s.%s' % ('9'*(field.max_digits-field.decimal_places), + '9'*field.decimal_places)) + return xunit.any_decimal(min_value=min_value, max_value=max_value, + decimal_places = field.decimal_places) @multimethod(models.DateField)
Add comma separated integer field support
kmmbvnr_django-any
train
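The django-any commit fills a `CommaSeparatedIntegerField` with `max_length/2` single digits, which always fits: n digits plus n-1 commas is 2n-1 <= max_length. A standalone sketch of that generator:

```python
# Standalone version of the generator in the diff: max_length//2 single digits
# joined with commas never exceeds max_length.
import random

def any_comma_separated_integers(max_length):
    count = max_length // 2
    return ",".join(str(random.randint(0, 9)) for _ in range(count))

value = any_comma_separated_integers(10)
assert len(value) <= 10 and all(part.isdigit() for part in value.split(","))
```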
fdc07e8ea7df60cd350a647af1acc053e7bbfdef
diff --git a/elytron/src/main/java/org/wildfly/extension/elytron/CredentialStoreAliasDefinition.java b/elytron/src/main/java/org/wildfly/extension/elytron/CredentialStoreAliasDefinition.java index <HASH>..<HASH> 100644 --- a/elytron/src/main/java/org/wildfly/extension/elytron/CredentialStoreAliasDefinition.java +++ b/elytron/src/main/java/org/wildfly/extension/elytron/CredentialStoreAliasDefinition.java @@ -131,7 +131,14 @@ class CredentialStoreAliasDefinition extends SimpleResourceDefinition { private static void storeSecret(CredentialStore credentialStore, String alias, String secretValue) throws CredentialStoreException { char[] secret = secretValue != null ? secretValue.toCharArray() : new char[0]; credentialStore.store(alias, createCredentialFromPassword(secret)); - credentialStore.flush(); + try { + credentialStore.flush(); + } catch (CredentialStoreException e) { + // operation fails, remove the entry from the store, to avoid an inconsistency between + // the store on the FS and in the memory + credentialStore.remove(alias, PasswordCredential.class); + throw e; + } } private static class AddHandler extends BaseAddHandler { @@ -189,8 +196,17 @@ class CredentialStoreAliasDefinition extends SimpleResourceDefinition { protected void performRuntime(ModelNode result, OperationContext context, ModelNode operation, CredentialStoreService credentialStoreService) throws OperationFailedException { try { CredentialStore credentialStore = credentialStoreService.getValue(); - credentialStore.remove(context.getCurrentAddressValue(), PasswordCredential.class); - credentialStore.flush(); + String currentAddress = context.getCurrentAddressValue(); + PasswordCredential retrieved = credentialStore.retrieve(currentAddress, PasswordCredential.class); + credentialStore.remove(currentAddress, PasswordCredential.class); + try { + credentialStore.flush(); + } catch (CredentialStoreException e) { + // the operation fails, return removed entry back to the store to avoid an inconsistency + // between the store on the FS and in the memory + credentialStore.store(currentAddress, retrieved); + throw e; + } } catch (CredentialStoreException e) { throw new OperationFailedException(e); }
WFCORE-<I> Credential store flush failure creates inconsistency... ... between the version on the filesystem and in memory
wildfly_wildfly-core
train
ee690118b958c7ece4bf850293b0d4a658984d17
diff --git a/builtin/credential/ldap/path_config.go b/builtin/credential/ldap/path_config.go index <HASH>..<HASH> 100644 --- a/builtin/credential/ldap/path_config.go +++ b/builtin/credential/ldap/path_config.go @@ -1,6 +1,7 @@ package ldap import ( + "crypto/tls" "fmt" "net" "net/url" @@ -31,6 +32,10 @@ func pathConfig(b *backend) *framework.Path { Type: framework.TypeString, Description: "Attribute used for users (default: cn)", }, + "sslverify": &framework.FieldSchema{ + Type: framework.TypeBool, + Description: "Verify LDAP server SSL Certificate?", + }, }, Callbacks: map[logical.Operation]framework.OperationFunc{ @@ -72,10 +77,11 @@ func (b *backend) pathConfigRead( return &logical.Response{ Data: map[string]interface{}{ - "url": cfg.Url, - "userdn": cfg.UserDN, - "groupdn": cfg.GroupDN, - "userattr": cfg.UserAttr, + "url": cfg.Url, + "userdn": cfg.UserDN, + "groupdn": cfg.GroupDN, + "userattr": cfg.UserAttr, + "sslverify": cfg.SSLVerify, }, }, nil } @@ -100,6 +106,12 @@ func (b *backend) pathConfigWrite( if groupdn != "" { cfg.GroupDN = groupdn } + sslverify := d.Get("sslverify").(bool) + if sslverify { + cfg.SSLVerify = sslverify + } else { + cfg.SSLVerify = false + } // Try to connect to the LDAP server, to validate the URL configuration // We can also check the URL at this stage, as anything else would probably @@ -122,10 +134,11 @@ func (b *backend) pathConfigWrite( } type ConfigEntry struct { - Url string - UserDN string - GroupDN string - UserAttr string + Url string + UserDN string + GroupDN string + UserAttr string + SSLVerify bool } func (c *ConfigEntry) DialLDAP() (*ldap.Conn, error) { @@ -150,7 +163,13 @@ func (c *ConfigEntry) DialLDAP() (*ldap.Conn, error) { if port == "" { port = "636" } - conn, err = ldap.DialTLS("tcp", host+":"+port, nil) + tlsConfig := tls.Config{} + if c.SSLVerify { + tlsConfig = tls.Config{InsecureSkipVerify: false} + } else { + tlsConfig = tls.Config{InsecureSkipVerify: true} + } + conn, err = ldap.DialTLS("tcp", host+":"+port, &tlsConfig) default: return nil, fmt.Errorf("invalid LDAP scheme") }
allow skipping SSL verification on ldap auth
hashicorp_vault
train
c7df6c50a6b893a7fe101f5b70480a9f2a05c6fd
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1 +1 @@ -require 'lib/impressionist' +
Rspec added to impressionist dir, so we can test in isolation
charlotte-ruby_impressionist
train
2c4c3df0138aaff032d82d570f24577574788efa
diff --git a/lib/slim/compiler.rb b/lib/slim/compiler.rb index <HASH>..<HASH> 100644 --- a/lib/slim/compiler.rb +++ b/lib/slim/compiler.rb @@ -69,13 +69,13 @@ module Slim on_slim_output(escape, tmp, [:multi])] end - # Handle directive expression `[:slim, :directive, type]` + # Handle directive expression `[:slim, :directive, type, args]` # # @param [String] type Directive type # @return [Array] Compiled temple expression - def on_slim_directive(type) - if type =~ /^doctype/i - [:html, :doctype, $'.strip] + def on_slim_directive(type, args) + if type == 'doctype' + [:html, :doctype, args] end end diff --git a/lib/slim/parser.rb b/lib/slim/parser.rb index <HASH>..<HASH> 100644 --- a/lib/slim/parser.rb +++ b/lib/slim/parser.rb @@ -205,7 +205,8 @@ module Slim stacks << block when ?! # Found a directive (currently only used for doctypes) - stacks.last << [:slim, :directive, line[1..-1].strip] + directive = line[1..-1].strip.split(/\s+/, 2) + stacks.last << [:slim, :directive, directive[0].downcase, directive[1]] else if line =~ /^(\w+):\s*$/ # Embedded template detected. It is treated as block.
slim directive expression has type and args before: [:slim, :directive, directive] now: [:slim, :directive, type, args]
slim-template_slim
train
60af42ae8755e621d1c89ff1489aa8ce7ca4cee4
diff --git a/penaltymodel/classes/binary_quadratic_model.py b/penaltymodel/classes/binary_quadratic_model.py index <HASH>..<HASH> 100644 --- a/penaltymodel/classes/binary_quadratic_model.py +++ b/penaltymodel/classes/binary_quadratic_model.py @@ -4,6 +4,8 @@ BinaryQuadraticModel """ from __future__ import absolute_import +import itertools + from six import itervalues, iteritems from penaltymodel.classes.vartypes import Vartype @@ -240,9 +242,24 @@ class BinaryQuadraticModel(object): to new ones. If an incomplete mapping is provided, variables will keep their labels copy (bool, default): If True, return a copy of BinaryQuadraticModel - with the variables relabelled, otherwise apply the relabelling in + with the variables relabeled, otherwise apply the relabeling in place. + Returns: + :class:`.BinaryQuadraticModel`: A BinaryQuadraticModel with the + variables relabelled. If copy=False, returns itself. + + Examples: + >>> model = pm.BinaryQuadraticModel({0: 0., 1: 1.}, {(0, 1): -1}, 0.0, vartype=pm.SPIN) + >>> new_model = model.relabel_variables({0: 'a'}) + >>> new_model.quadratic + {('a', 1): -1} + >>> new_model = model.relabel_variables({0: 'a', 1: 'b'}, copy=False) + >>> model.quadratic + {('a', 'b'): -1} + >>> new_model is model + True + """ try: old_labels = set(mapping.keys()) @@ -256,15 +273,48 @@ class BinaryQuadraticModel(object): for (u, v), bias in iteritems(self.quadratic)}, self.offset, self.vartype) else: - if old_labels & new_labels: - raise ValueError(("new and old labels cannot overlap for in-place relabelling, use copy=True " - "instead. Note that nodes not explicitly referenced in mapping are mapped " - "to themselves")) + shared = old_labels & new_labels + if shared: + # in this case relabel to a new intermediate labeling, then map from the intermediate + # labeling to the desired labeling + + # counter will be used to generate the intermediate labels, as an easy optimization + # we start the counter with a high number because often variables are labeled by + # integers starting from 0 + counter = itertools.count(2 * len(self)) + + old_to_intermediate = {} + intermediate_to_new = {} + + for old, new in iteritems(mapping): + if old == new: + # we can remove self-labels + continue + + if old in new_labels or new in old_labels: + + # try to get a new unique label + lbl = next(counter) + while lbl in new_labels or lbl in old_labels: + lbl = next(counter) + + # add it to the mapping + old_to_intermediate[old] = lbl + intermediate_to_new[lbl] = new + + else: + old_to_intermediate[old] = new + # don't need to add it to intermediate_to_new because it is a self-label + + self.relabel_variables(old_to_intermediate, copy=False) + self.relabel_variables(intermediate_to_new, copy=False) + return self linear = self.linear quadratic = self.quadratic adj = self.adj + # rebuild linear and adj with the new labels for old in list(linear): if old not in mapping: continue diff --git a/penaltymodel/classes/tests/test_binary_quadratic_model.py b/penaltymodel/classes/tests/test_binary_quadratic_model.py index <HASH>..<HASH> 100644 --- a/penaltymodel/classes/tests/test_binary_quadratic_model.py +++ b/penaltymodel/classes/tests/test_binary_quadratic_model.py @@ -293,6 +293,41 @@ class TestBinaryQuadraticModel(unittest.TestCase): self.assertEqual(model.adj, testmodel.adj) + def test_relabel_with_overlap(self): + linear = {v: .1 * v for v in range(-5, 4)} + quadratic = {(u, v): .1 * u * v for u, v in itertools.combinations(linear, 2)} + offset = 1.2 + vartype = pm.SPIN + model = 
pm.BinaryQuadraticModel(linear, quadratic, offset, vartype) + + partial_overlap_mapping = {v: -v for v in linear} # has variables mapped to other old labels + + # construct a test model by using copy + testmodel = model.relabel_variables(partial_overlap_mapping, copy=True) + + # now apply in place + model.relabel_variables(partial_overlap_mapping, copy=False) + + # should have stayed the same + self.assertEqual(testmodel, model) + self.assertEqual(testmodel.adj, model.adj) + + def test_relabel_with_identity(self): + linear = {v: .1 * v for v in range(-5, 4)} + quadratic = {(u, v): .1 * u * v for u, v in itertools.combinations(linear, 2)} + offset = 1.2 + vartype = pm.SPIN + model = pm.BinaryQuadraticModel(linear, quadratic, offset, vartype) + old_model = model.copy() + + identity_mapping = {v: v for v in linear} + + model.relabel_variables(identity_mapping, copy=False) + + # should have stayed the same + self.assertEqual(old_model, model) + self.assertEqual(old_model.adj, model.adj) + def test_partial_relabel_copy(self): linear = {v: .1 * v for v in range(-5, 5)} quadratic = {(u, v): .1 * u * v for u, v in itertools.combinations(linear, 2)}
Allow in-place relabelling with label overlap
dwavesystems_penaltymodel
train
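The penaltymodel commit handles overlapping old/new labels by relabeling twice: clashing variables first move to fresh intermediate labels, then the intermediates map to their final names. A minimal list-based sketch of the same idea; the `relabel` helper is illustrative, not the library API:

```python
# Two-pass relabeling: route clashing labels through unique intermediates so
# an in-place swap like {0: 1, 1: 0} cannot collide with itself.
import itertools

def _apply(labels, mapping):
    return [mapping.get(l, l) for l in labels]

def relabel(labels, mapping):
    old, new = set(mapping), set(mapping.values())
    counter = itertools.count(2 * len(labels))  # start high, as in the commit
    old_to_mid, mid_to_new = {}, {}
    for o, n in mapping.items():
        if o == n:
            continue  # self-labels can be dropped
        if o in new or n in old:
            mid = next(counter)
            while mid in new or mid in old:
                mid = next(counter)
            old_to_mid[o], mid_to_new[mid] = mid, n
        else:
            old_to_mid[o] = n
    return _apply(_apply(labels, old_to_mid), mid_to_new)

print(relabel([0, 1, 2], {0: 1, 1: 0}))  # [1, 0, 2]
```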
c3590c7b9e4e5b0eb6492fd08281cccf45c13749
diff --git a/ait/server/client.py b/ait/server/client.py index <HASH>..<HASH> 100644 --- a/ait/server/client.py +++ b/ait/server/client.py @@ -27,6 +27,7 @@ class Client(gevent.Greenlet): log.info('{} {} open to recieving messages'.format(self.type, self.name)) while True: + gevent.sleep(0) string = self.sub.recv() topic, messagedata = string.split() log.info('%s %s recieved message \"%s\" from %s' diff --git a/ait/server/plugin.py b/ait/server/plugin.py index <HASH>..<HASH> 100644 --- a/ait/server/plugin.py +++ b/ait/server/plugin.py @@ -5,21 +5,15 @@ from client import Client import ait -class Plugin(Client, gevent.Greenlet): +class Plugin(Client): def __init__(self, inputs, zmq_args=None, **kwargs): - if zmq_args is None: - zmq_args = {'context': ait.broker.context, - 'XSUB_URL': ait.broker.XSUB_URL, - 'XPUB_URL': ait.broker.XPUB_URL} - self.type = 'Plugin' self.name = type(self).__name__ self.inputs = inputs for key, value in kwargs.items(): setattr(self, key, value) - gevent.Greenlet.__init__(self) super(Plugin, self).__init__(zmq_args) def __repr__(self): @@ -31,15 +25,3 @@ class Plugin(Client, gevent.Greenlet): 'that inherits from this abstract plugin. This abstract Plugin ' 'class should not be instantiated. This process method will be ' 'called whenever a message is received by the plugin.')) - - def _run(self): - while True: - self.run() - gevent.sleep(1) - - def run(self): - raise NotImplementedError(( - 'This run method can be implemented by a custom plugin class ' - 'that inherits from this abstract plugin. This abstract Plugin ' - 'class should not be instantiated. This run method will be ' - 'run indefinitely in a while True loop in a Greenlet.')) diff --git a/ait/server/plugins/ait_gui_plugin.py b/ait/server/plugins/ait_gui_plugin.py index <HASH>..<HASH> 100644 --- a/ait/server/plugins/ait_gui_plugin.py +++ b/ait/server/plugins/ait_gui_plugin.py @@ -232,8 +232,10 @@ class AitGuiPlugin(Plugin): pass def init_and_wait(self): + print("initializing gui plugin") self.init() - self.wait() + #self.wait() + print("done initializing gui plugin") def getBrowserName(browser): return getattr(browser, 'name', getattr(browser, '_name', '(none)')) diff --git a/ait/server/server.py b/ait/server/server.py index <HASH>..<HASH> 100644 --- a/ait/server/server.py +++ b/ait/server/server.py @@ -26,6 +26,7 @@ class AitServer(object): self.start_all_greenlets() def start_all_greenlets(self): + print("starting greenlets") for greenlet in self.greenlets: greenlet.start() @@ -35,7 +36,7 @@ class AitServer(object): def main(): server = AitServer() - + print("done creating server") try: import time diff --git a/ait/server/stream.py b/ait/server/stream.py index <HASH>..<HASH> 100644 --- a/ait/server/stream.py +++ b/ait/server/stream.py @@ -6,11 +6,6 @@ from client import Client class Stream(Client): def __init__(self, name, input_, handlers, zmq_args=None): - if zmq_args is None: - zmq_args = {'context': ait.broker.context, - 'XSUB_URL': ait.broker.XSUB_URL, - 'XPUB_URL': ait.broker.XPUB_URL} - self.name = name self.input_ = input_ self.handlers = handlers
GUI Issue #<I> - Further cleanup
NASA-AMMOS_AIT-Core
train
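The AIT change inserts `gevent.sleep(0)` at the top of a receive loop, a cooperative yield that lets other greenlets run between iterations instead of one loop monopolizing the hub. A tiny demonstration of the effect:

```python
# Without the sleep(0), worker "a" would run all its iterations before "b"
# starts; the zero-length sleep yields to the hub so the two interleave.
import gevent

def worker(name, iterations=3):
    for i in range(iterations):
        print(name, i)
        gevent.sleep(0)  # cooperative yield point

gevent.joinall([gevent.spawn(worker, "a"), gevent.spawn(worker, "b")])
```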
c6ada32047079c3ebd93c4dc42621b961b572d82
diff --git a/lib/player.js b/lib/player.js index <HASH>..<HASH> 100644 --- a/lib/player.js +++ b/lib/player.js @@ -657,8 +657,14 @@ Player.prototype.importUrl = function(urlString, cb) { ytdl.getInfo(urlString, gotYouTubeInfo); } else { var remoteFilename = path.basename(parsedUrl.pathname); + var decodedFilename; + try { + decodedFilename = decodeURI(remoteFilename); + } catch (err) { + decodedFilename = remoteFilename; + } var req = superagent.get(urlString); - handleDownload(req, remoteFilename); + handleDownload(req, decodedFilename); } function gotYouTubeInfo(err, info) {
importURL: URI decode filename Fixes _<I> etc showing up in filenames.
andrewrk_groovebasin
train
affa6b98bfa22542e6bb9a88254c4e5dcbbb5326
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import find_packages, setup from teletype import VERSION -with open("readme.md", "r") as fp: +with open("readme.md", "r", encoding="utf8") as fp: LONG_DESCRIPTION = fp.read() setup(
Explicitly specifies file encoding as UTF-8 for Windows
jkwill87_teletype
train
645c1b8573dd051da208ccd38c3bc4c5517fbccb
diff --git a/src/Api/Platform.php b/src/Api/Platform.php index <HASH>..<HASH> 100644 --- a/src/Api/Platform.php +++ b/src/Api/Platform.php @@ -43,7 +43,7 @@ class Platform extends Base { { $type = ucfirst(strtolower($type)); if (!isset(self::$instances[$type])) { - $class = 'Platform_' . $type; + $class = 'JFusion\\Api\\Platform\\' . $type; self::$instances[$type] = new $class(); } return self::$instances[$type];
Changed: class layout to match correct auto loader.
jfusion_org.jfusion.framework
train
39be5cde2cdb943fceb65a100edf15a0b4f8e200
diff --git a/lib/drafter/draft.rb b/lib/drafter/draft.rb index <HASH>..<HASH> 100644 --- a/lib/drafter/draft.rb +++ b/lib/drafter/draft.rb @@ -14,11 +14,15 @@ class Draft < ActiveRecord::Base # serialize :data, Hash + # Approve a draft, setting the attributes of the draftable object + # to contain the draft content, saving the draftable, and + # destroying the draft. + # def approve! - draftable = self.draftable_type.constantize.new + draftable = build_draftable draftable_columns.each do |key| draftable.send("#{key}=", self.data[key]) - end + end draftable.save! self.destroy draftable @@ -26,6 +30,10 @@ class Draft < ActiveRecord::Base private + def build_draftable + draftable.nil? ? self.draftable_type.constantize.new : draftable + end + def draftable_columns self.data.keys - ['id', 'created_at', 'updated_at'] end diff --git a/test/drafter/test_draft.rb b/test/drafter/test_draft.rb index <HASH>..<HASH> 100644 --- a/test/drafter/test_draft.rb +++ b/test/drafter/test_draft.rb @@ -45,15 +45,22 @@ class TestDraft < Minitest::Unit::TestCase end end - describe "for an object which already exists" do + describe "for an article which already exists" do before do @article.save + @article_count = Article.count @article.text = "some draft text" - @article.save_draft + @draft = @article.save_draft + @draft.approve! + end + + it "shouldn't do anything mental, like creating a new object" do + assert_equal(@article_count, Article.count) end - it "should save the object" - it "should properly populate all the attributes" + it "should properly populate all the attributes" do + assert_equal("some draft text", @article.text) + end end end
We can now retrieve the draft and save it happily.
futurechimp_drafter
train
a0e98829d5a14895d6ce974115da905a3b525793
diff --git a/lib/puppet/file_system.rb b/lib/puppet/file_system.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/file_system.rb +++ b/lib/puppet/file_system.rb @@ -157,6 +157,15 @@ module Puppet::FileSystem @impl.directory?(assert_path(path)) end + # Determines if a file is a file. + # + # @return [Boolean] true if the given file is a file. + # + # @api public + def self.file?(path) + @impl.file?(assert_path(path)) + end + # Determines if a file is executable. # # @todo Should this take into account extensions on the windows platform? diff --git a/lib/puppet/file_system/file_impl.rb b/lib/puppet/file_system/file_impl.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/file_system/file_impl.rb +++ b/lib/puppet/file_system/file_impl.rb @@ -87,6 +87,10 @@ class Puppet::FileSystem::FileImpl ::File.directory?(path) end + def file?(path) + ::File.file?(path) + end + def executable?(path) ::File.executable?(path) end diff --git a/lib/puppet/file_system/memory_impl.rb b/lib/puppet/file_system/memory_impl.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/file_system/memory_impl.rb +++ b/lib/puppet/file_system/memory_impl.rb @@ -11,6 +11,10 @@ class Puppet::FileSystem::MemoryImpl path.directory? end + def file?(path) + path.file? + end + def executable?(path) path.executable? end diff --git a/spec/unit/file_system_spec.rb b/spec/unit/file_system_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/file_system_spec.rb +++ b/spec/unit/file_system_spec.rb @@ -149,6 +149,10 @@ describe "Puppet::FileSystem" do Puppet::FileSystem.exist?(file).should be_true end + it "should return true for file? on a present file" do + Puppet::FileSystem.file?(file).should be_true + end + it "should return false for exist? on a non-existant file" do Puppet::FileSystem.exist?(missing_file).should be_false end
(maint) Implement FileSystem.file? for consistency The filesystem abstraction implements `.directory?` but not `.file?` which is somewhat inconsistent. This commit adds that method and uses the same behavior implemented by `.directory?`.
puppetlabs_puppet
train
7373406f5ee05921d837963e2483200f7bec91e4
diff --git a/src/Analysers/EnvironmentAnalyser.php b/src/Analysers/EnvironmentAnalyser.php index <HASH>..<HASH> 100644 --- a/src/Analysers/EnvironmentAnalyser.php +++ b/src/Analysers/EnvironmentAnalyser.php @@ -43,7 +43,7 @@ class EnvironmentAnalyser return ''; } - return $this->versionResolver->pollForVersion( + return $this->versionResolver->pollForExecutableVersion( $binaryPaths[$platformCode], $this->pluginConfig->getBrowserVersionPollingConfig() ); diff --git a/src/Analysers/ProjectAnalyser.php b/src/Analysers/ProjectAnalyser.php index <HASH>..<HASH> 100644 --- a/src/Analysers/ProjectAnalyser.php +++ b/src/Analysers/ProjectAnalyser.php @@ -49,11 +49,6 @@ class ProjectAnalyser * @var \Vaimo\WebDriverBinaryDownloader\Utils\DataUtils */ private $dataUtils; - - /** - * @var \Vaimo\WebDriverBinaryDownloader\Utils\StringUtils - */ - private $stringUtils; /** * @var \Composer\Package\CompletePackage @@ -85,7 +80,6 @@ class ProjectAnalyser $this->systemUtils = new \Vaimo\WebDriverBinaryDownloader\Utils\SystemUtils(); $this->dataUtils = new \Vaimo\WebDriverBinaryDownloader\Utils\DataUtils(); - $this->stringUtils = new \Vaimo\WebDriverBinaryDownloader\Utils\StringUtils(); } public function resolvePlatformSupport() @@ -131,7 +125,7 @@ class ProjectAnalyser ); } - $installedVersion = $this->versionResolver->pollForVersion( + $installedVersion = $this->versionResolver->pollForExecutableVersion( $binaries, $this->pluginConfig->getDriverVersionPollingConfig() ); @@ -170,27 +164,11 @@ class ProjectAnalyser ConfigInterface::REQUEST_VERSION, array() ); - - $versionCheckUrls = $this->dataUtils->assureArrayValue($versionCheckUrls); - - $browserVersion = $this->resolveBrowserVersion(); - $variables = array( - 'major' => $this->stringUtils->strTokOffset($browserVersion, 1), - 'major-minor' => $this->stringUtils->strTokOffset($browserVersion, 2) + $version = $this->versionResolver->pollForDriverVersion( + $versionCheckUrls, + $this->resolveBrowserVersion() ); - - foreach ($versionCheckUrls as $versionCheckUrl) { - if ($version) { - break; - } - - $version = trim( - @file_get_contents( - $this->stringUtils->stringFromTemplate($versionCheckUrl, $variables) - ) - ); - } } if (!$version) { diff --git a/src/Resolvers/VersionResolver.php b/src/Resolvers/VersionResolver.php index <HASH>..<HASH> 100644 --- a/src/Resolvers/VersionResolver.php +++ b/src/Resolvers/VersionResolver.php @@ -13,6 +13,11 @@ class VersionResolver private $versionParser; /** + * @var \Vaimo\WebDriverBinaryDownloader\Utils\StringUtils + */ + private $stringUtils; + + /** * @var \Vaimo\WebDriverBinaryDownloader\Utils\DataUtils */ private $dataUtils; @@ -20,11 +25,12 @@ class VersionResolver public function __construct() { $this->versionParser = new \Composer\Package\Version\VersionParser(); - + + $this->stringUtils = new \Vaimo\WebDriverBinaryDownloader\Utils\StringUtils(); $this->dataUtils = new \Vaimo\WebDriverBinaryDownloader\Utils\DataUtils(); } - public function pollForVersion(array $binaryPaths, array $versionPollingConfig) + public function pollForExecutableVersion(array $binaryPaths, array $versionPollingConfig) { $processExecutor = new \Composer\Util\ProcessExecutor(); @@ -74,4 +80,30 @@ class VersionResolver return ''; } + + public function pollForDriverVersion(array $versionCheckUrls, $browserVersion) + { + $versionCheckUrls = $this->dataUtils->assureArrayValue($versionCheckUrls); + + $variables = array( + 'major' => $this->stringUtils->strTokOffset($browserVersion, 1), + 'major-minor' => 
$this->stringUtils->strTokOffset($browserVersion, 2) + ); + + $version = null; + + foreach ($versionCheckUrls as $versionCheckUrl) { + if ($version) { + break; + } + + $version = trim( + @file_get_contents( + $this->stringUtils->stringFromTemplate($versionCheckUrl, $variables) + ) + ); + } + + return $version; + } } diff --git a/src/Utils/StringUtils.php b/src/Utils/StringUtils.php index <HASH>..<HASH> 100644 --- a/src/Utils/StringUtils.php +++ b/src/Utils/StringUtils.php @@ -26,7 +26,7 @@ class StringUtils public function strTokOffset($value, $offset) { try { - $cutOff = strpos($value, '.', $offset); + $cutOff = strpos($value, '.', $offset); } catch (\Exception $exception) { $cutOff = 0; }
code architecture fixed to address issues reported by automatic mess detectors
vaimo_webdriver-binary-downloader
train
bcf8ef3aa4e9eb7c8bf94f4febcf2d5d1a376a00
diff --git a/src/Core/Migration/Migration1554447846NumberRangeTranslationAndConfiguration.php b/src/Core/Migration/Migration1554447846NumberRangeTranslationAndConfiguration.php index <HASH>..<HASH> 100644 --- a/src/Core/Migration/Migration1554447846NumberRangeTranslationAndConfiguration.php +++ b/src/Core/Migration/Migration1554447846NumberRangeTranslationAndConfiguration.php @@ -46,8 +46,8 @@ class Migration1554447846NumberRangeTranslationAndConfiguration extends Migratio 'nameDe' => 'Produkte', 'global' => 1, 'typeId' => $definitionNumberRangeTypes['product']['id'], - 'pattern' => '{n}', - 'start' => 100000, + 'pattern' => 'SW{n}', + 'start' => 10000, ], 'order' => [ 'id' => Uuid::randomHex(), @@ -56,7 +56,7 @@ class Migration1554447846NumberRangeTranslationAndConfiguration extends Migratio 'global' => 1, 'typeId' => $definitionNumberRangeTypes['order']['id'], 'pattern' => '{n}', - 'start' => 1000, + 'start' => 10000, ], 'customer' => [ 'id' => Uuid::randomHex(), @@ -65,7 +65,7 @@ class Migration1554447846NumberRangeTranslationAndConfiguration extends Migratio 'global' => 1, 'typeId' => $definitionNumberRangeTypes['customer']['id'], 'pattern' => '{n}', - 'start' => 1000, + 'start' => 10000, ], ]; @@ -127,7 +127,7 @@ SQL; `id` BINARY(16) NOT NULL, `number_range_id` BINARY(16) NOT NULL, `last_value` INTEGER(8) NOT NULL, - PRIMARY KEY (`id`), + PRIMARY KEY (`number_range_id`), INDEX `idx.number_range_id` (`number_range_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; SQL; diff --git a/src/Core/System/NumberRange/ValueGenerator/NumberRangeValueGenerator.php b/src/Core/System/NumberRange/ValueGenerator/NumberRangeValueGenerator.php index <HASH>..<HASH> 100644 --- a/src/Core/System/NumberRange/ValueGenerator/NumberRangeValueGenerator.php +++ b/src/Core/System/NumberRange/ValueGenerator/NumberRangeValueGenerator.php @@ -84,7 +84,7 @@ class NumberRangeValueGenerator implements NumberRangeValueGeneratorInterface [ new MultiFilter( MultiFilter::CONNECTION_AND, [ - new EqualsFilter('number_range.numberRangeSalesChannels.id', $salesChannelId), + new EqualsFilter('number_range.numberRangeSalesChannels.salesChannelId', $salesChannelId), new EqualsFilter('number_range.type.technicalName', $definition), ] ), @@ -111,7 +111,7 @@ class NumberRangeValueGenerator implements NumberRangeValueGeneratorInterface $criteria->addFilter( new MultiFilter( MultiFilter::CONNECTION_AND, [ - new EqualsFilter('number_range.numberRangeSalesChannels.id', null), + new EqualsFilter('number_range.global', 1), new EqualsFilter('number_range.type.technicalName', $definition), ] )
NTR - bugfix for number range selection
shopware_platform
train
3c2f9d4cc58a11c354cee4994f43e4668babe47b
diff --git a/src/Media/Admin.php b/src/Media/Admin.php index <HASH>..<HASH> 100644 --- a/src/Media/Admin.php +++ b/src/Media/Admin.php @@ -193,11 +193,11 @@ class Admin extends Hookable /** * The AJAX handler for deleting a dynamic image size. * - * @param \WP_Post $post The current attachment. - * @param array $attachment_data The POST data passed from the quest. - * @return \WP_Post + * @param array $post The current attachment. + * @param array $attachment_data The POST data passed from the quest. + * @return array */ - public function handleDeleteIntermediateAjax(WP_Post $post, array $attachment_data): WP_Post + public function handleDeleteIntermediateAjax(array $post, array $attachment_data): array { if (isset($attachment_data['delete-intermediate']) && !empty($attachment_data['delete-intermediate'])) { $sizes = $attachment_data['delete-intermediate']; @@ -207,6 +207,9 @@ class Admin extends Hookable return $post; } + /** + * The ajax handler for deleting images from the dynamic images admin screen. + */ public function handleBulkDeleteSizesAjax() { \set_time_limit(0);
fix minor bug with image size delete handlers
snapwp_snap-core
train
4ee411c63bd7ee5fc02122b560eb36c22064d8ff
diff --git a/src/sap.ui.core/src/sap/ui/core/routing/Targets.js b/src/sap.ui.core/src/sap/ui/core/routing/Targets.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.core/src/sap/ui/core/routing/Targets.js +++ b/src/sap.ui.core/src/sap/ui/core/routing/Targets.js @@ -1,4 +1,6 @@ -// Copyright (c) 2013 SAP SE, All Rights Reserved +/*! + * ${copyright} + */ sap.ui.define(['jquery.sap.global', 'sap/ui/base/EventProvider', './Target', './async/Targets', './sync/Targets'], function($, EventProvider, Target, asyncTargets, syncTargets) { "use strict";
[INTERNAL] sap.ui.core.routing.Targets: fix copyright notice. Replace comment with placeholder which will be replaced during the release build. Change-Id: I5b<I>c1e<I>b<I>ca<I>f6b<I>f
SAP_openui5
train
138abeb5f76fcf265529d815dcf11aad32f00d7e
diff --git a/api/acl.go b/api/acl.go index <HASH>..<HASH> 100644 --- a/api/acl.go +++ b/api/acl.go @@ -22,7 +22,7 @@ type ACLEntry struct { Rules string } -// ACLReplicationEntry is used to represent an ACLReplication entry +// ACLReplicationStatus is used to represent the status of ACL replication. type ACLReplicationEntry struct { Enabled bool Running bool @@ -154,7 +154,7 @@ func (a *ACL) List(q *QueryOptions) ([]*ACLEntry, *QueryMeta, error) { } // Replication returns the status of the ACL replication process in the datacenter -func (a *ACL) Replication(q *QueryOptions) (*ACLReplicationEntry, *QueryMeta, error) { +func (a *ACL) Replication(q *QueryOptions) (*ACLReplicationStatus, *QueryMeta, error) { r := a.c.newRequest("GET", "/v1/acl/replication") r.setQueryOptions(q) rtt, resp, err := requireOK(a.c.doRequest(r)) @@ -167,7 +167,7 @@ func (a *ACL) Replication(q *QueryOptions) (*ACLReplicationEntry, *QueryMeta, er parseQueryMeta(resp, qm) qm.RequestTime = rtt - var entries *ACLReplicationEntry + var entries *ACLReplicationStatus if err := decodeBody(resp, &entries); err != nil { return nil, nil, err }
Tweaks ACL replication status struct name.
hashicorp_consul
train
4015c6390d3dbdfe9edd49c1542fd35fc454e040
diff --git a/kube_dns/datadog_checks/kube_dns/kube_dns.py b/kube_dns/datadog_checks/kube_dns/kube_dns.py index <HASH>..<HASH> 100644 --- a/kube_dns/datadog_checks/kube_dns/kube_dns.py +++ b/kube_dns/datadog_checks/kube_dns/kube_dns.py @@ -19,7 +19,7 @@ class KubeDNSCheck(OpenMetricsBaseCheck): # Set up metric_transformers METRIC_TRANSFORMERS = {} - def __init__(self, name, init_config, agentConfig, instances=None): + def __init__(self, name, init_config, instances): # Set up metric_transformers self.METRIC_TRANSFORMERS = { 'kubedns_kubedns_dns_request_count_total': self.kubedns_kubedns_dns_request_count_total, @@ -35,7 +35,7 @@ class KubeDNSCheck(OpenMetricsBaseCheck): if instances is not None: generic_instances = self.create_generic_instances(instances) - super(KubeDNSCheck, self).__init__(name, init_config, agentConfig, instances=generic_instances) + super(KubeDNSCheck, self).__init__(name, init_config, instances=generic_instances) def check(self, instance): endpoint = instance.get('prometheus_endpoint') diff --git a/kube_dns/tests/test_kube_dns.py b/kube_dns/tests/test_kube_dns.py index <HASH>..<HASH> 100644 --- a/kube_dns/tests/test_kube_dns.py +++ b/kube_dns/tests/test_kube_dns.py @@ -63,7 +63,7 @@ class TestKubeDNS: Testing kube_dns check. """ - check = KubeDNSCheck('kube_dns', {}, {}, [instance]) + check = KubeDNSCheck('kube_dns', {}, [instance]) check.check(instance) # check that we then get the count metrics also
Use agent 6 signature (#<I>)
DataDog_integrations-core
train
c638baf173c84a0dbf056a542f44cef91f028456
diff --git a/src/Expression.php b/src/Expression.php index <HASH>..<HASH> 100644 --- a/src/Expression.php +++ b/src/Expression.php @@ -33,7 +33,10 @@ class Expression implements Arrayable { return new self; } - + /** + * Return a expression + * @return \Sokil\Mongo\Cursor|\Sokil\Mongo\Expression + */ public function where($field, $value) { if(!isset($this->_expression[$field]) || !is_array($value) || !is_array($this->_expression[$field])) {
Changes for autocomplete in IDE
sokil_php-mongo
train
014cecda464fb1171e613676e0a8a85c02ec1674
diff --git a/integration-cli/check_test.go b/integration-cli/check_test.go index <HASH>..<HASH> 100644 --- a/integration-cli/check_test.go +++ b/integration-cli/check_test.go @@ -79,6 +79,9 @@ type DockerSuite struct { } func (s *DockerSuite) OnTimeout(c *check.C) { + if !testEnv.IsLocalDaemon() { + return + } path := filepath.Join(os.Getenv("DEST"), "docker.pid") b, err := ioutil.ReadFile(path) if err != nil { @@ -91,7 +94,7 @@ func (s *DockerSuite) OnTimeout(c *check.C) { } daemonPid := int(rawPid) - if daemonPid > 0 && testEnv.IsLocalDaemon() { + if daemonPid > 0 { daemon.SignalDaemonDump(daemonPid) } }
Only attempt to find pid with local daemon
moby_moby
train
956618c462cad89724c12e95e56e78669f893128
diff --git a/github.js b/github.js index <HASH>..<HASH> 100644 --- a/github.js +++ b/github.js @@ -437,6 +437,21 @@ }); }; + // Create the gist + // -------- + // { + // "description": "the description for this gist", + // "public": true, + // "files": { + // "file1.txt": { + // "content": "String file contents" + // } + // } + // } + + this.create = function(options, cb){ + _request("POST","/gists", options, cb); + }; // Delete the gist // --------
Added ability to create a gist, like this: <URL>
github-tools_github
train
15ffd962bfedbaa4614d61b91f0d9fbcaa28594e
diff --git a/lib/sandbox.js b/lib/sandbox.js index <HASH>..<HASH> 100644 --- a/lib/sandbox.js +++ b/lib/sandbox.js @@ -594,6 +594,7 @@ Sandbox.prototype.updateWebtask = function (options, cb) { * * @param {Object} options - Options * @param {String} [options.container] - Set the webtask container. Defaults to the profile's container. + * @param {Boolean} [options.fetch_code] - Include the webtask's code in the listing response. * @param {Function} [cb] - Optional callback function for node-style callbacks. * @return {Promise} A Promise that will be fulfilled with an array of Webtasks */ @@ -614,12 +615,18 @@ Sandbox.prototype.listWebtasks = function (options, cb) { request.query({ meta: m + ':' + options.meta[m] }); } } + if (options.fetch_code) request.query({ fetch_code: true }); var self = this; var promise = this.issueRequest(request) .get('body') .map(function (webtask) { - return new Webtask(self, webtask.token, { name: webtask.name, meta: webtask.meta, webtask_url: webtask.webtask_url }); + return new Webtask(self, webtask.token, { + code: webtask.code, + name: webtask.name, + meta: webtask.meta, + webtask_url: webtask.webtask_url, + }); }); return cb ? promise.nodeify(cb) : promise;
feat: Allow fetching code when listing webtasks
auth0_sandboxjs
train
9c3ac285410964e5f66e19668e1225bd60cd3c7b
diff --git a/executor/prepared.go b/executor/prepared.go index <HASH>..<HASH> 100644 --- a/executor/prepared.go +++ b/executor/prepared.go @@ -332,6 +332,7 @@ func CompileExecutePreparedStmt(ctx context.Context, ID uint32, args ...interfac func ResetStmtCtx(ctx context.Context, s ast.StmtNode) { sessVars := ctx.GetSessionVars() sc := new(variable.StatementContext) + sc.TimeZone = sessVars.GetTimeZone() switch s.(type) { case *ast.UpdateStmt, *ast.InsertStmt, *ast.DeleteStmt: sc.IgnoreTruncate = false diff --git a/sessionctx/variable/session.go b/sessionctx/variable/session.go index <HASH>..<HASH> 100644 --- a/sessionctx/variable/session.go +++ b/sessionctx/variable/session.go @@ -329,6 +329,9 @@ type StatementContext struct { foundRows uint64 warnings []error } + + // Copied from SessionVars.TimeZone + TimeZone *time.Location } // AddAffectedRows adds affected rows.
sessionctx/variable: store a copy of timezone in StatementContext (#<I>) There are many places that need access to the timezone, and they come with a StatementContext argument. So put a copy of the timezone into StatementContext to achieve the goal with minimal changes.
pingcap_tidb
train
5c11159759b40dfc5a22c658988d22d7a18f4414
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -202,8 +202,8 @@ function randomPutHttp(theUrl, size, callback) { //discard }); res.on('end', function() { - //discard data - callback(null, size); //return original size + // Some cases (like HTTP 413) will interrupt the upload, but still return a response + callback(null, size - toSend); }); });
Handle when the server terminates the upload early. (#<I>) Stopgap
ddsol_speedtest.net
train
244cc15ccf097284f5596d5fa96ec63dcbc33fd0
diff --git a/src/Support/Http/Controllers/GraphQLController.php b/src/Support/Http/Controllers/GraphQLController.php index <HASH>..<HASH> 100644 --- a/src/Support/Http/Controllers/GraphQLController.php +++ b/src/Support/Http/Controllers/GraphQLController.php @@ -2,15 +2,15 @@ namespace Nuwave\Lighthouse\Support\Http\Controllers; +use GraphQL\Executor\ExecutionResult; use Illuminate\Http\Request; use Illuminate\Http\Response; -use Nuwave\Lighthouse\GraphQL; use Illuminate\Routing\Controller; -use GraphQL\Executor\ExecutionResult; +use Nuwave\Lighthouse\GraphQL; use Nuwave\Lighthouse\Schema\Context; -use Nuwave\Lighthouse\Schema\MiddlewareRegistry; -use Nuwave\Lighthouse\Schema\Extensions\ExtensionRequest; use Nuwave\Lighthouse\Schema\Extensions\ExtensionRegistry; +use Nuwave\Lighthouse\Schema\Extensions\ExtensionRequest; +use Nuwave\Lighthouse\Schema\MiddlewareRegistry; class GraphQLController extends Controller { @@ -18,7 +18,7 @@ class GraphQLController extends Controller protected $graphQL; /** @var bool */ - protected $batched; + protected $batched = false; /** * Inject middleware into request. @@ -35,30 +35,32 @@ class GraphQLController extends Controller GraphQL $graphQL ) { $this->graphQL = $graphQL; - $this->batched = isset($request[0]) && config('lighthouse.batched_queries', true); - - $extensionRegistry->requestDidStart( - new ExtensionRequest($request, $this->batched) - ); - $graphQL->prepSchema(); + if ($request->route()) { + $this->batched = isset($request[0]) && config('lighthouse.batched_queries', true); - $middleware = ! $this->batched - ? $middlewareRegistry->forRequest($request->input('query')) - : array_reduce( - $request->toArray(), - function ($middleware, $req) use ($middlewareRegistry) { - $query = array_get($req, 'query', ''); - - return array_merge( - $middleware, - $middlewareRegistry->forRequest($query) - ); - }, - [] + $extensionRegistry->requestDidStart( + new ExtensionRequest($request, $this->batched) ); - $this->middleware($middleware); + $graphQL->prepSchema(); + $middleware = ! $this->batched + ? $middlewareRegistry->forRequest($request->input('query')) + : array_reduce( + $request->toArray(), + function ($middleware, $req) use ($middlewareRegistry) { + $query = array_get($req, 'query', ''); + + return array_merge( + $middleware, + $middlewareRegistry->forRequest($query) + ); + }, + [] + ); + + $this->middleware($middleware); + } } /**
add sanity check for route
nuwave_lighthouse
train
fba61ffae5fb68ec21e7fc3a8b87f67ceecea4b1
diff --git a/JR6_magic.py b/JR6_magic.py index <HASH>..<HASH> 100755 --- a/JR6_magic.py +++ b/JR6_magic.py @@ -6,7 +6,7 @@ def main(command_line=True, **kwargs): JR6_magic.py DESCRIPTION - converts JR6 format files to magic_measurements format files + converts JR6 .txt format files to magic_measurements format files SYNTAX JR6_magic.py [command line options] @@ -39,7 +39,6 @@ def main(command_line=True, **kwargs): """ # initialize some stuff noave=0 - volume=10 inst="" samp_con,Z='1',"" missing=1 @@ -51,7 +50,6 @@ def main(command_line=True, **kwargs): specnum=-1 MagRecs=[] version_num=pmag.get_version() - Samps=[] # keeps track of sample orientations user="" mag_file=""
Removed extra variables. Other minor changes
PmagPy_PmagPy
train
a4b5551cf7586c20845bffc6829e9dd227a2316e
diff --git a/src/main/java/com/squareup/javawriter/JavaWriter.java b/src/main/java/com/squareup/javawriter/JavaWriter.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/squareup/javawriter/JavaWriter.java +++ b/src/main/java/com/squareup/javawriter/JavaWriter.java @@ -27,6 +27,7 @@ import static javax.lang.model.element.Modifier.ABSTRACT; /** A utility class which aids in generating Java source files. */ public class JavaWriter implements Closeable { + private static final Pattern TYPE_TRAILER = Pattern.compile("(.*?)(\\.\\.\\.|(?:\\[\\])+)$"); private static final Pattern TYPE_PATTERN = Pattern.compile("(?:[\\w$]+\\.)*([\\w\\.*$]+)"); private static final int MAX_SINGLE_LINE_ATTRIBUTES = 3; private static final String INDENT = " "; @@ -164,6 +165,11 @@ public class JavaWriter implements Closeable { /** Try to compress a fully-qualified class name to only the class name. */ public String compressType(String type) { + Matcher trailer = TYPE_TRAILER.matcher(type); + if (trailer.matches()) { + type = trailer.group(1); + } + StringBuilder sb = new StringBuilder(); if (this.packagePrefix == null) { throw new IllegalStateException(); @@ -201,6 +207,10 @@ public class JavaWriter implements Closeable { } pos = m.end(); } + + if (trailer.matches()) { + sb.append(trailer.group(2)); + } return sb.toString(); } diff --git a/src/test/java/com/squareup/javawriter/JavaWriterTest.java b/src/test/java/com/squareup/javawriter/JavaWriterTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/squareup/javawriter/JavaWriterTest.java +++ b/src/test/java/com/squareup/javawriter/JavaWriterTest.java @@ -863,6 +863,22 @@ public final class JavaWriterTest { assertThat(actual).isEqualTo("java.lang.annotation.Annotation"); } + @Test public void compressVarargsType() throws IOException { + javaWriter.emitPackage("com.blah"); + javaWriter.emitImports("java.util.File"); + String actual = javaWriter.compressType("java.util.File..."); + assertThat(actual).isEqualTo("File..."); + } + + @Test public void compressArrayType() throws IOException { + javaWriter.emitPackage("com.blah"); + javaWriter.emitImports("java.util.File"); + String actual1 = javaWriter.compressType("java.util.File[]"); + assertThat(actual1).isEqualTo("File[]"); + String actual2 = javaWriter.compressType("java.util.File[][][]"); + assertThat(actual2).isEqualTo("File[][][]"); + } + @Test public void configurableIndent() throws IOException { javaWriter.setIndent(" "); javaWriter.emitPackage("com.squareup");
Automatically compress varargs and array types.
square_javapoet
train
0ff87a8929e852f102e06ee0729258b006ff93f9
diff --git a/integration/main_test.go b/integration/main_test.go index <HASH>..<HASH> 100644 --- a/integration/main_test.go +++ b/integration/main_test.go @@ -364,7 +364,7 @@ func Randomize(str string) string { func KillProcess(name string) error { var command []string if goruntime.GOOS == "windows" { - command = []string{"tskill", strings.TrimSuffix(name, ".exe")} + command = []string{"taskkill", "/IM", name, "/F"} } else { command = []string{"pkill", "-x", fmt.Sprintf("^%s$", name)} }
Replace tskill with taskkill. This offers a more reliable way of killing a process. The /IM flag allows us to specify the "image name" of the process we're killing. This means we can use wildcards, force kill a process and all the child processes it may have spawned.
containerd_containerd
train
274a05b21f6284f63fb3acbc943a7664c62fc1f8
diff --git a/regions/shapes/rectangle.py b/regions/shapes/rectangle.py index <HASH>..<HASH> 100644 --- a/regions/shapes/rectangle.py +++ b/regions/shapes/rectangle.py @@ -11,7 +11,7 @@ from ..core import PixCoord, PixelRegion, SkyRegion, RegionMask, BoundingBox from .._geometry import rectangular_overlap_grid from .._utils.wcs_helpers import skycoord_to_pixel_scale_angle from ..core.attributes import ScalarPix, ScalarLength, QuantityLength, ScalarSky -from .polygon import PixelPolygonRegion +from .polygon import PolygonPixelRegion __all__ = ['RectanglePixelRegion', 'RectangleSkyRegion'] diff --git a/regions/shapes/tests/test_rectangle.py b/regions/shapes/tests/test_rectangle.py index <HASH>..<HASH> 100644 --- a/regions/shapes/tests/test_rectangle.py +++ b/regions/shapes/tests/test_rectangle.py @@ -25,6 +25,21 @@ def wcs(): header = fits.getheader(filename) return WCS(header) +def test_corners(): + + xc,yc = 2,2 + angle = 30*u.deg + width = 2 + height = 1 + reg = RectanglePixelRegion(center=PixCoord(xc, yc), + width=width, height=height, angle=angle) + + y1 = yc + np.cos(angle) * height/2 + np.sin(angle) * width/2 + x1 = xc + np.cos(angle) * width/2 - np.sin(angle) * height/2 + + assert (x1, y1) in reg.corners + + class TestRectanglePixelRegion(BaseTestPixelRegion):
add a test. Test proves I can no longer do basic geometry.
astropy_regions
train
a4aca44997bdd8127fd2667b2a1a0ce9a52dc48c
diff --git a/circleparse/replay.py b/circleparse/replay.py index <HASH>..<HASH> 100644 --- a/circleparse/replay.py +++ b/circleparse/replay.py @@ -16,10 +16,10 @@ class ReplayEvent(object): class Replay(object): - __BYTE = 1 - __SHORT = 2 - __INT = 4 - __LONG = 8 + BYTE = 1 + SHORT = 2 + INT = 4 + LONG = 8 def __init__(self, replay_data, pure_lzma): self.offset = 0 @@ -74,10 +74,10 @@ class Replay(object): self.offset += struct.calcsize(format_specifier) @staticmethod - def __parse_as_int(bytestring): + def parse_as_int(bytestring): return int.from_bytes(bytestring, byteorder='little') - def __decode(self, binarystream): + def decode(self, binarystream): result = 0 shift = 0 while True: @@ -94,10 +94,10 @@ class Replay(object): def parse_string(self, replay_data): if replay_data[self.offset] == 0x00: - self.offset += Replay.__BYTE + self.offset += Replay.BYTE elif replay_data[self.offset] == 0x0b: - self.offset += Replay.__BYTE - string_length = self.__decode(replay_data) + self.offset += Replay.BYTE + string_length = self.decode(replay_data) offset_end = self.offset + string_length string = replay_data[self.offset:offset_end].decode("utf-8") self.offset = offset_end @@ -117,12 +117,12 @@ class Replay(object): def parse_timestamp_and_replay_length(self, replay_data): format_specifier = "<qi" - (t, self.__replay_length) = struct.unpack_from(format_specifier, replay_data, self.offset) + (t, self.replay_length) = struct.unpack_from(format_specifier, replay_data, self.offset) self.timestamp = datetime.datetime.min + datetime.timedelta(microseconds=t/10) self.offset += struct.calcsize(format_specifier) def parse_play_data(self, replay_data): - offset_end = self.offset+self.__replay_length + offset_end = self.offset+self.replay_length if self.game_mode != GameMode.Standard: self.play_data = None else:
don't use unnecessarily private methods
kszlim_osu-replay-parser
train
ee732c423b06b012bcf81c6d9ba615a5606150f2
diff --git a/glam.gemspec b/glam.gemspec index <HASH>..<HASH> 100644 --- a/glam.gemspec +++ b/glam.gemspec @@ -1,6 +1,6 @@ Gem::Specification.new do |spec| spec.name = "glam" - spec.version = "0.0.1" + spec.version = "0.1.0" spec.authors = ["Paul Barry"] spec.email = ["[email protected]"] spec.description = %q{An HTML pretty printer} diff --git a/lib/glam.rb b/lib/glam.rb index <HASH>..<HASH> 100644 --- a/lib/glam.rb +++ b/lib/glam.rb @@ -1,7 +1,7 @@ require 'glam/glamorizer' module Glam - VERSION = '0.0.1' + VERSION = '0.1.0' end # Pretty-print the HTML diff --git a/lib/glam/cli.rb b/lib/glam/cli.rb index <HASH>..<HASH> 100644 --- a/lib/glam/cli.rb +++ b/lib/glam/cli.rb @@ -32,7 +32,19 @@ module Glam glam = Glam::Glamorizer.new(@options) - puts glam.glamorize(ARGV[0] ? File.read(ARGV[0]) : STDIN.read) + html = if ARGV[0] + file = File.expand_path(ARGV[0]) + if File.exists?(file) + File.read(file) + else + $stderr.puts "#{file} does not exist" + exit 2 + end + else + STDIN.read + end + + puts glam.glamorize(html) end end end
Give a better error message if given a file that does not exist
pjb3_glam
train
b60ace8649448196f11e1b9cfead58adf079b996
diff --git a/templates/connect.php b/templates/connect.php index <HASH>..<HASH> 100755 --- a/templates/connect.php +++ b/templates/connect.php @@ -241,7 +241,7 @@ ?></p> <?php if ( $require_license_key ) : ?> <div class="fs-license-key-container"> - <input id="fs_license_key" name="fs_key" type="text" required maxlength="32" + <input id="fs_license_key" name="fs_key" type="text" required maxlength="<?php echo $fs->apply_filters('license_key_maxlength', 32) ?>" placeholder="<?php fs_esc_attr_echo_inline( 'License key', 'license-key', $slug ) ?>" tabindex="1"/> <i class="dashicons dashicons-admin-network"></i> <a class="show-license-resend-modal show-license-resend-modal-<?php echo $fs->get_unique_affix() ?>"
[license-activation] [enrich] Added a filter to modify the license key input field maxlength attribute.
Freemius_wordpress-sdk
train
2a244932786ad1eb892152ff7f3c41ca4234704e
diff --git a/configdns-v2/service.go b/configdns-v2/service.go index <HASH>..<HASH> 100644 --- a/configdns-v2/service.go +++ b/configdns-v2/service.go @@ -10,7 +10,7 @@ var ( Config edgegrid.Config ) -// Init sets the FastDNS edgegrid Config +// Init sets the DNSv2 edgegrid Config func Init(config edgegrid.Config) { Config = config }
[AT3][Change] Remove fastDNS reference
akamai_AkamaiOPEN-edgegrid-golang
train
585944204ec38ab8d3bbcd9e21079a700f41786e
diff --git a/FlowCytometryTools/core/containers.py b/FlowCytometryTools/core/containers.py index <HASH>..<HASH> 100755 --- a/FlowCytometryTools/core/containers.py +++ b/FlowCytometryTools/core/containers.py @@ -8,8 +8,10 @@ TODO: from FlowCytometryTools import parse_fcs from bases import Measurement, MeasurementCollection, OrderedCollection from GoreUtilities.util import to_list as to_iter +from GoreUtilities.graph import plot_ndpanel from itertools import cycle import graph +from pandas import DataFrame import inspect import numpy as np from FlowCytometryTools.core.transforms import Transformation @@ -76,9 +78,10 @@ class FCMeasurement(Measurement): except: raise Exception("The keyword '{}' does not exist in the following FCS file: {}".format(ID_field, self.datafile)) + @doc_replacer def plot(self, channel_names, transform=(None, None), kind='histogram', - gates=None, transform_first=True, apply_gates=True, plot_gates=True, + gates=None, transform_first=True, apply_gates=False, plot_gates=True, gate_colors=None, **kwargs): """ Plots the flow cytometry data associated with the sample on the current axis. @@ -154,6 +157,29 @@ class FCMeasurement(Measurement): return plot_output + def matplot(self, channel_names='auto', kind='histogram', + gates=None,apply_gates=False, plot_gates=True, + gate_colors=None, **kwargs): + """ + Generates a cross plot. + """ + if channel_names == 'auto': + channel_names = list(self.channel_names) + + def plot_region(channels, **kwargs): + if channels[0] == channels[1]: + channels = channels[0] + + self.plot(channels, kind=kind, gates=gates, + apply_gates=apply_gates, plot_gates=plot_gates, + gate_colors=gate_colors, autolabel=False) + + channel_list = np.array(list(channel_names), dtype=object) + channel_mat = [[(x, y) for x in channel_list] for y in channel_list] + channel_mat = DataFrame(channel_mat, columns=channel_list, index=channel_list) + return plot_ndpanel(channel_mat, plot_region, **kwargs) + + def view(self): ''' Loads the current FCS sample viewer
ENHC: Partially implemented function to produce a cross plot of a sample
eyurtsev_FlowCytometryTools
train
3e28437ecb670f210ee152f7131e8582536324a5
diff --git a/unit_tests/stereotype_client_tests.js b/unit_tests/stereotype_client_tests.js index <HASH>..<HASH> 100644 --- a/unit_tests/stereotype_client_tests.js +++ b/unit_tests/stereotype_client_tests.js @@ -42,7 +42,7 @@ describe('Stereotype client', function () { canEdit: true, }]; - nockRequest.get(`/v1/templates`) + nockRequest.get(`/v1/templates?public=false`) .reply(200, templList, { 'content-type': 'application/json', });
Fixing mock in a test.
Cimpress_stereotype-client
train
072b047556b22cf5f03ccc71b90aaa2c7e96caff
diff --git a/src/Extension/PhiremockProcess.php b/src/Extension/PhiremockProcess.php index <HASH>..<HASH> 100644 --- a/src/Extension/PhiremockProcess.php +++ b/src/Extension/PhiremockProcess.php @@ -18,7 +18,6 @@ namespace Codeception\Extension; use Symfony\Component\Process\Process; -use Symfony\Component\Process\ProcessBuilder; /** * Manages the current running WireMock process. @@ -38,11 +37,6 @@ class PhiremockProcess private $process; /** - * @var resource[] - */ - private $pipes; - - /** * Starts a wiremock process. * * @param string $jarPath @@ -53,19 +47,23 @@ class PhiremockProcess */ public function start($ip, $port, $path, $logsPath, $debug) { - $builder = new ProcessBuilder(['-i', $ip, '-p', $port]); + $phiremockPath = is_file($path) ? $path : "{$path}/phiremock"; + $this->process = new Process( + $this->getCommandPrefix() + . "{$phiremockPath} -i {$ip} -p {$port}" + . ($debug? ' -d' : '') + ); if ($debug) { - $builder->add('-d'); + echo 'Executing: ' . $this->process->getCommandLine() . PHP_EOL; } - $builder->setPrefix("{$path}/phiremock"); - $builder->enableOutput(); - $builder->setOption('bypass_shell', true); - - $this->process = $builder->getProcess(); $logFile = $logsPath . DIRECTORY_SEPARATOR . self::LOG_FILE_NAME; $this->process->start(function ($type, $buffer) use ($logFile) { file_put_contents($logFile, $buffer, FILE_APPEND); }); + $this->process->setEnhanceSigchildCompatibility(true); + if ($this->isWindows()) { + $this->process->setEnhanceWindowsCompatibility(true); + } } /** @@ -73,7 +71,8 @@ class PhiremockProcess */ public function stop() { - $this->process->stop(3, SIGTERM); + $this->process->signal(SIGTERM); + $this->process->stop(3, SIGKILL); } /** @@ -81,9 +80,11 @@ class PhiremockProcess */ private function getCommandPrefix() { - if (PHP_OS == 'WIN32' || PHP_OS == 'WINNT' || PHP_OS == 'Windows') { - return ''; - } - return 'exec '; + return $this->isWindows() ? '' : 'exec '; + } + + private function isWindows() + { + return PHP_OS == 'WIN32' || PHP_OS == 'WINNT' || PHP_OS == 'Windows'; } }
Update PhiremockProcess.php
mcustiel_phiremock-codeception-extension
train
fbb23cd3071e0409506993b0f9a85a3d2cb98a71
diff --git a/minidns/dns.py b/minidns/dns.py
index <HASH>..<HASH> 100644
--- a/minidns/dns.py
+++ b/minidns/dns.py
@@ -146,7 +146,7 @@ class RuntimeAuthority(FileAuthority):
         else:
             values['ttl'] = values['ttl'].encode('utf-8')
 
-        print "Setting", name, "=", values
+        log.msg("Setting %s = %s" % (name, values))
 
         # have to special case data type for txt records, grrr
         if 'data' in values:
@@ -198,6 +198,12 @@ class RuntimeAuthority(FileAuthority):
             for r in self.records[fullname]:
                 data.append(self.get_record_details(fullname, r))
         return data
+
+    def get_records_by_type(self, type_):
+        data = []
+        for k,v in self.records.items():
+            data.extend([self.get_record_details(k, item) for item in v if mapper.get_typestring(item)== type_])
+        return data
 
     def delete_record(self, name):
         del self.records["%s.%s" % (name, self.domain)]
@@ -252,7 +258,7 @@ class MiniDNSServerFactory(DNSServerFactory):
             os.mkdir(self.savedir)
             os.chown(self.savedir, ent.pw_uid, ent.pw_gid)
         self.resolver = MiniDNSResolverChain([forward_resolver], self.savedir)
-        self.verbose = False
+        self.verbose = True
         self.load()
 
     def doStart(self):
diff --git a/minidns/tests/test_dns.py b/minidns/tests/test_dns.py
index <HASH>..<HASH> 100644
--- a/minidns/tests/test_dns.py
+++ b/minidns/tests/test_dns.py
@@ -17,7 +17,7 @@ import json
 from twisted.trial import unittest
 from minidns.dns import RuntimeAuthority, MiniDNSResolverChain, Record_A
 
-from mock import MagicMock, patch
+from mock import MagicMock, patch 
 
 class TestRuntimeAuthority(unittest.TestCase):
 
@@ -25,32 +25,50 @@ class TestRuntimeAuthority(unittest.TestCase):
         self.a = RuntimeAuthority("foo", None)
 
     def test_a_records(self):
-        foo_value = MagicMock(Record_A)
-        bar_value = MagicMock(Record_A)
+        foo_value = MagicMock(Record_A, autospec=True)
+        bar_value = MagicMock(Record_A, autospec=True)
+        foo_value.TYPE = 1
+        bar_value.TYPE = 1
+        foo_value.name = "foo"
+        bar_value.name = "bar"
+        foo_value.ttl = None
+        bar_value.ttl = None
+        foo_value.compareAttributes = ('address', 'ttl')
+        bar_value.compareAttributes = ('address', 'ttl')
         foo_value.dottedQuad.return_value = "192.168.0.1"
         bar_value.dottedQuad.return_value = "192.168.0.2"
         self.a.records = {
             "foo.foo": [foo_value],
             "bar.foo": [bar_value],
         }
-        rv = self.a.a_records()
+        rv = self.a.get_records_by_type("A")
 
         self.assertEqual(sorted(rv), [
-            ("A", "bar", "192.168.0.2"),
-            ("A", "foo", "192.168.0.1"),
+            ("A", "bar.foo", ["192.168.0.2"]),
+            ("A", "foo.foo", ["192.168.0.1"]),
         ])
+
+    def test_txt_records(self):
+        pass
+
+    def test_cname_records(self):
+        pass
+
+    def test_conflict_rules(self):
+        pass
 
     def test_load(self):
+        from ..scripts.wingdbstub import *
         os.mkdir("savedir")
-        json.dump({
-            "bar": {
+        json.dump((
+            {"bar": {
                 "type": "A",
-                "value": "192.168.1.1",
-            },
-            "baz": {
+                "address": "192.168.1.1",
+            }},
+            {"baz": {
                 "type": "A",
-                "value": "192.168.1.2",
-            },
-        }, open("savedir/foo", "w"))
+                "address": "192.168.1.2",
+            }},
+            ), open("savedir/foo", "w"))
         self.a = RuntimeAuthority("foo", "savedir")
         self.assertEqual(self.a.records, {
             "bar.foo": [Record_A(address="192.168.1.1")],
@@ -62,16 +80,16 @@ class TestRuntimeAuthority(unittest.TestCase):
     def test_save(self):
         os.mkdir("savedir")
         self.a = RuntimeAuthority("foo", "savedir")
-        self.a.set_record("bar", "192.168.1.1")
-        self.a.set_record("baz", "192.168.1.2")
-        self.assertEqual(json.load(open("savedir/foo")), {
-            "bar": {
+        self.a.set_record("baz", "A", {'address': "192.168.1.2"}, True)
+        self.a.set_record("bar", "A", {'address': "192.168.1.1"}, True)
+        self.assertEqual(json.load(open("savedir/foo")), [
+            {"bar": {
                 "type": "A",
-                "value": "192.168.1.1",
-            },
-            "baz": {
+                "address": "192.168.1.1",
+            }},
+            {"baz": {
                 "type": "A",
-                "value": "192.168.1.2",
-            }
-        })
+                "address": "192.168.1.2",
+            }}
+        ])
Updated some DNS tests, fixed set/get record format bugs, and added a get_record_by_type method
yaybu_callsign
train
568e21ed1580808ef8a8135a6552603b29330794
diff --git a/lib/flatrack/template.rb b/lib/flatrack/template.rb index <HASH>..<HASH> 100644 --- a/lib/flatrack/template.rb +++ b/lib/flatrack/template.rb @@ -17,7 +17,7 @@ class Flatrack private def find_by_type(type, file) - if File.exists?(file) + if File.exist?(file) file else Dir[File.join type.to_s.pluralize, "#{file}*"].first
Move away from deprecated exists?
jwaldrip_flatrack
train
7cc5d5d6e121f1fe72aeabb9dc70d96648186384
diff --git a/guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/FluentIterable.java b/guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/FluentIterable.java index <HASH>..<HASH> 100644 --- a/guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/FluentIterable.java +++ b/guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/FluentIterable.java @@ -16,11 +16,12 @@ package com.google.common.collect; +import static com.google.common.base.Preconditions.checkNotNull; + import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.base.Function; import com.google.common.base.Optional; -import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import java.util.Comparator; @@ -76,7 +77,7 @@ public abstract class FluentIterable<E> implements Iterable<E> { } FluentIterable(Iterable<E> iterable) { - this.iterable = Preconditions.checkNotNull(iterable); + this.iterable = checkNotNull(iterable); } /** @@ -103,7 +104,7 @@ public abstract class FluentIterable<E> implements Iterable<E> { */ @Deprecated public static <E> FluentIterable<E> from(FluentIterable<E> iterable) { - return Preconditions.checkNotNull(iterable); + return checkNotNull(iterable); } /** diff --git a/guava/src/com/google/common/collect/FluentIterable.java b/guava/src/com/google/common/collect/FluentIterable.java index <HASH>..<HASH> 100644 --- a/guava/src/com/google/common/collect/FluentIterable.java +++ b/guava/src/com/google/common/collect/FluentIterable.java @@ -16,12 +16,13 @@ package com.google.common.collect; +import static com.google.common.base.Preconditions.checkNotNull; + import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Function; import com.google.common.base.Optional; -import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import java.util.Comparator; @@ -77,7 +78,7 @@ public abstract class FluentIterable<E> implements Iterable<E> { } FluentIterable(Iterable<E> iterable) { - this.iterable = Preconditions.checkNotNull(iterable); + this.iterable = checkNotNull(iterable); } /** @@ -104,7 +105,7 @@ public abstract class FluentIterable<E> implements Iterable<E> { */ @Deprecated public static <E> FluentIterable<E> from(FluentIterable<E> iterable) { - return Preconditions.checkNotNull(iterable); + return checkNotNull(iterable); } /**
Internal shuffling in FluentIterable with some public spillover. ------------- Created by MOE: <URL>
google_guava
train
e63ad921ed0a5385cb69693e554d8af028e9c0b0
diff --git a/dispatch/themes/ubyssey/views.py b/dispatch/themes/ubyssey/views.py index <HASH>..<HASH> 100644 --- a/dispatch/themes/ubyssey/views.py +++ b/dispatch/themes/ubyssey/views.py @@ -21,7 +21,7 @@ class UbysseyTheme(DefaultTheme): def get_article_meta(self, article): return { - 'title': "%s - %s" % (article.long_headline, self.SITE_TITLE), + 'title': article.long_headline, 'description': article.seo_description if article.seo_description is not None else article.snippet, 'url': article.get_absolute_url, 'image': article.featured_image.image.get_absolute_url(), @@ -48,9 +48,12 @@ class UbysseyTheme(DefaultTheme): popular = Article.objects.get_popular()[:5] + title = "%s - UBC's official student newspaper" % self.SITE_TITLE + context = { + 'title': title, 'meta': { - 'title': "%s - UBC's official student newspaper" % self.SITE_TITLE, + 'title': title, 'description': 'Weekly student newspaper of the University of British Columbia.', 'url': self.SITE_URL, 'image': articles['primary'].featured_image.image.get_absolute_url() @@ -74,6 +77,7 @@ class UbysseyTheme(DefaultTheme): dur = request.GET.get('dur', None) context = { + 'title': "%s - %s" % (article.long_headline, self.SITE_TITLE), 'meta': self.get_article_meta(article), 'article': article, 'reading_list': article.get_reading_list(ref=ref, dur=dur),
remove "The Ubyssey" from og:title
ubyssey_dispatch
train
f352af4b75577be05311cf6f9d7cf47463356a44
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -69,6 +69,8 @@ Concat.prototype.add = function(filePath, content, sourceMap) { }); } } else { + if (sourceMap && sourceMap.sources) + filePath = sourceMap.sources[0]; for (var i = 1; i <= lines; i++) { this._sourceMap.addMapping({ generated: { diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -231,7 +231,7 @@ testCase('should pass on source content when mappings is empty', { ], output: { content: 'AAA\nEEE\nFFF', - sourceMap: '{"version":3,"file":"out.js","sources":["intermediate.js","test2","test3"],"names":[],"mappings":"AAAA;ACAA;ACAA","sourcesContent":["AAA",null,null]}' + sourceMap: '{"version":3,"file":"out.js","sources":["test11","test2","test3"],"names":[],"mappings":"AAAA;ACAA;ACAA","sourcesContent":["AAA",null,null]}' } }); @@ -277,3 +277,21 @@ testCase('should output unix style paths on Windows', { sourceMap: '{"version":3,"file":"test/test/out.js","sources":["test/test1","test/test2","test/test3"],"names":[],"mappings":"AAAA;ACAA;ACAA"}' } }); + +testCase('should keep source in sources with empty mappings', { + separator: '\n', + sourceMapping: true, + outFile: 'out.js', + input: [ + { + content: 'AAA', + sourceMap: '{"version":3,"file":"test1","sources":["testXXX"], "names":[],"mappings":""}' + }, + { content: 'BBB' }, + { content: 'CCC' } + ], + output: { + content: 'AAA\nBBB\nCCC', + sourceMap: '{"version":3,"file":"out.js","sources":["testXXX","test2","test3"],"names":[],"mappings":"AAAA;ACAA;ACAA"}' + } +});
keep source from sourcemap when mappings empty
floridoo_concat-with-sourcemaps
train
67de0e3b98de67fa6efac4a29bbffb45bd9c63e8
diff --git a/pypmc/sampler/importance_sampling.py b/pypmc/sampler/importance_sampling.py index <HASH>..<HASH> 100644 --- a/pypmc/sampler/importance_sampling.py +++ b/pypmc/sampler/importance_sampling.py @@ -10,7 +10,7 @@ from ..tools import History as _History from ..tools.indicator import merge_function_with_indicator as _indmerge def calculate_expectation(samples, f): - r'''Calculates the expectation value of function ``f`` using weighted + r'''Calculate the expectation value of function ``f`` using weighted samples (like the output of an importance-sampling run). Denoting :math:`x_n` as the sample n and :math:`w_n` as its (normalized) @@ -39,7 +39,7 @@ def calculate_expectation(samples, f): return out/normalization def calculate_mean(samples): - r'''Calculates the mean of weighted samples (like the output of an + r'''Calculate the mean of weighted samples (like the output of an importance-sampling run). :param samples:
[doc] "calculates ..." -> "calculate ..."
fredRos_pypmc
train
27537086daac9b96481438040dda00e39bd4c505
diff --git a/lib/danger/danger_core/plugins/dangerfile_git_plugin.rb b/lib/danger/danger_core/plugins/dangerfile_git_plugin.rb index <HASH>..<HASH> 100644 --- a/lib/danger/danger_core/plugins/dangerfile_git_plugin.rb +++ b/lib/danger/danger_core/plugins/dangerfile_git_plugin.rb @@ -28,7 +28,7 @@ module Danger # # @example Warn when somebody tries to add nokogiri to the project # - # diff = git.diff_for_file["Gemfile.lock"] + # diff = git.diff_for_file("Gemfile.lock") # if diff && diff.patch =~ "nokogiri" # warn 'Please do not add nokogiri to the project. Thank you.' # end
Example for diff_for_file is incorrect (#<I>)
danger_danger
train
97cc22fbdff2615eea62dd50867beff2b57acb60
diff --git a/lib/coa.js b/lib/coa.js index <HASH>..<HASH> 100644 --- a/lib/coa.js +++ b/lib/coa.js @@ -1,7 +1,8 @@ 'use strict'; var PATH = require('path'), - Q = require('q'); + Q = require('q'), + C = require('cli-color'); module.exports = require('coa').Cmd() .name(PATH.basename(process.argv[1])) @@ -17,11 +18,35 @@ module.exports = require('coa').Cmd() return p.name + ' ' + p.version; }) .end() + .opt() + .name('npm-development') + .title('Pass ' + C.yellow('--development') + ' option to ' + C.blueBright('npm install') + ' (' + C.yellow('--production') + ' is passed by default)') + .long('npm-development') + .flag() + .end() + .opt() + .name('bower-production') + .title('Pass ' + C.yellow('--production') + ' option to ' + C.blueBright('bower install') + ' (' + C.yellow('--development') + ' is passed by default)') + .long('bower-production') + .flag() + .end() + .opt() + .name('force') + .title('Pass ' + C.yellow('--force') + ' option to ' + C.blueBright('bower install')) + .long('force') + .flag() + .end() + .opt() + .name('force-latest') + .title('Pass ' + C.yellow('--force-latest') + ' option to ' + C.blueBright('bower install')) + .long('force-latest') + .flag() + .end() .completable() - .act(function() { + .act(function(opts) { var defer = Q.defer(); - require('./install')() + require('./install')(null, opts) .on('error', defer.reject.bind(defer)) .on('data', function(data) { process.stdout.write(data); diff --git a/lib/install.js b/lib/install.js index <HASH>..<HASH> 100644 --- a/lib/install.js +++ b/lib/install.js @@ -4,11 +4,13 @@ var PATH = require('path'), CP = require('child_process'), B = require('bower'), Q = require('q'), + L = require('lodash'), QFS = require('q-io/fs'), Emitter = require('events').EventEmitter; module.exports = function(paths, options) { - options = options || {}; + options = L.extend({}, options || {}); + options.production = !!options['bower-production']; var emitter = new Emitter(); @@ -28,7 +30,7 @@ module.exports = function(paths, options) { return packages[key]; }), npmBin = process.env.NPM || 'npm', - npmArgs = ['install', '--production'], + npmArgs = ['install', options['npm-development'] ? '--development' : '--production'], promise = Q.resolve(); emitter.emit('paths', paths); diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,9 @@ "dependencies": { "q": "~0.9.6", "coa": "~0.4.0", - "q-io": "~1.9.1" + "q-io": "~1.9.1", + "lodash": "~1.3.1", + "cli-color": "~0.2.2" }, "peerDependencies": { "bower": "~0.10.0"
Add some new command line options to pass to bower install and npm install commands
arikon_bower-npm-install
train
de1df28ceab74cbbc1efc704fe102fa6e894a22d
diff --git a/contextfree/contextfree.py b/contextfree/contextfree.py index <HASH>..<HASH> 100644 --- a/contextfree/contextfree.py +++ b/contextfree/contextfree.py @@ -213,7 +213,11 @@ def get_npimage(transparent=False, y_origin="top"): def rnd(c): - return (random.random() - 0.5) * c + return (random.random() - 0.5) * 2 * c + + +def prnd(c): + return (random.random() * c) def coinflip(sides):
rnd rescaled, positive rnd added
undertherain_pycontextfree
train
a628c505d3945b3d009cc8e84416d450c90f6ccd
diff --git a/h2o-algos/src/main/java/hex/deeplearning/DeepLearning.java b/h2o-algos/src/main/java/hex/deeplearning/DeepLearning.java
index <HASH>..<HASH> 100644
--- a/h2o-algos/src/main/java/hex/deeplearning/DeepLearning.java
+++ b/h2o-algos/src/main/java/hex/deeplearning/DeepLearning.java
@@ -268,6 +268,7 @@ public class DeepLearning extends SupervisedModelBuilder<DeepLearningModel,DeepL
           train, train.lastVec(), trainSamplingFactors, (long)(mp._max_after_balance_size*train.numRows()), mp._seed, true, false);
       model._output._modelClassDist = new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist();
     }
+    model._output.autoencoder = _parms._autoencoder;
     model.training_rows = train.numRows();
     trainScoreFrame = sampleFrame(train, mp._score_training_samples, mp._seed); //training scoring dataset is always sampled uniformly from the training dataset
diff --git a/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java b/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
index <HASH>..<HASH> 100644
--- a/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
+++ b/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
@@ -610,7 +610,11 @@ public class DeepLearningModel extends SupervisedModel<DeepLearningModel,DeepLea
     public DeepLearningOutput() { super(); }
     public DeepLearningOutput( DeepLearning b ) { super(b); }
     Errors errors;
+    boolean autoencoder;
     TwoDimTable modelSummary;
+    @Override public ModelCategory getModelCategory() {
+      return autoencoder ? ModelCategory.AutoEncoder : super.getModelCategory();
+    }
   }
 
   // Default publicly visible Schema is V2
diff --git a/h2o-algos/src/main/java/hex/pca/PCAModel.java b/h2o-algos/src/main/java/hex/pca/PCAModel.java
index <HASH>..<HASH> 100644
--- a/h2o-algos/src/main/java/hex/pca/PCAModel.java
+++ b/h2o-algos/src/main/java/hex/pca/PCAModel.java
@@ -55,7 +55,7 @@ public class PCAModel extends Model<PCAModel,PCAModel.PCAParameters,PCAModel.PCA
     @Override public int nfeatures() { return _names.length; }
 
     @Override public ModelCategory getModelCategory() {
-      return Model.ModelCategory.Clustering;
+      return ModelCategory.DimReduction;
     }
   }
 
diff --git a/h2o-core/src/main/java/hex/Model.java b/h2o-core/src/main/java/hex/Model.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/hex/Model.java
+++ b/h2o-core/src/main/java/hex/Model.java
@@ -25,7 +25,9 @@ public abstract class Model<M extends Model<M,P,O>, P extends Model.Parameters,
     Binomial,
     Multinomial,
     Regression,
-    Clustering
+    Clustering,
+    AutoEncoder,
+    DimReduction
   }
 
   public boolean isSupervised() { return false; }
@@ -174,7 +176,7 @@ public abstract class Model<M extends Model<M,P,O>, P extends Model.Parameters,
     return cns==null ? 1 : cns.length;
   }
 
-  // Note: Clustering algorithms MUST redefine this method to return ModelCategory.Clustering:
+  // Note: some algorithms MUST redefine this method to return other model categories
   public ModelCategory getModelCategory() {
     return (isClassifier() ? (nclasses() > 2 ? ModelCategory.Multinomial : ModelCategory.Binomial) :
diff --git a/h2o-core/src/main/java/water/api/ModelOutputSchema.java b/h2o-core/src/main/java/water/api/ModelOutputSchema.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/api/ModelOutputSchema.java
+++ b/h2o-core/src/main/java/water/api/ModelOutputSchema.java
@@ -18,7 +18,7 @@ abstract public class ModelOutputSchema<O extends Model.Output, S extends ModelO
   @API(help="Domains for categorical (enum) columns.", direction=API.Direction.OUTPUT)
   public String[][] domains;
 
-  @API(help="Category of the model (e.g., Binomial).", values={"Unknown", "Binomial", "Multinomial", "Regression", "Clustering"}, direction=API.Direction.OUTPUT)
+  @API(help="Category of the model (e.g., Binomial).", values={"Unknown", "Binomial", "Multinomial", "Regression", "Clustering", "AutoEncoder", "DimReduction"}, direction=API.Direction.OUTPUT)
   public Model.ModelCategory model_category;
 
   @API(help="Help information for output fields", direction=API.Direction.OUTPUT)
Add new model categories: AutoEncoder, DimReduction
h2oai_h2o-3
train
c9f37af8b9ddcb68292841de7f5f006054e562ea
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -31,6 +31,25 @@ var readManagerTemplate = _.once(function() { var validUpdateModes = ['xhr', 'fs']; var updateModesNeedingUrl = ['xhr']; +function makeIdentitySourceMap(content, resourcePath) { + var map = new sm.SourceMapGenerator(); + map.setSourceContent(resourcePath, content); + content.split('\n').map(function(line, index) { + map.addMapping({ + source: resourcePath, + original: { + line: index+1, + column: 0 + }, + generated: { + line: index+1, + column: 0 + } + }); + }); + return map.toJSON(); +} + module.exports = function(bundle, opts) { if (!opts) opts = {}; var updateMode = opts.mode||opts.m||'xhr'; @@ -108,34 +127,23 @@ module.exports = function(bundle, opts) { var header = '_hmr.initModule('+JSON.stringify(row.file)+', module);\n(function(){\n'; var footer = '\n}).call(this, arguments);\n'; - var inputMapConsumer; - var inputMap = convert.fromSource(row.source); - if (inputMap) { - inputMapConsumer = new sm.SourceMapConsumer(inputMap.toObject()); + var inputMapCV = convert.fromSource(row.source); + var inputMap; + if (inputMapCV) { + inputMap = inputMapCV.toObject(); row.source = convert.removeComments(row.source); + } else { + inputMap = makeIdentitySourceMap(row.source, row.file); } - var outputMapGenerator = new sm.SourceMapGenerator({ - file: row.file - }); - var lines = row.source.split('\n').length; - for (var i=1; i<=lines; i++) { - outputMapGenerator.addMapping({ - generated: {line:i+2,column:0}, - original: {line:i,column:0}, - source: row.file - }); - } - outputMapGenerator.setSourceContent(row.file, row.source); - if (inputMapConsumer) { - outputMapGenerator.applySourceMap(inputMapConsumer); - } - var mergedMap = outputMapGenerator.toJSON(); - if (inputMap) { - mergedMap.sources = inputMap.sources || [inputMap.file ? inputMap.file : row.file]; - mergedMap.file = inputMap.file; - } - row.source = header + row.source + footer + - '\n' + convert.fromObject(mergedMap).toComment(); + + var node = new sm.SourceNode(null, null, null, [ + new sm.SourceNode(null, null, null, header), + sm.SourceNode.fromStringWithSourceMap(row.source, new sm.SourceMapConsumer(inputMap)), + new sm.SourceNode(null, null, null, footer) + ]); + + var result = node.toStringWithSourceMap(); + row.source = result.code + convert.fromObject(result.map.toJSON()).toComment(); next(null, row); } }));
Fix sourcemap handling. The old code worked in simple cases by chance but made pretty mangled sourcemaps if there was an input source map from other transforms.
Macil_browserify-hmr
train
5d1eb0638fdb55e67aae4d1de4da946751616b0e
diff --git a/app/models/booking_template.rb b/app/models/booking_template.rb index <HASH>..<HASH> 100644 --- a/app/models/booking_template.rb +++ b/app/models/booking_template.rb @@ -152,6 +152,9 @@ class BookingTemplate < ActiveRecord::Base acts_as_taggable_on :include_in_saldo # Importer + # ======== + attr_accessible :matcher + def self.import(struct) templates = self.all.inject([]) do |found, template| puts "matcher: " + template.matcher
Mark BookingTemplate.matcher as accessible.
huerlisi_has_accounts
train
43c5803a25a7ca9ffc499ad7044502ab14bb19f3
diff --git a/agent/consul/state/catalog_events.go b/agent/consul/state/catalog_events.go index <HASH>..<HASH> 100644 --- a/agent/consul/state/catalog_events.go +++ b/agent/consul/state/catalog_events.go @@ -45,8 +45,8 @@ func serviceHealthSnapshot(s *Store, topic stream.Topic) stream.SnapshotFunc { defer tx.Abort() connect := topic == topicServiceHealthConnect - // TODO(namespace-streaming): plumb entMeta through from SubscribeRequest - idx, nodes, err := checkServiceNodesTxn(tx, nil, req.Key, connect, nil) + entMeta := structs.EnterpriseMetaInitializer(req.Namespace) + idx, nodes, err := checkServiceNodesTxn(tx, nil, req.Key, connect, &entMeta) if err != nil { return 0, err } @@ -349,8 +349,7 @@ func getPayloadCheckServiceNode(payload stream.Payload) *structs.CheckServiceNod // parseCheckServiceNodes but is more efficient since we know they are all on // the same node. func newServiceHealthEventsForNode(tx ReadTxn, idx uint64, node string) ([]stream.Event, error) { - // TODO(namespace-streaming): figure out the right EntMeta and mystery arg. - services, err := catalogServiceListByNode(tx, node, nil, false) + services, err := catalogServiceListByNode(tx, node, structs.WildcardEnterpriseMeta(), true) if err != nil { return nil, err } @@ -384,8 +383,7 @@ func getNodeAndChecks(tx ReadTxn, node string) (*structs.Node, serviceChecksFunc } n := nodeRaw.(*structs.Node) - // TODO(namespace-streaming): work out what EntMeta is needed here, wildcard? - iter, err := catalogListChecksByNode(tx, node, nil) + iter, err := catalogListChecksByNode(tx, node, structs.WildcardEnterpriseMeta()) if err != nil { return nil, nil, err } diff --git a/agent/consul/subscribe_backend.go b/agent/consul/subscribe_backend.go index <HASH>..<HASH> 100644 --- a/agent/consul/subscribe_backend.go +++ b/agent/consul/subscribe_backend.go @@ -1,13 +1,13 @@ package consul import ( - "github.com/hashicorp/consul/agent/structs" "google.golang.org/grpc" "github.com/hashicorp/consul/acl" "github.com/hashicorp/consul/agent/consul/stream" agentgrpc "github.com/hashicorp/consul/agent/grpc" "github.com/hashicorp/consul/agent/rpc/subscribe" + "github.com/hashicorp/consul/agent/structs" ) type subscribeBackend struct {
state: use enterprise meta for creating events
hashicorp_consul
train
ca556b7d241960c609bd549f736a293a3807e410
diff --git a/lib/shapewear/wsdl.rb b/lib/shapewear/wsdl.rb index <HASH>..<HASH> 100644 --- a/lib/shapewear/wsdl.rb +++ b/lib/shapewear/wsdl.rb @@ -19,26 +19,26 @@ module Shapewear::WSDL xtypes.schema 'xmlns' => namespaces['xsd'], 'targetNamespace' => namespaces['xsd1'] do |xschema| # define elements for each defined method - instance_methods(false).each do |m| - build_type_elements_for_method(m, xschema) + operations.each do |m, op_opts| + build_type_elements_for_method(m, op_opts, xschema) end end end - instance_methods(false).each do |m| - xdef.message :name => "#{m.camelize}Input" do |xmsg| - xmsg.part :name => :body, :element => "xsd1:#{m.camelize}Request" + operations.each do |_, op_opts| + xdef.message :name => "#{op_opts[:public_name]}Input" do |xmsg| + xmsg.part :name => :body, :element => "xsd1:#{op_opts[:public_name]}Request" end unless instance_method(m).arity == 0 - xdef.message :name => "#{m.camelize}Output" do |xmsg| - xmsg.part :name => :body, :element => "xsd1:#{m.camelize}Response" + xdef.message :name => "#{op_opts[:public_name]}Output" do |xmsg| + xmsg.part :name => :body, :element => "xsd1:#{op_opts[:public_name]}Response" end end xdef.portType :name => "#{self.name}PortType" do |xpt| - instance_methods(false).each do |m| - xpt.operation :name => m.camelize do |xop| - xop.input :message => "tns:#{m.camelize}Input" unless instance_method(m).arity == 0 - xop.output :message => "tns:#{m.camelize}Output" + operations.each do |_, op_opts| + xpt.operation :name => op_opts[:public_name] do |xop| + xop.input :message => "tns:#{op_opts[:public_name]}Input" unless instance_method(m).arity == 0 + xop.output :message => "tns:#{op_opts[:public_name]}Output" end end end @@ -65,10 +65,9 @@ module Shapewear::WSDL end end - def build_type_elements_for_method(m, xschema) + def build_type_elements_for_method(m, op_options, xschema) # element for method arguments um = instance_method(m) - op_options = options[:operations][m.to_sym] rescue nil if um.arity > 0 xschema.element :name => "#{op_options[:public_name]}Request" do |xreq|
Removed last calls of 'instance_methods'.
elementar_shapewear
train
db3e01e76bb697b62fbd21e22b4426a5cbd97d0e
diff --git a/lib/travis/github/services/sync_user/repository.rb b/lib/travis/github/services/sync_user/repository.rb index <HASH>..<HASH> 100644 --- a/lib/travis/github/services/sync_user/repository.rb +++ b/lib/travis/github/services/sync_user/repository.rb @@ -30,11 +30,11 @@ module Travis private def find - ::Repository.where(:owner_name => owner_name, :name => name).first + ::Repository.where(:github_id => github_id).first end def create - ::Repository.create!(:owner_name => owner_name, :name => name) + ::Repository.create!(:owner_name => owner_name, :name => name, github_id: github_id) end # instrument :create, :level => :debug @@ -86,6 +86,10 @@ module Travis data['name'] end + def github_id + data['id'] + end + def permission_data data['permissions'] end diff --git a/spec/travis/github/services/sync_user/repository_spec.rb b/spec/travis/github/services/sync_user/repository_spec.rb index <HASH>..<HASH> 100644 --- a/spec/travis/github/services/sync_user/repository_spec.rb +++ b/spec/travis/github/services/sync_user/repository_spec.rb @@ -7,15 +7,15 @@ describe Travis::Github::Services::SyncUser::Repository do let(:run) { lambda { described_class.new(user, repo).run } } describe 'find or create repository' do - let(:repo) { { 'name' => 'minimal', 'owner' => { 'login' => 'sven' }, 'permissions' => { 'admin' => false, 'push' => false, 'pull' => true } } } + let(:repo) { { 'id' => 100, 'name' => 'minimal', 'owner' => { 'login' => 'sven' }, 'permissions' => { 'admin' => false, 'push' => false, 'pull' => true } } } it 'creates a new repository per record if not yet present' do run.call - Repository.find_by_owner_name_and_name('sven', 'minimal').should be_present + Repository.find_by_github_id(100).should be_present end it 'does not create a new repository' do - Repository.create!(:owner_name => 'sven', :name => 'minimal') + Repository.create!(:owner_name => 'sven-1', :name => 'minimal-2', :github_id => 100) run.should_not change(Repository, :count) end end
Search for repos by github_id on user sync
travis-ci_travis-core
train
178a8f87f313fd38969c2a1f07963e24e45238ff
diff --git a/pkg/kubectl/cmd/util/openapi/BUILD b/pkg/kubectl/cmd/util/openapi/BUILD index <HASH>..<HASH> 100644 --- a/pkg/kubectl/cmd/util/openapi/BUILD +++ b/pkg/kubectl/cmd/util/openapi/BUILD @@ -36,6 +36,7 @@ go_test( deps = [ ":go_default_library", "//pkg/kubectl/cmd/util/openapi/testing:go_default_library", + "//vendor/github.com/googleapis/gnostic/OpenAPIv2:go_default_library", "//vendor/github.com/onsi/ginkgo:go_default_library", "//vendor/github.com/onsi/ginkgo/config:go_default_library", "//vendor/github.com/onsi/ginkgo/types:go_default_library", diff --git a/pkg/kubectl/cmd/util/openapi/openapi_getter_test.go b/pkg/kubectl/cmd/util/openapi/openapi_getter_test.go index <HASH>..<HASH> 100644 --- a/pkg/kubectl/cmd/util/openapi/openapi_getter_test.go +++ b/pkg/kubectl/cmd/util/openapi/openapi_getter_test.go @@ -19,27 +19,36 @@ package openapi_test import ( "fmt" + openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "k8s.io/kubernetes/pkg/kubectl/cmd/util/openapi" - tst "k8s.io/kubernetes/pkg/kubectl/cmd/util/openapi/testing" ) +// FakeCounter returns a "null" document and the specified error. It +// also counts how many times the OpenAPISchema method has been called. +type FakeCounter struct { + Calls int + Err error +} + +func (f *FakeCounter) OpenAPISchema() (*openapi_v2.Document, error) { + f.Calls = f.Calls + 1 + return nil, f.Err +} + var _ = Describe("Getting the Resources", func() { - var client *tst.FakeClient - var expectedData openapi.Resources + var client FakeCounter var instance openapi.Getter + var expectedData openapi.Resources BeforeEach(func() { - client = tst.NewFakeClient(&fakeSchema) - d, err := fakeSchema.OpenAPISchema() + client = FakeCounter{} + instance = openapi.NewOpenAPIGetter(&client) + var err error + expectedData, err = openapi.NewOpenAPIData(nil) Expect(err).To(BeNil()) - - expectedData, err = openapi.NewOpenAPIData(d) - Expect(err).To(BeNil()) - - instance = openapi.NewOpenAPIGetter(client) }) Context("when the server returns a successful result", func() { diff --git a/pkg/kubectl/cmd/util/openapi/testing/openapi.go b/pkg/kubectl/cmd/util/openapi/testing/openapi.go index <HASH>..<HASH> 100644 --- a/pkg/kubectl/cmd/util/openapi/testing/openapi.go +++ b/pkg/kubectl/cmd/util/openapi/testing/openapi.go @@ -65,33 +65,6 @@ func (f *Fake) OpenAPISchema() (*openapi_v2.Document, error) { return f.document, f.err } -// FakeClient implements a dummy OpenAPISchemaInterface that uses the -// fake OpenAPI schema given as a parameter, and count the number of -// call to the function. -type FakeClient struct { - Calls int - Err error - - fake *Fake -} - -// NewFakeClient creates a new FakeClient from the given Fake. -func NewFakeClient(f *Fake) *FakeClient { - return &FakeClient{fake: f} -} - -// OpenAPISchema returns a OpenAPI Document as returned by the fake, but -// it also counts the number of calls. -func (f *FakeClient) OpenAPISchema() (*openapi_v2.Document, error) { - f.Calls = f.Calls + 1 - - if f.Err != nil { - return nil, f.Err - } - - return f.fake.OpenAPISchema() -} - // FakeResources is a wrapper to directly load the openapi schema from a // file, and get the schema for given GVK. This is only for test since // it's assuming that the file is there and everything will go fine.
openapi: Remove FakeClient from testing library and make a simplified version of it where needed.
kubernetes_kubernetes
train
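The kubernetes commit above inlines a small call-counting fake in place of the shared FakeClient. The same test-double shape in Python, as a sketch (method and field names are illustrative, not the repo's API):

```python
class FakeCounter:
    """Returns a 'null' document and a configured error, counting every call."""

    def __init__(self, err=None):
        self.calls = 0
        self.err = err  # exception instance to raise, or None for success

    def open_api_schema(self):
        self.calls += 1
        if self.err is not None:
            raise self.err
        return None


fake = FakeCounter(err=RuntimeError("boom"))
try:
    fake.open_api_schema()
except RuntimeError:
    pass
assert fake.calls == 1  # the test can now assert on call counts directly
```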
72b6e9278feefa6e73d2ca1ecd6b8f5ac7005934
diff --git a/graylog2-server/src/main/java/org/graylog2/bindings/PersistenceServicesBindings.java b/graylog2-server/src/main/java/org/graylog2/bindings/PersistenceServicesBindings.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/main/java/org/graylog2/bindings/PersistenceServicesBindings.java +++ b/graylog2-server/src/main/java/org/graylog2/bindings/PersistenceServicesBindings.java @@ -17,6 +17,8 @@ package org.graylog2.bindings; import com.google.inject.AbstractModule; +import org.graylog2.agents.AgentService; +import org.graylog2.agents.AgentServiceImpl; import org.graylog2.alarmcallbacks.AlarmCallbackConfigurationService; import org.graylog2.alarmcallbacks.AlarmCallbackConfigurationServiceImpl; import org.graylog2.alerts.AlertService; @@ -43,10 +45,13 @@ import org.graylog2.security.MongoDBSessionService; import org.graylog2.security.MongoDBSessionServiceImpl; import org.graylog2.security.ldap.LdapSettingsService; import org.graylog2.security.ldap.LdapSettingsServiceImpl; -import org.graylog2.streams.*; +import org.graylog2.shared.users.UserService; +import org.graylog2.streams.StreamRuleService; +import org.graylog2.streams.StreamRuleServiceImpl; +import org.graylog2.streams.StreamService; +import org.graylog2.streams.StreamServiceImpl; import org.graylog2.system.activities.SystemMessageService; import org.graylog2.system.activities.SystemMessageServiceImpl; -import org.graylog2.shared.users.UserService; import org.graylog2.users.UserServiceImpl; /** @@ -72,5 +77,6 @@ public class PersistenceServicesBindings extends AbstractModule { bind(LdapSettingsService.class).to(LdapSettingsServiceImpl.class); bind(MongoDBSessionService.class).to(MongoDBSessionServiceImpl.class); bind(AlarmCallbackConfigurationService.class).to(AlarmCallbackConfigurationServiceImpl.class); + bind(AgentService.class).to(AgentServiceImpl.class); } }
Binding AgentService interface to implementation class.
Graylog2_graylog2-server
train
718c09da0fcf28e333e12d0ae98e1c5f864e0b50
diff --git a/desi.gemspec b/desi.gemspec index <HASH>..<HASH> 100644 --- a/desi.gemspec +++ b/desi.gemspec @@ -17,6 +17,7 @@ an Elastic Search local install for development purposes.} gem.add_dependency "boson" gem.add_dependency "cocaine", "~> 0.5.3" gem.add_dependency "addressable" + gem.add_dependency "semantic", "~> 1.3.0" gem.add_development_dependency "rake" gem.add_development_dependency "rspec" diff --git a/lib/desi/upstream.rb b/lib/desi/upstream.rb index <HASH>..<HASH> 100644 --- a/lib/desi/upstream.rb +++ b/lib/desi/upstream.rb @@ -2,6 +2,7 @@ require "desi/http_client" require "json" +require "semantic" module Desi class Upstream @@ -34,7 +35,7 @@ module Desi protected def sortable_version - version.split('.').map {|c| c.to_i } + @sortable_version ||= Semantic::Version.new(version.sub(%r{.(beta|alpha|rc)}i, '-\1')) end end diff --git a/spec/desi/upstream_spec.rb b/spec/desi/upstream_spec.rb index <HASH>..<HASH> 100644 --- a/spec/desi/upstream_spec.rb +++ b/spec/desi/upstream_spec.rb @@ -7,8 +7,24 @@ describe Desi::Upstream::Release do let(:v1) { Desi::Upstream::Release.new("v1.0.0", "") } let(:v09) { Desi::Upstream::Release.new("v0.90.10", "") } - it "sort the v1.0.0 before v0.90.10" do + it "sorts the v1.0.0 before v0.90.10" do expect([v09, v1].sort).to eql([v1, v09]) end + + it "sorts 1.0.0.RC2 before 1.0.0" do + expect([ + Desi::Upstream::Release.new("v1.0.0.Beta1", ""), + Desi::Upstream::Release.new("v1.0.0.RC2", ""), + Desi::Upstream::Release.new("v1.0.0", ""), + Desi::Upstream::Release.new("v1.0.0.Beta2", ""), + Desi::Upstream::Release.new("v1.0.0.RC1", ""), + ].sort).to eql([ + Desi::Upstream::Release.new("v1.0.0", ""), + Desi::Upstream::Release.new("v1.0.0.RC2", ""), + Desi::Upstream::Release.new("v1.0.0.RC1", ""), + Desi::Upstream::Release.new("v1.0.0.Beta2", ""), + Desi::Upstream::Release.new("v1.0.0.Beta1", "") + ]) + end end end
Fix releases sorting glitch with beta and rc releases closes #<I>
af83_desi
train
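The desi fix rewrites tags like "v1.0.0.RC2" into semver form so pre-releases sort below the final release. A rough Python sketch of the same ordering, assuming well-formed tags (the regex and rank table are illustrative, not the semantic gem's exact rules):

```python
import re

PRE_RANK = {"alpha": 0, "beta": 1, "rc": 2}  # pre-releases, lowest first

def sort_key(tag):
    # assumes well-formed tags like "v1.0.0", "v1.0.0.RC2", "v0.90.10"
    m = re.match(r"v?(\d+)\.(\d+)\.(\d+)(?:[.-](alpha|beta|rc)(\d*))?$", tag, re.I)
    major, minor, patch, pre, pre_n = m.groups()
    nums = (int(major), int(minor), int(patch))
    if pre is None:
        return nums + (3, 0)  # a final release outranks any of its pre-releases
    return nums + (PRE_RANK[pre.lower()], int(pre_n or 0))

tags = ["v1.0.0.Beta1", "v1.0.0.RC2", "v1.0.0", "v0.90.10"]
print(sorted(tags, key=sort_key, reverse=True))
# ['v1.0.0', 'v1.0.0.RC2', 'v1.0.0.Beta1', 'v0.90.10']
```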
072eb1859773bceaff2a42bb02cfa83f08107742
diff --git a/kenl380/pylib/__version__.py b/kenl380/pylib/__version__.py index <HASH>..<HASH> 100644 --- a/kenl380/pylib/__version__.py +++ b/kenl380/pylib/__version__.py @@ -1,4 +1,4 @@ -VERSION = (0, 8, '9b1') +VERSION = (1, 0, '0b1') __version__ = '.'.join(map(str, VERSION))
Take version to <I>b1 in prep for release
kenlowrie_pylib
train
495124e2189d8cd811449d68e541a2f4631386b4
diff --git a/google-daemon/usr/share/google/google_daemon/accounts_manager.py b/google-daemon/usr/share/google/google_daemon/accounts_manager.py index <HASH>..<HASH> 100644 --- a/google-daemon/usr/share/google/google_daemon/accounts_manager.py +++ b/google-daemon/usr/share/google/google_daemon/accounts_manager.py @@ -58,6 +58,8 @@ class AccountsManager(object): os.close(w) r = os.fdopen(r) # turn r into a file object self.desired_accounts.ssh_keys_etag = r.read() + r.close() + logging.debug('New etag: %s', self.desired_accounts.ssh_keys_etag) os.waitpid(pid, 0) else: # we are the child
Added logging for passing etag to parent
GoogleCloudPlatform_compute-image-packages
train
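The compute-image-packages change hands the refreshed ssh-keys etag from a forked child back to the parent over a pipe, and this commit closes the read end so the descriptor is not leaked on every loop. A stripped-down, POSIX-only Python sketch of that handoff (the etag value is a placeholder):

```python
import os

r, w = os.pipe()
pid = os.fork()
if pid == 0:
    # child: do the sync work, then report the new etag to the parent
    os.close(r)
    with os.fdopen(w, "w") as out:
        out.write("etag-12345")  # placeholder for the real ssh-keys etag
    os._exit(0)
else:
    # parent: read the etag, close the reader, and reap the child
    os.close(w)
    with os.fdopen(r) as src:  # the with-block closes the fd, avoiding the leak
        etag = src.read()
    os.waitpid(pid, 0)
    print("new etag:", etag)
```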
6c719b8f0b6cd635987876e9b35de07a4d2a331b
diff --git a/app/models/unidom/visitor/concerns/as_credential.rb b/app/models/unidom/visitor/concerns/as_credential.rb index <HASH>..<HASH> 100644 --- a/app/models/unidom/visitor/concerns/as_credential.rb +++ b/app/models/unidom/visitor/concerns/as_credential.rb @@ -1,23 +1,32 @@ module Unidom::Visitor::Concerns::AsCredential - extend ActiveSupport::Concern + extend ActiveSupport::Concern + include Unidom::Common::Concerns::ArgumentValidation included do |includer| has_one :authenticating, class_name: 'Unidom::Visitor::Authenticating', as: :credential def authenticate!(it, at: Time.now, flag_code: 'RQRD') + + assert_present! :it, it + assert_present! :at, at + assert_present! :flag_code, flag_code + return authenticating if authenticating.present? create_authenticating! visitor: it, flag_code: flag_code, opened_at: at + end def authenticate?(it, at: Time.now, flag_code: 'RQRD') + return false if authenticating.blank? result = true result &&= it==authenticating.visitor if it.present? result &&= at<=authenticating.closed_at&&at>=authenticating.opened_at if at.present? result &&= flag_code==authenticating.flag_code if flag_code.present? result + end end
1. Improve the As Credential concern to validate the arguments of the #authenticate! method.
topbitdu_unidom-visitor
train
6e298ddc9fa5ddb186282b59c5026c8e8208fe68
diff --git a/lib/marathon/health_check.rb b/lib/marathon/health_check.rb index <HASH>..<HASH> 100644 --- a/lib/marathon/health_check.rb +++ b/lib/marathon/health_check.rb @@ -13,20 +13,21 @@ class Marathon::HealthCheck < Marathon::Base } ACCESSORS = %w[ command gracePeriodSeconds intervalSeconds maxConsecutiveFailures - path portIndex protocol timeoutSeconds ] + path portIndex protocol timeoutSeconds ignoreHttp1xx ] # Create a new health check object. # ++hash++: Hash returned by API. def initialize(hash) super(Marathon::Util.merge_keywordized_hash(DEFAULTS, hash), ACCESSORS) - Marathon::Util.validate_choice(:protocol, protocol, %w[HTTP TCP COMMAND]) + Marathon::Util.validate_choice(:protocol, protocol, %w[HTTP TCP COMMAND HTTPS MESOS_HTTP MESOS_HTTPS MESOS_TCP]) end def to_s if protocol == 'COMMAND' "Marathon::HealthCheck { :protocol => #{protocol} :command => #{command} }" - elsif protocol == 'HTTP' - "Marathon::HealthCheck { :protocol => #{protocol} :portIndex => #{portIndex} :path => #{path} }" + elsif %w[HTTP HTTPS MESOS_HTTP MESOS_HTTPS].include? protocol + "Marathon::HealthCheck { :protocol => #{protocol} :portIndex => #{portIndex} :path => #{path}" + + (%w[HTTP HTTPS].include? protocol and !ignoreHttp1xx.nil? ? " :ignoreHttp1xx => #{ignoreHttp1xx}" : '') + " }" else "Marathon::HealthCheck { :protocol => #{protocol} :portIndex => #{portIndex} }" end
Fixes #<I> - Health check should allow HTTPS as the protocol. Also add MESOS_* health checks.
otto-de_marathon-api
train
eb38e9bbb85935bdee6e9f9f10b0f7a3dfb00e72
diff --git a/test/send.js b/test/send.js index <HASH>..<HASH> 100644 --- a/test/send.js +++ b/test/send.js @@ -403,50 +403,80 @@ describe('send(file).pipe(res)', function () { }) describe('with conditional-GET', function () { - it('should respond with 304 on a match', function (done) { - request(app) + it('should remove Content headers with 304', function (done) { + var server = createServer({root: fixtures}, function (req, res) { + res.setHeader('Content-Language', 'en-US') + res.setHeader('Content-Location', 'http://localhost/name.txt') + res.setHeader('Contents', 'foo') + }) + + request(server) .get('/name.txt') .expect(200, function (err, res) { if (err) return done(err) - request(app) + request(server) .get('/name.txt') .set('If-None-Match', res.headers.etag) + .expect(shouldNotHaveHeader('Content-Language')) + .expect(shouldNotHaveHeader('Content-Length')) + .expect(shouldNotHaveHeader('Content-Type')) + .expect('Content-Location', 'http://localhost/name.txt') + .expect('Contents', 'foo') .expect(304, done) }) }) - it('should respond with 200 otherwise', function (done) { - request(app) - .get('/name.txt') - .expect(200, function (err, res) { - if (err) return done(err) + describe('where "If-Modified-Since" is set', function () { + it('should respond with 304 when unmodified', function (done) { request(app) .get('/name.txt') - .set('If-None-Match', '"123"') - .expect(200, 'tobi', done) + .expect(200, function (err, res) { + if (err) return done(err) + request(app) + .get('/name.txt') + .set('If-Modified-Since', res.headers['last-modified']) + .expect(304, done) + }) + }) + + it('should respond with 200 when modified', function (done) { + request(app) + .get('/name.txt') + .expect(200, function (err, res) { + if (err) return done(err) + var lmod = new Date(res.headers['last-modified']) + var date = new Date(lmod - 60000) + request(app) + .get('/name.txt') + .set('If-Modified-Since', date.toUTCString()) + .expect(200, 'tobi', done) + }) }) }) - it('should remove Content headers', function (done) { - var app = createServer({root: fixtures}, function (req, res) { - res.setHeader('Content-Language', 'en-US') - res.setHeader('Content-Location', 'http://localhost/name.txt') - res.setHeader('Contents', 'foo') + describe('where "If-None-Match" is set', function () { + it('should respond with 304 when ETag matched', function (done) { + request(app) + .get('/name.txt') + .expect(200, function (err, res) { + if (err) return done(err) + request(app) + .get('/name.txt') + .set('If-None-Match', res.headers.etag) + .expect(304, done) + }) }) - request(app) - .get('/name.txt') - .expect(200, function (err, res) { - if (err) return done(err) + it('should respond with 200 when ETag unmatched', function (done) { request(app) .get('/name.txt') - .set('If-None-Match', res.headers.etag) - .expect(shouldNotHaveHeader('Content-Language')) - .expect(shouldNotHaveHeader('Content-Length')) - .expect(shouldNotHaveHeader('Content-Type')) - .expect('Content-Location', 'http://localhost/name.txt') - .expect('Contents', 'foo') - .expect(304, done) + .expect(200, function (err, res) { + if (err) return done(err) + request(app) + .get('/name.txt') + .set('If-None-Match', '"123"') + .expect(200, 'tobi', done) + }) }) }) })
tests: expand the conditional-GET tests
pillarjs_send
train
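The expanded send tests pin down conditional-GET semantics: answer 304 when If-None-Match matches the current ETag, or when If-Modified-Since is not older than Last-Modified; answer 200 otherwise, and a 304 must drop the Content-* headers. A compact decision helper, as a sketch (real servers also handle "*" and comma-separated ETag lists):

```python
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime

def is_not_modified(headers, etag, last_modified):
    """Return True when the server may answer 304 instead of 200."""
    if_none_match = headers.get("If-None-Match")
    if if_none_match is not None:
        return if_none_match == etag  # simplified: no '*' or list handling
    if_modified_since = headers.get("If-Modified-Since")
    if if_modified_since is not None:
        try:
            return last_modified <= parsedate_to_datetime(if_modified_since)
        except (TypeError, ValueError):  # unparsable date: fall through to 200
            return False
    return False

last_modified = datetime(2024, 1, 1, tzinfo=timezone.utc)
headers = {"If-Modified-Since": "Mon, 01 Jan 2024 00:00:00 GMT"}
assert is_not_modified(headers, '"abc"', last_modified)  # unmodified -> 304
```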
8a2862df9db42bafff26552fd83e54a612574a01
diff --git a/blockstore/blockstored.py b/blockstore/blockstored.py index <HASH>..<HASH> 100644 --- a/blockstore/blockstored.py +++ b/blockstore/blockstored.py @@ -937,7 +937,6 @@ class BlockstoredRPC(jsonrpc.JSONRPC, object): """ Lookup the blockchain-derived profile for a name. """ - db = get_state_engine() try: @@ -956,6 +955,7 @@ class BlockstoredRPC(jsonrpc.JSONRPC, object): else: return name_record + def jsonrpc_get_name_blockchain_history( self, name, start_block, end_block ): """ Get the sequence of name operations processed for a given name. @@ -1504,7 +1504,23 @@ class BlockstoredRPC(jsonrpc.JSONRPC, object): """ db = get_state_engine() return db.get_consensus_at( block_id ) - + + + def jsonrpc_get_consensus_range( self, block_id_start, block_id_end ): + """ + Get a range of consensus hashes. The range is inclusive. + """ + db = get_state_engine() + ret = [] + for b in xrange(block_id_start, block_id_end+1): + ch = db.get_consensus_at( b ) + if ch is None: + break + + ret.append(ch) + + return ret + def jsonrpc_get_block_from_consensus( self, consensus_hash ): """ @@ -1546,9 +1562,8 @@ def stop_server( from_signal, clean=False, kill=False ): log.info('Caught fatal signal; exiting blockstored server') # stop building new state if we're in the middle of it - db = get_state_engine() - virtualchain.stop_sync_virtualchain( db ) - + virtualchain_hooks.stop_sync_blockchain() + # stop API server if blockstored_api_server is not None: log.debug("Stopping API server") @@ -1729,8 +1744,7 @@ class IndexerThread( threading.Thread ): # bring us up to speed set_indexing( True ) - db = get_state_engine() - virtualchain.sync_virtualchain( bt_opts, current_block, db ) + virtualchain_hooks.sync_blockchain( bt_opts, current_block ) set_indexing( False ) @@ -1744,8 +1758,7 @@ class IndexerThread( threading.Thread ): Request to stop the thread """ self.running = False - db = get_state_engine() - virtualchain.stop_sync_virtualchain( db ) + virtualchain_hooks.stop_sync_blockchain() def start_indexer_thread():
Add get_consensus_range(); some fixups to infrequently-used code paths
blockstack_blockstack-core
train
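`get_consensus_range` above walks an inclusive block range and stops at the first block without a consensus hash. The same loop in Python, with a stub standing in for the state engine:

```python
def get_consensus_range(db, block_id_start, block_id_end):
    """Inclusive on both ends (hence the +1); stop at the first gap."""
    hashes = []
    for block_id in range(block_id_start, block_id_end + 1):
        consensus_hash = db.get_consensus_at(block_id)
        if consensus_hash is None:
            break
        hashes.append(consensus_hash)
    return hashes

class FakeDB:
    def get_consensus_at(self, block_id):
        return {600: "aa", 601: "bb"}.get(block_id)  # 602 is missing

assert get_consensus_range(FakeDB(), 600, 602) == ["aa", "bb"]
```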
21f08f494f0c5e4d6b6331416097c00478aac9b0
diff --git a/languagetool-standalone/src/main/java/org/languagetool/dev/RuleOverview.java b/languagetool-standalone/src/main/java/org/languagetool/dev/RuleOverview.java
index <HASH>..<HASH> 100644
--- a/languagetool-standalone/src/main/java/org/languagetool/dev/RuleOverview.java
+++ b/languagetool-standalone/src/main/java/org/languagetool/dev/RuleOverview.java
@@ -29,10 +29,7 @@ import java.io.FileFilter;
 import java.io.IOException;
 import java.net.URL;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
+import java.util.*;
 
 /**
  * Command line tool to list supported languages and their number of rules.
@@ -41,6 +38,9 @@ import java.util.List;
  */
 public final class RuleOverview {
 
+  private static final List<String> LANGUAGES_WITH_MAINTAINER_NEED =
+      Arrays.asList("en", "ja");
+
   public static void main(final String[] args) throws IOException {
     if (args.length != 1) {
       System.out.println("Usage: " + RuleOverview.class.getName() + " <webRoot>");
@@ -74,7 +74,6 @@ public final class RuleOverview {
     //setup false friends counting
     final String falseFriendFile = JLanguageTool.getDataBroker().getRulesDir()
         + File.separator + "false-friends.xml";
-    final URL falseFriendUrl = this.getClass().getResource(falseFriendFile);
     final String falseFriendRules = StringTools.readStream(Tools.getStream(falseFriendFile), "utf-8")
         .replaceAll("(?s)<!--.*?-->", "")
         .replaceAll("(?s)<rules.*?>", "");
@@ -137,19 +136,20 @@ public final class RuleOverview {
         overallJavaCount++;
       }
 
-      // false friends
-      if (falseFriendUrl == null) {
-        System.out.println("<td valign=\"top\" align=\"right\">0</td>");
+      // false friends:
+      final int count = countFalseFriendRules(falseFriendRules, lang);
+      System.out.print("<td valign=\"top\" align=\"right\">" + count + "</td>");
+      //System.out.print("<td valign=\"top\">" + (isAutoDetected(lang.getShortName()) ? "yes" : "-") + "</td>");
+
+      // maintainer information:
+      final StringBuilder maintainerInfo = getMaintainerInfo(lang);
+      final String maintainerText;
+      if (LANGUAGES_WITH_MAINTAINER_NEED.contains(langCode)) {
+        maintainerText = " - <span class='maintainerNeeded'><a href='http://wiki.languagetool.org/tasks-for-language-maintainers'>Looking for new maintainer</a></span>";
       } else {
-        final int count = countFalseFriendRules(falseFriendRules, lang);
-        System.out.print("<td valign=\"top\" align=\"right\">" + count + "</td>");
-
-        //System.out.print("<td valign=\"top\">" + (isAutoDetected(lang.getShortName()) ? "yes" : "-") + "</td>");
-
-        // maintainer information:
-        final StringBuilder maintainerInfo = getMaintainerInfo(lang);
-        System.out.print("<td valign=\"top\" align=\"left\">" + maintainerInfo.toString() + "</td>");
+        maintainerText = "";
       }
+      System.out.print("<td valign=\"top\" align=\"left\">" + maintainerInfo.toString() + maintainerText + "</td>");
 
       System.out.println("</tr>");
     }
@@ -252,23 +252,23 @@ public final class RuleOverview {
     return false;
   }*/
 
-}
-
-class JavaFilter implements FileFilter {
+  private class JavaFilter implements FileFilter {
 
-  private final String langName;
+    private final String langName;
 
-  public JavaFilter(String langName) {
-    this.langName = langName;
-  }
+    public JavaFilter(String langName) {
+      this.langName = langName;
+    }
 
-  public boolean accept(final File f) {
-    final String filename = f.getName();
-    final boolean isAbstractTopClass = filename.endsWith(langName + "Rule.java");
-    if (filename.endsWith(".java") && !isAbstractTopClass) {
-      return true;
+    public boolean accept(final File f) {
+      final String filename = f.getName();
+      final boolean isAbstractTopClass = filename.endsWith(langName + "Rule.java");
+      if (filename.endsWith(".java") && !isAbstractTopClass) {
+        return true;
+      }
+      return false;
     }
-    return false;
+
   }
 }
prepare marking languages that are in need of a new maintainer
languagetool-org_languagetool
train
c085f5c098908b1c32eb64b9601f8e8c95db2e8f
diff --git a/fn/publish.go b/fn/publish.go index <HASH>..<HASH> 100644 --- a/fn/publish.go +++ b/fn/publish.go @@ -5,7 +5,7 @@ import ( "net/http" "os" "os/exec" - "path/filepath" + "strings" functions "github.com/iron-io/functions_go" "github.com/urfave/cli" @@ -92,13 +92,17 @@ func (p *publishcmd) route(path string, ff *funcfile) error { return fmt.Errorf("error setting endpoint: %v", err) } - an, r := extractAppNameRoute(path) + // TODO: This is just a nasty hack and should be cleaned up all the way + pathsSplit := strings.Split(ff.FullName(), "/") + if ff.App == nil { - ff.App = &an + ff.App = &pathsSplit[0] } if ff.Route == nil { - ff.Route = &r + path := "/" + strings.Split(pathsSplit[1], ":")[0] + ff.Route = &path } + if ff.Memory == nil { ff.Memory = new(int64) } @@ -108,11 +112,12 @@ func (p *publishcmd) route(path string, ff *funcfile) error { body := functions.RouteWrapper{ Route: functions.Route{ - Path: *ff.Route, - Image: ff.FullName(), - Memory: *ff.Memory, - Type_: *ff.Type, - Config: expandEnvConfig(ff.Config), + Path: *ff.Route, + Image: ff.FullName(), + AppName: *ff.App, + Memory: *ff.Memory, + Type_: *ff.Type, + Config: expandEnvConfig(ff.Config), }, } @@ -136,20 +141,6 @@ func expandEnvConfig(configs map[string]string) map[string]string { return configs } -func extractAppNameRoute(path string) (appName, route string) { - - // The idea here is to extract the root-most directory name - // as application name, it turns out that stdlib tools are great to - // extract the deepest one. Thus, we revert the string and use the - // stdlib as it is - and revert back to its normal content. Not fastest - // ever, but it is simple. - - rpath := reverse(path) - rroute, rappName := filepath.Split(rpath) - route = filepath.Dir(reverse(rroute)) - return reverse(rappName), route -} - func reverse(s string) string { r := []rune(s) for i, j := 0, len(r)-1; i < len(r)/2; i, j = i+1, j-1 {
Add fn publish fallback option to extract appname and path (#<I>) * Add fn publish fallback option to extract appname and path * Add todo for fn publish hack
iron-io_functions
train
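The fn hack splits the function's full image name, e.g. myapp/hello:0.0.1, into the app name and the route. A hedged Python restatement of that parsing, sharing the Go version's assumptions (exactly one "/", an optional ":tag"):

```python
def app_and_route(full_name):
    """'myapp/hello:0.0.1' -> ('myapp', '/hello')."""
    app, rest = full_name.split("/", 1)       # root directory -> app name
    route = "/" + rest.split(":", 1)[0]       # strip the image tag, if any
    return app, route

assert app_and_route("myapp/hello:0.0.1") == ("myapp", "/hello")
assert app_and_route("myapp/hello") == ("myapp", "/hello")
```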
339be65d669c83fd4c64541a9e82086dc5e64682
diff --git a/actiontext/lib/action_text/system_test_helper.rb b/actiontext/lib/action_text/system_test_helper.rb index <HASH>..<HASH> 100644 --- a/actiontext/lib/action_text/system_test_helper.rb +++ b/actiontext/lib/action_text/system_test_helper.rb @@ -24,7 +24,7 @@ module ActionText # # <input id="trix_input_1" name="message[content]" type="hidden"> # # <trix-editor input="trix_input_1"></trix-editor> # fill_in_rich_text_area "message[content]", with: "Hello <em>world!</em>" - def fill_in_rich_text_area(locator, with:) + def fill_in_rich_text_area(locator = nil, with:) find(:rich_text_area, locator).execute_script("this.editor.loadHTML(arguments[0])", with.to_s) end end @@ -33,12 +33,16 @@ end Capybara.add_selector :rich_text_area do label "rich-text area" xpath do |locator| - input_located_by_name = XPath.anywhere(:input).where(XPath.attr(:name) == locator).attr(:id) + if locator.nil? + XPath.descendant(:"trix-editor") + else + input_located_by_name = XPath.anywhere(:input).where(XPath.attr(:name) == locator).attr(:id) - XPath.descendant(:"trix-editor").where \ - XPath.attr(:id).equals(locator) | - XPath.attr(:placeholder).equals(locator) | - XPath.attr(:"aria-label").equals(locator) | - XPath.attr(:input).equals(input_located_by_name) + XPath.descendant(:"trix-editor").where \ + XPath.attr(:id).equals(locator) | + XPath.attr(:placeholder).equals(locator) | + XPath.attr(:"aria-label").equals(locator) | + XPath.attr(:input).equals(input_located_by_name) + end end end diff --git a/actiontext/test/system/system_test_helper_test.rb b/actiontext/test/system/system_test_helper_test.rb index <HASH>..<HASH> 100644 --- a/actiontext/test/system/system_test_helper_test.rb +++ b/actiontext/test/system/system_test_helper_test.rb @@ -30,4 +30,10 @@ class ActionText::SystemTestHelperTest < ApplicationSystemTestCase fill_in_rich_text_area "message[content]", with: "Hello world!" assert_selector :field, "message[content]", with: /Hello world!/, type: "hidden" end + + test "filling in the first rich-text area" do + visit new_message_url + fill_in_rich_text_area with: "Hello world!" + assert_selector :field, "message[content]", with: /Hello world!/, type: "hidden" + end end
Allow filling in the only rich-text area without a locator
rails_rails
train
2ce951bc9722a4fde3a10d4dcd214b6470c62fa6
diff --git a/src/pybel/manager/models.py b/src/pybel/manager/models.py index <HASH>..<HASH> 100644 --- a/src/pybel/manager/models.py +++ b/src/pybel/manager/models.py @@ -651,15 +651,15 @@ class Evidence(Base): def __str__(self): return '{}:{}'.format(self.citation, self.sha512[:8]) - def to_json(self, include_id=False): + def to_json(self, include_id: bool = False): """Create a dictionary that is used to recreate the edge data dictionary for a :class:`BELGraph`. - :param bool include_id: If true, includes the model identifier + :param include_id: If true, includes the model identifier :return: Dictionary containing citation and evidence for a :class:`BELGraph` edge. :rtype: dict """ result = { - CITATION: self.citation.to_json(), + CITATION: self.citation.to_json(include_id=include_id), EVIDENCE: self.text }
Propagate inclusion of ids to citation
pybel_pybel
train
79e9a9e065ea0e9b44cc5d14fecfc75987bbb1ee
diff --git a/cluster/gce/gci/master-helper.sh b/cluster/gce/gci/master-helper.sh index <HASH>..<HASH> 100755 --- a/cluster/gce/gci/master-helper.sh +++ b/cluster/gce/gci/master-helper.sh @@ -94,9 +94,16 @@ function create-master-instance-internal() { preemptible_master="--preemptible --maintenance-policy TERMINATE" fi + local enable_ip_aliases + if [[ "${NODE_IPAM_MODE:-}" == "CloudAllocator" ]]; then + enable_ip_aliases=true + else + enable_ip_aliases=false + fi + local network=$(make-gcloud-network-argument \ "${NETWORK_PROJECT}" "${REGION}" "${NETWORK}" "${SUBNETWORK:-}" \ - "${address:-}" "${ENABLE_IP_ALIASES:-}" "${IP_ALIAS_SIZE:-}") + "${address:-}" "${enable_ip_aliases:-}" "${IP_ALIAS_SIZE:-}") local metadata="kube-env=${KUBE_TEMP}/master-kube-env.yaml" metadata="${metadata},user-data=${KUBE_ROOT}/cluster/gce/gci/master.yaml" diff --git a/pkg/controller/node/ipam/sync/sync.go b/pkg/controller/node/ipam/sync/sync.go index <HASH>..<HASH> 100644 --- a/pkg/controller/node/ipam/sync/sync.go +++ b/pkg/controller/node/ipam/sync/sync.go @@ -244,7 +244,9 @@ func (op *updateOp) validateRange(ctx context.Context, sync *NodeSync, node *v1. // alias. func (op *updateOp) updateNodeFromAlias(ctx context.Context, sync *NodeSync, node *v1.Node, aliasRange *net.IPNet) error { if sync.mode != SyncFromCloud { - glog.Warningf("Detect mode %q while expect to sync from cloud", sync.mode) + sync.kubeAPI.EmitNodeWarningEvent(node.Name, InvalidModeEvent, + "Cannot sync from cloud in mode %q", sync.mode) + return fmt.Errorf("cannot sync from cloud in mode %q", sync.mode) } glog.V(2).Infof("Updating node spec with alias range, node.PodCIDR = %v", aliasRange) @@ -274,7 +276,9 @@ func (op *updateOp) updateNodeFromAlias(ctx context.Context, sync *NodeSync, nod // updateAliasFromNode updates the cloud alias given the node allocation. func (op *updateOp) updateAliasFromNode(ctx context.Context, sync *NodeSync, node *v1.Node) error { if sync.mode != SyncFromCluster { - glog.Warningf("Detect mode %q while expect to sync from cluster", sync.mode) + sync.kubeAPI.EmitNodeWarningEvent( + node.Name, InvalidModeEvent, "Cannot sync to cloud in mode %q", sync.mode) + return fmt.Errorf("cannot sync to cloud in mode %q", sync.mode) } _, aliasRange, err := net.ParseCIDR(node.Spec.PodCIDR) diff --git a/pkg/controller/node/ipam/sync/sync_test.go b/pkg/controller/node/ipam/sync/sync_test.go index <HASH>..<HASH> 100644 --- a/pkg/controller/node/ipam/sync/sync_test.go +++ b/pkg/controller/node/ipam/sync/sync_test.go @@ -145,10 +145,11 @@ func TestNodeSyncUpdate(t *testing.T) { events: []fakeEvent{{"node1", "CloudCIDRAllocatorMismatch"}}, }, { - desc: "update alias from node", - mode: SyncFromCloud, - node: nodeWithCIDRRange, - events: nil, + desc: "update alias from node", + mode: SyncFromCloud, + node: nodeWithCIDRRange, + events: []fakeEvent{{"node1", "CloudCIDRAllocatorInvalidMode"}}, + wantError: true, }, { desc: "update alias from node", @@ -164,11 +165,12 @@ func TestNodeSyncUpdate(t *testing.T) { // XXX/bowei -- validation }, { - desc: "update node from alias", - mode: SyncFromCluster, - node: nodeWithoutCIDRRange, - fake: fakeAPIs{aliasRange: test.MustParseCIDR("10.1.2.3/16")}, - events: nil, + desc: "update node from alias", + mode: SyncFromCluster, + node: nodeWithoutCIDRRange, + fake: fakeAPIs{aliasRange: test.MustParseCIDR("10.1.2.3/16")}, + events: []fakeEvent{{"node1", "CloudCIDRAllocatorInvalidMode"}}, + wantError: true, }, { desc: "allocate range",
A couple more changes. Specifically, a) make the same changes to master-helper.sh for gci as container-linux.sh does; b) revert changes to sync.go and sync_test.go.
kubernetes_kubernetes
train
c251b53cef6b763565cf068e07e613fc46b3654b
diff --git a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBPropertyHelper.java b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBPropertyHelper.java index <HASH>..<HASH> 100644 --- a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBPropertyHelper.java +++ b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBPropertyHelper.java @@ -71,8 +71,14 @@ public class MongoDBPropertyHelper extends ParserPropertyHelper implements Prope return MongoDBDialect.ID_FIELDNAME; } String column = getColumn( persister, propertyPath ); - if ( propertyPath.size() > 1 && propertyPath.get( 0 ).equals( identifierPropertyName ) ) { - column = MongoDBDialect.ID_FIELDNAME + "." + column.substring( propertyPath.get( 0 ).length() + 1 ); + if ( propertyPath.size() > 1 ) { + String prop = propertyPath.get( 0 ); + if ( prop.equals( identifierPropertyName ) ) { + if ( column.startsWith( identifierPropertyName + "." ) ) { + column = column.substring( prop.length() + 1 ); + } + column = MongoDBDialect.ID_FIELDNAME + "." + column; + } } return column; }
OGM-<I> Avoid exception when @Column is used on an embedded id. Overriding the column name on an embedded id used to cause a StringIndexOutOfBoundsException.
hibernate_hibernate-ogm
train
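The hibernate-ogm fix only slices the identifier prefix off the column name after checking it is actually there, which is exactly what a @Column-renamed embedded-id field was missing. The guard, distilled into Python (names follow the Java code loosely):

```python
ID_FIELDNAME = "_id"  # MongoDBDialect.ID_FIELDNAME in the Java code

def mongo_column(identifier_property, column, path):
    """Map a property path onto the MongoDB column, slicing the id prefix
    only after a verified startswith check (the check the old code lacked)."""
    if len(path) > 1 and path[0] == identifier_property:
        prefix = identifier_property + "."
        if column.startswith(prefix):
            column = column[len(prefix):]
        column = ID_FIELDNAME + "." + column
    return column

# embedded id without @Column: 'id.countryCode' -> '_id.countryCode'
assert mongo_column("id", "id.countryCode", ["id", "countryCode"]) == "_id.countryCode"
# @Column-renamed id field: 'code' is kept intact instead of raising
assert mongo_column("id", "code", ["id", "countryCode"]) == "_id.code"
```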
8f7281fec2fb939c92f77301f71623e263bd99cd
diff --git a/clients/web/src/templates/widgets/dateTimeWidget.jade b/clients/web/src/templates/widgets/dateTimeWidget.jade index <HASH>..<HASH> 100644 --- a/clients/web/src/templates/widgets/dateTimeWidget.jade +++ b/clients/web/src/templates/widgets/dateTimeWidget.jade @@ -1,2 +1,2 @@ .input-group - input.input-sm.form-control(type="text" id="#{prefix}-datetime") + input.input-sm.form-control.g-datetime-widget(type="text") diff --git a/clients/web/src/views/widgets/DateTimeWidget.js b/clients/web/src/views/widgets/DateTimeWidget.js index <HASH>..<HASH> 100644 --- a/clients/web/src/views/widgets/DateTimeWidget.js +++ b/clients/web/src/views/widgets/DateTimeWidget.js @@ -6,21 +6,14 @@ */ girder.views.DateTimeWidget = girder.View.extend({ - /** - * @param [settings.prefix='default'] Prefix for element IDs in case - * multiple DateTimeWidgets are rendered simultaneously. - */ - initialize: function (settings) { - this.prefix = settings.prefix || 'default'; - this.dateTimeId = '#' + this.prefix + '-datetime'; + initialize: function () { }, render: function () { this.$el.html(girder.templates.dateTimeWidget({ - prefix: this.prefix })); - this.$(this.dateTimeId).datetimepicker({ + this.$('.g-datetime-widget').datetimepicker({ showClear: true, showTodayButton: true, useCurrent: 'day', @@ -89,7 +82,7 @@ girder.views.DateTimeWidget = girder.View.extend({ * Convenience function to access the datetimepicker on an element. */ _picker: function () { - var picker = this.$(this.dateTimeId).data('DateTimePicker'); + var picker = this.$('.g-datetime-widget').data('DateTimePicker'); return picker; } }); diff --git a/clients/web/test/spec/dateTimeWidgetSpec.js b/clients/web/test/spec/dateTimeWidgetSpec.js index <HASH>..<HASH> 100644 --- a/clients/web/test/spec/dateTimeWidgetSpec.js +++ b/clients/web/test/spec/dateTimeWidgetSpec.js @@ -5,19 +5,27 @@ describe('Test DateTimeWidget', function() { $('body').off(); widget = new girder.views.DateTimeWidget({ - parentView: null, - el: 'body', - prefix: 'test' + parentView: null }); widget.render(); }); it('create the widget', function() { - expect($('input#test-datetime').length).toBe(1); + expect(widget.$('input.g-datetime-widget').length).toBe(1); + }); + + it('create multiple widgets', function() { + var widget2 = new girder.views.DateTimeWidget({ + parentView: null + }); + widget2.render(); + + expect(widget.$('input.g-datetime-widget').length).toBe(1); + expect(widget2.$('input.g-datetime-widget').length).toBe(1); }); it('default initialization', function() { - expect($('input#test-datetime').val().length).toBe(0); + expect(widget.$('input.g-datetime-widget').length).toBe(1); expect(widget.date()).toBeNull(); expect(widget.dateString().length).toBe(0); });
DateTimeWidget: refer to input by class instead of id
girder_girder
train
d6ffb3d6efc31e6d5018be59b4017d63608b52cf
diff --git a/src/DataTable/Table.js b/src/DataTable/Table.js index <HASH>..<HASH> 100644 --- a/src/DataTable/Table.js +++ b/src/DataTable/Table.js @@ -45,7 +45,7 @@ class Table extends React.Component { }, className); const columnChildren = !!children - ? React.Children.toArray(children) + ? React.Children.toArray(children).filter(Boolean) : columns.map(column => <TableHeader key={column.name}
fix: Table preact compatibility (#<I>)
tleunen_react-mdl
train
968c499aca5ca71dc909ba7a632c5dbc4f58b101
diff --git a/railties/lib/rails/commands/server/server_command.rb b/railties/lib/rails/commands/server/server_command.rb index <HASH>..<HASH> 100644 --- a/railties/lib/rails/commands/server/server_command.rb +++ b/railties/lib/rails/commands/server/server_command.rb @@ -70,7 +70,7 @@ module Rails end def served_url - "#{options[:SSLEnable] ? 'https' : 'http'}://#{host}:#{port}" unless use_puma? + "#{options[:SSLEnable] ? 'https' : 'http'}://#{options[:Host]}:#{options[:Port]}" unless use_puma? end private diff --git a/railties/test/commands/server_test.rb b/railties/test/commands/server_test.rb index <HASH>..<HASH> 100644 --- a/railties/test/commands/server_test.rb +++ b/railties/test/commands/server_test.rb @@ -239,6 +239,12 @@ class Rails::Command::ServerCommandTest < ActiveSupport::TestCase ARGV.replace original_args end + def test_served_url + args = %w(-u webrick -b 127.0.0.1 -p 4567) + server = Rails::Server.new(parse_arguments(args)) + assert_equal "http://127.0.0.1:4567", server.served_url + end + private def run_command(*args) build_app
Fix "NameError: undefined local variable or method `host'" The `host` and `port` can't use this context.
rails_rails
train
a202be58911e0f213de0a8f385f37f6f1b5a58ca
diff --git a/src/main/java/spark/webserver/serialization/InputStreamSerializer.java b/src/main/java/spark/webserver/serialization/InputStreamSerializer.java index <HASH>..<HASH> 100644 --- a/src/main/java/spark/webserver/serialization/InputStreamSerializer.java +++ b/src/main/java/spark/webserver/serialization/InputStreamSerializer.java @@ -37,8 +37,7 @@ class InputStreamSerializer extends Serializer { @Override public void process(OutputStream outputStream, Object element) throws IOException { - String content = IOUtils.toString((InputStream) element); - outputStream.write(content.getBytes("utf-8")); + IOUtils.copy((InputStream) element, outputStream); } }
copy stream directly rather than buffer as a utf-8 string
perwendel_spark
train
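The spark change replaces read-everything-into-a-UTF-8-string with a direct stream-to-stream copy, avoiding both the full in-memory buffer and a lossy decode of non-UTF-8 payloads. The Python equivalent of that swap:

```python
import io
import shutil

def serialize(output_stream, element):
    # chunked copy: never decodes bytes, never holds the whole body in memory
    shutil.copyfileobj(element, output_stream)

src = io.BytesIO(b"\xff\xfe raw bytes, not necessarily valid UTF-8")
dst = io.BytesIO()
serialize(dst, src)
assert dst.getvalue().startswith(b"\xff\xfe")  # survives without a decode step
```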
60cb5707aac8f8b615be3ada252c69948b8f2d51
diff --git a/ios/src/playn/ios/IOSNet.java b/ios/src/playn/ios/IOSNet.java index <HASH>..<HASH> 100644 --- a/ios/src/playn/ios/IOSNet.java +++ b/ios/src/playn/ios/IOSNet.java @@ -55,9 +55,18 @@ class IOSNet implements Net try { WebResponse rsp = req.EndGetResponse(result); reader = new StreamReader(rsp.GetResponseStream()); - callback.onSuccess(reader.ReadToEnd()); - } catch (Throwable t) { - callback.onFailure(t); + final String data = reader.ReadToEnd(); + IOSPlatform.instance.queueAction(new Runnable() { + public void run () { + callback.onSuccess(data); + } + }); + } catch (final Throwable t) { + IOSPlatform.instance.queueAction(new Runnable() { + public void run () { + callback.onFailure(t); + } + }); } finally { if (reader != null) reader.Close(); diff --git a/ios/src/playn/ios/IOSPlatform.java b/ios/src/playn/ios/IOSPlatform.java index <HASH>..<HASH> 100644 --- a/ios/src/playn/ios/IOSPlatform.java +++ b/ios/src/playn/ios/IOSPlatform.java @@ -15,6 +15,9 @@ */ package playn.ios; +import java.util.ArrayList; +import java.util.List; + import cli.MonoTouch.Foundation.NSUrl; import cli.MonoTouch.UIKit.UIApplication; import cli.MonoTouch.UIKit.UIScreen; @@ -62,6 +65,8 @@ public class IOSPlatform implements Platform { private final UIWindow mainWindow; private final IOSGameView gameView; + private final List<Runnable> pendingActions = new ArrayList<Runnable>(); + private IOSPlatform(UIApplication app) { this.app = app; RectangleF bounds = UIScreen.get_MainScreen().get_Bounds(); @@ -200,6 +205,24 @@ public class IOSPlatform implements Platform { void update(float delta) { // log.debug("Update " + delta); + // process any pending actions + List<Runnable> actions = null; + synchronized (pendingActions) { + if (!pendingActions.isEmpty()) { + actions = new ArrayList<Runnable>(pendingActions); + pendingActions.clear(); + } + } + if (actions != null) { + for (Runnable action : actions) { + try { + action.run(); + } catch (Exception e) { + log().warn("Pending action failed", e); + } + } + } + // perform the game updates float updateRate = game.updateRate(); if (updateRate == 0) { @@ -221,4 +244,11 @@ public class IOSPlatform implements Platform { // log.debug("Paint " + alpha); graphics.paint(game, alpha); } + + /** Queues an action to be executed before the next {@link #update}. */ + void queueAction(Runnable r) { + synchronized (pendingActions) { + pendingActions.add(r); + } + } }
Dispatch net responses on the main game thread. The async net requests are responded to on some random worker thread. This code is a wonderful example of IOC spaghetti. Yay!
threerings_playn
train
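The playn patch funnels worker-thread callbacks onto the game loop through a locked pending-actions list that update() swaps out and drains each frame. A minimal Python rendering of that marshalling pattern:

```python
import threading

class ActionQueue:
    def __init__(self):
        self._lock = threading.Lock()
        self._pending = []

    def queue_action(self, fn):
        """Safe to call from any thread, e.g. a network callback."""
        with self._lock:
            self._pending.append(fn)

    def drain(self):
        """Called once per frame on the game thread, before the update step."""
        with self._lock:
            actions, self._pending = self._pending, []  # swap under the lock
        for fn in actions:
            try:
                fn()
            except Exception as exc:  # one failed action must not kill the loop
                print("pending action failed:", exc)

queue = ActionQueue()
worker = threading.Thread(
    target=lambda: queue.queue_action(lambda: print("ran on game thread")))
worker.start()
worker.join()
queue.drain()  # the callback runs here, on the "game" thread
```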
2fdb941dd51629b64576ef4ab0ed2bcbb98eccf6
diff --git a/media_library.go b/media_library.go index <HASH>..<HASH> 100644 --- a/media_library.go +++ b/media_library.go @@ -289,14 +289,16 @@ func (mediaBox MediaBox) ConfigureQorMeta(metaor resource.Metaor) { json.Unmarshal([]byte(mediaOptionStr), &mediaOption) } - if _, err := getImageFormat(filename); err == nil { - mediaLibrary.SetSelectedType("image") - } else if isVideoFormat(filename) { - mediaLibrary.SetSelectedType("video") - } else if mediaOption.SelectedType == "video_link" { + if mediaOption.SelectedType == "video_link" { mediaLibrary.SetSelectedType("video_link") - } else { - mediaLibrary.SetSelectedType("file") + } else if filename != "" { + if _, err := getImageFormat(filename); err == nil { + mediaLibrary.SetSelectedType("image") + } else if isVideoFormat(filename) { + mediaLibrary.SetSelectedType("video") + } else { + mediaLibrary.SetSelectedType("file") + } } } return nil
Respect frontend's selected type as top priority
qor_media
train
7a418c2c3b967ed5162c8ecf1edbbe08496a9e50
diff --git a/src/java/org/apache/cassandra/utils/FBUtilities.java b/src/java/org/apache/cassandra/utils/FBUtilities.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/utils/FBUtilities.java +++ b/src/java/org/apache/cassandra/utils/FBUtilities.java @@ -489,9 +489,8 @@ public class FBUtilities public static String decodeToUTF8(ByteBuffer bytes) throws CharacterCodingException { - bytes.mark(); + bytes = bytes.duplicate(); String decoded = Charsets.UTF_8.newDecoder().decode(bytes).toString(); - bytes.reset(); return decoded; }
FBU.decodeToUtf8 duplicates the BB so other threads can trust its position. patch by gdusbabek, reviewed by jbellis. CASSANDRA-<I> git-svn-id: <URL>
Stratio_stratio-cassandra
train
739ba2e9ebfb12617d5ef1926702248274ced74b
diff --git a/test/runner.js b/test/runner.js index <HASH>..<HASH> 100644 --- a/test/runner.js +++ b/test/runner.js @@ -33,7 +33,20 @@ checkForDevFile(function () { var runner = mocha.run(); runner.on("fail", function (test, err) { + var cp = require("child_process"); + var cwd = process.cwd(); + var path = require("path"); + + var child = cp.spawn("git", [ + "checkout", "--", path.join(cwd, "Gruntfile.js") + ], { + stdio: "inherit" + }); + + child.on("exit", function () { + process.exit(1); + }); + process.stderr.write(" " + err.toString() + "\n\n"); - process.exit(1); }); });
Reset the Gruntfile after a test failure.
Modernizr_grunt-modernizr
train
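The grunt-modernizr runner shells out to `git checkout -- Gruntfile.js` on failure so a broken run does not leave the fixture dirty. The same cleanup hook in Python, as a sketch (run_tests is a hypothetical stand-in for the real suite):

```python
import os
import subprocess

def reset_gruntfile():
    """Restore the working-tree copy of Gruntfile.js via git."""
    path = os.path.join(os.getcwd(), "Gruntfile.js")
    subprocess.run(["git", "checkout", "--", path], check=False)

def run_tests():
    raise AssertionError("simulated test failure")  # stand-in for the real suite

try:
    run_tests()
except AssertionError:
    reset_gruntfile()  # put the fixture back before reporting the failure
```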
e33ef2e88cc10344e0c028fcfccbc525e8194335
diff --git a/c7n/version.py b/c7n/version.py index <HASH>..<HASH> 100644 --- a/c7n/version.py +++ b/c7n/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -version = "0.8.9" +version = "0.8.9.1" diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup, find_packages setup( name="c7n", - version='0.8.9', + version='0.8.9.1', description="Cloud Custodian - Policy Rules Engine", long_description_markdown_filename='README.md', classifiers=[
micro version increment (#<I>)
cloud-custodian_cloud-custodian
train
567523b5c3aa78f02caf1e582cf0ee4a3b065875
diff --git a/packages/vuetron-plugins/vuetron-vue/methods/domTree.js b/packages/vuetron-plugins/vuetron-vue/methods/domTree.js index <HASH>..<HASH> 100644 --- a/packages/vuetron-plugins/vuetron-vue/methods/domTree.js +++ b/packages/vuetron-plugins/vuetron-vue/methods/domTree.js @@ -1,9 +1,27 @@ import get from 'lodash.get'; +const idGenerator = () => { + let cache = {}; + return function guid() { + function s4() { + return Math.floor((1 + Math.random()) * 0x10000) + .toString(16) + .substring(1); + } + let id = s4() + s4() + '-' + s4() + '-' + s4() + '-' + + s4() + '-' + s4() + s4() + s4(); + if (!cache[id]) return id; + guid(); + }; +}; + +const gid = idGenerator(); + const buildObject = (component) => { const name = get(component, 'component.$vnode.tag', null); + const id = gid(); if (!name) return undefined; - let obj = { name }; + let obj = { name, id }; if (component.hasOwnProperty('$children') && component.$children.length > 0) { obj.children = []; for (let childComponent of component.$children) { @@ -13,23 +31,23 @@ const buildObject = (component) => { return obj; }; -const buildRouteObject = (obj, name) => { - let res = {name}; +const buildRouteObject = (obj, name, id) => { + let res = {name, id}; res.children = []; if (obj.components) { let childNames = Object.keys(obj.components); for (let i = 0; i < childNames.length; i++) { - res.children.push(buildRouteObject(obj.components[childNames[i]], childNames[i])); + res.children.push(buildRouteObject(obj.components[childNames[i]], childNames[i], gid())); } } return res; }; -const buildRouterObject = (name, arr) => { - const obj = {name}; +const buildRouterObject = (name, id, arr) => { + const obj = {name, id}; obj.children = []; for (let i = 0; i < arr.length; i++) { - obj.children.push(buildRouteObject(arr[i].component, arr[i].name)); + obj.children.push(buildRouteObject(arr[i].component, arr[i].name, gid())); } return obj; }; @@ -41,7 +59,7 @@ const grabAndEmitDOM = (socket) => { let tag = get(node, '__vue__.$children[0].$vnode.tag', null); let routes = get(node, '__vue__._router.options.routes', []); if (routes.length > 0) { - socket.emit('clientDomTree', buildRouterObject(tag, routes)); + socket.emit('clientDomTree', buildRouterObject(tag, gid(), routes)); } else { let firstComp = get(node, '__vue__.$children', []); if (firstComp.length > 0) {
update domTree plugin to generate unique(ish) id for each tree element
vuetwo_vuetron
train
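The vuetron commit adds a GUID-style generator with a collision cache; note that in the diff the recursive guid() retry neither stores nor returns its result, so the cache never actually takes effect. A Python sketch of the apparent intent, with the retry written as a loop:

```python
import random

def make_id_generator():
    seen = set()  # collision cache, consulted and updated on every draw

    def s4():
        # four hex digits, mirroring the JS
        # Math.floor((1 + random) * 0x10000).toString(16).substring(1)
        return f"{random.randrange(0x10000, 0x20000):x}"[1:]

    def gid():
        while True:  # loop instead of recursion, and actually record the id
            candidate = "-".join(
                [s4() + s4(), s4(), s4(), s4(), s4() + s4() + s4()]
            )
            if candidate not in seen:
                seen.add(candidate)
                return candidate

    return gid

gid = make_id_generator()
print(gid())  # e.g. '3f25e04a-4f89-41d3-9a0c-0305e82c3301'
```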
8576e4c7f8d2e4e8c6a5553d9967999beeebdd56
diff --git a/lib/bel_parser/expression/model/annotation.rb b/lib/bel_parser/expression/model/annotation.rb index <HASH>..<HASH> 100644 --- a/lib/bel_parser/expression/model/annotation.rb +++ b/lib/bel_parser/expression/model/annotation.rb @@ -5,9 +5,11 @@ module BELParser class Annotation include Enumerable - attr_reader :keyword - attr_reader :type - attr_reader :domain + attr_accessor :keyword + attr_accessor :type + attr_accessor :domain + attr_reader :uri_reader + attr_reader :url_reader def initialize(keyword, type, domain, options = {}) raise(ArgumentError, 'keyword is nil') unless keyword @@ -18,13 +20,26 @@ module BELParser @type = type.to_sym @domain = domain - # configure reader for URIs (RDF). - @uri_reader = options.fetch(:uri_reader, nil) + uri_reader = options.fetch(:uri_reader, nil) + url_reader = options.fetch(:url_reader, nil) + end + + def initialize_copy(original) + @keyword = original.keyword + @type = original.type + @domain = original.domain + @uri_reader = original.uri_reader + @url_reader = original.url_reader + end + + def uri_reader=(uri_reader) BELParser::Resource::Reader.assert_reader(@uri_reader, 'uri_reader') + @uri_reader = uri_reader + end - # configure reader for URLs (Resource files). - @url_reader = options.fetch(:url_reader, nil) + def url_reader=(url_reader) BELParser::Resource::Reader.assert_reader(@url_reader, 'url_reader') + @url_reader = url_reader end def uri? @@ -37,9 +52,9 @@ module BELParser def [](value) if uri? - @uri_reader.retrieve_value_from_resource(@uri, value) + @uri_reader.retrieve_value_from_resource(@domain, value) elsif url? - @url_reader.retrieve_value_from_resource(@url, value) + @url_reader.retrieve_value_from_resource(@domain, value) else nil end @@ -49,9 +64,9 @@ module BELParser if block_given? values = if uri? - @uri_reader.retrieve_values_from_resource(@uri) + @uri_reader.retrieve_values_from_resource(@domain) elsif url? - @url_reader.retrieve_values_from_resource(@url) + @url_reader.retrieve_values_from_resource(@domain) else [] end @@ -64,15 +79,22 @@ module BELParser end def hash - [@keyword, @uri, @url].hash + [@keyword, @type, @domain].hash end def ==(other) return false if other.nil? - @keyword == other.keyword && @uri == other.uri && @url == other.url + @keyword == other.keyword && + @type == other.type && + @domain == other.domain end alias :eql? :'==' + def domain_equal?(other) + return false if other.nil? + @type == other.type && @domain == other.domain + end + def to_s @keyword.to_s end diff --git a/lib/bel_parser/expression/model/namespace.rb b/lib/bel_parser/expression/model/namespace.rb index <HASH>..<HASH> 100644 --- a/lib/bel_parser/expression/model/namespace.rb +++ b/lib/bel_parser/expression/model/namespace.rb @@ -13,6 +13,8 @@ module BELParser attr_accessor :keyword attr_accessor :uri attr_accessor :url + attr_reader :uri_reader + attr_reader :url_reader def initialize(keyword, uri = nil, url = nil, options = {}) raise(ArgumentError, 'keyword is nil') unless keyword @@ -29,6 +31,14 @@ module BELParser BELParser::Resource::Reader.assert_reader(@url_reader, 'url_reader') end + def initialize_copy(original) + @keyword = original.keyword + @uri = original.uri + @uri_reader = original.uri_reader + @url = original.url + @url_reader = original.url_reader + end + def uri? [email protected]? end @@ -75,6 +85,11 @@ module BELParser end alias :eql? :'==' + def domain_equal?(other) + return false if other.nil? + @uri == other.uri && @url == other.url + end + def to_s @keyword.to_s end
Initialize a dup/clone; add accessors and methods for equality checking on the type/domain of the reference only (i.e. regardless of keyword). This is useful in simplifying the BufferingNanopubCombiner.
OpenBEL_bel_parser
train
aacd72805b7db133907929a85045ccd75528bea8
diff --git a/agent/taskresource/envFiles/envfile.go b/agent/taskresource/envFiles/envfile.go index <HASH>..<HASH> 100644 --- a/agent/taskresource/envFiles/envfile.go +++ b/agent/taskresource/envFiles/envfile.go @@ -273,7 +273,7 @@ func (envfile *EnvironmentFileResource) setTerminalReason(reason string) { }) } -// Create performs resource creation. This retrieves env file contents in parallel +// Create performs resource creation. This retrieves env file contents concurrently // from s3 and writes them to disk func (envfile *EnvironmentFileResource) Create() error { seelog.Debugf("Creating envfile resource.") @@ -285,30 +285,48 @@ func (envfile *EnvironmentFileResource) Create() error { return err } + var wg sync.WaitGroup + errorEvents := make(chan error, len(envfile.environmentFilesSource)) + iamCredentials := executionCredentials.GetIAMRoleCredentials() for _, envfileSource := range envfile.environmentFilesSource { + wg.Add(1) // if we support types besides S3 ARN, we will need to add filtering before the below method is called + // call an additional go routine per env file + go envfile.downloadEnvfileFromS3(envfileSource.Value, iamCredentials, &wg, errorEvents) + } + + wg.Wait() + close(errorEvents) - err := envfile.downloadEnvfileFromS3(envfileSource.Value, iamCredentials) - if err != nil { - err = errors.Wrapf(err, "unable to download envfile with ARN %v from s3", envfileSource.Value) - envfile.setTerminalReason(err.Error()) - return err + if len(errorEvents) > 0 { + var terminalReasons []string + for err := range errorEvents { + terminalReasons = append(terminalReasons, err.Error()) } + + errorString := strings.Join(terminalReasons, ";") + envfile.setTerminalReason(errorString) + return errors.New(errorString) } return nil } -func (envfile *EnvironmentFileResource) downloadEnvfileFromS3(envFilePath string, iamCredentials credentials.IAMRoleCredentials) error { +func (envfile *EnvironmentFileResource) downloadEnvfileFromS3(envFilePath string, iamCredentials credentials.IAMRoleCredentials, + wg *sync.WaitGroup, errorEvents chan error) { + defer wg.Done() + bucket, key, err := s3.ParseS3ARN(envFilePath) if err != nil { - return errors.Wrapf(err, "unable to parse bucket and key from s3 ARN specified in environmentFile %s", envFilePath) + errorEvents <- fmt.Errorf("unable to parse bucket and key from s3 ARN specified in environmentFile %s, error: %v", envFilePath, err) + return } s3Client, err := envfile.s3ClientCreator.NewS3ClientForBucket(bucket, envfile.region, iamCredentials) if err != nil { - return errors.Wrapf(err, "unable to initialize s3 client for bucket %s", bucket) + errorEvents <- fmt.Errorf("unable to initialize s3 client for bucket %s, error: %v", bucket, err) + return } seelog.Debugf("Downlading envfile with bucket name %v and key name %v", bucket, key) @@ -319,11 +337,11 @@ func (envfile *EnvironmentFileResource) downloadEnvfileFromS3(envFilePath string }, downloadPath) if err != nil { - return errors.Wrapf(err, "unable to download env file with key %s from bucket %s", key, bucket) + errorEvents <- fmt.Errorf("unable to download env file with key %s from bucket %s, error: %v", key, bucket, err) + return } seelog.Debugf("Downloaded envfile from s3 and saved to %s", downloadPath) - return nil } func (envfile *EnvironmentFileResource) writeEnvFile(writeFunc func(file oswrapper.File) error, fullPathName string) error {
retrieve envfiles from s3 concurrently using go routines
aws_amazon-ecs-agent
train
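The ECS agent change fans the env-file downloads out to one goroutine each, gathers failures on a channel sized to the input, and joins them into a single terminal reason. The same fan-out/error-collection shape with a Python thread pool (the S3 fetch is stubbed):

```python
from concurrent.futures import ThreadPoolExecutor

def download_envfile(arn):
    """Stand-in for the real S3 fetch; raises on failure."""
    if "bad" in arn:
        raise RuntimeError(f"unable to download env file {arn}")

def create(envfile_arns):
    errors = []
    with ThreadPoolExecutor(max_workers=len(envfile_arns)) as pool:
        futures = [pool.submit(download_envfile, arn) for arn in envfile_arns]
        for future in futures:  # join all downloads, collecting every failure
            try:
                future.result()
            except Exception as exc:
                errors.append(str(exc))
    if errors:
        raise RuntimeError(";".join(errors))  # one combined terminal reason

create(["arn:ok:1", "arn:ok:2"])  # succeeds
try:
    create(["arn:ok:1", "arn:bad:2"])
except RuntimeError as exc:
    print(exc)  # unable to download env file arn:bad:2
```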
b45c724a552d8c4784b78e29985d161d73f10d38
diff --git a/actor/v3action/application.go b/actor/v3action/application.go index <HASH>..<HASH> 100644 --- a/actor/v3action/application.go +++ b/actor/v3action/application.go @@ -75,7 +75,7 @@ func (actor Actor) GetApplicationByNameAndSpace(appName string, spaceGUID string return Application(apps[0]), Warnings(warnings), nil } -func (_ Actor) GetClock() clock.Clock { +func (Actor) GetClock() clock.Clock { return clock.NewClock() }
don't underscore, cuz metalinter
cloudfoundry_cli
train
4430083db3f0ac57f364162da08173d4ce112a25
diff --git a/diff.go b/diff.go index <HASH>..<HASH> 100644 --- a/diff.go +++ b/diff.go @@ -4,10 +4,10 @@ import ( "github.com/hashicorp/terraform/diff" ) -var diffBuilder *diff.LazyResourceMap +var diffMap *diff.LazyResourceMap func init() { - diffBuilder = &diff.LazyResourceMap{ + diffMap = &diff.LazyResourceMap{ Resources: map[string]diff.ResourceBuilderFactory{ "aws_instance": diffBuilder_aws_instance, }, diff --git a/resource_provider.go b/resource_provider.go index <HASH>..<HASH> 100644 --- a/resource_provider.go +++ b/resource_provider.go @@ -1,6 +1,8 @@ package aws import ( + "fmt" + "github.com/hashicorp/terraform/terraform" ) @@ -23,7 +25,12 @@ func (p *ResourceProvider) Configure(*terraform.ResourceConfig) error { func (p *ResourceProvider) Diff( s *terraform.ResourceState, c *terraform.ResourceConfig) (*terraform.ResourceDiff, error) { - return nil, nil + b := diffMap.Get(s.Type) + if b == nil { + return nil, fmt.Errorf("Unknown type: %s", s.Type) + } + + return b.Diff(s, c) } func (p *ResourceProvider) Resources() []terraform.ResourceType {
providers/aws: generate diff
terraform-providers_terraform-provider-aws
train
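The terraform provider resolves a per-resource-type diff builder from a lazy map and fails loudly on unknown types. The dispatch, restated in Python (the builder body is illustrative):

```python
builders = {
    "aws_instance": lambda state, config: {"requires_new": False},  # illustrative
}

def diff(resource_type, state, config):
    builder = builders.get(resource_type)
    if builder is None:
        raise ValueError(f"Unknown type: {resource_type}")
    return builder(state, config)

print(diff("aws_instance", {}, {}))  # {'requires_new': False}
```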
c4ea19941398d7cd28f8fa7dc39518395c940a76
diff --git a/esgfpid/rabbit/rabbitutils.py b/esgfpid/rabbit/rabbitutils.py index <HASH>..<HASH> 100644 --- a/esgfpid/rabbit/rabbitutils.py +++ b/esgfpid/rabbit/rabbitutils.py @@ -42,11 +42,20 @@ def get_routing_key_and_string_message_from_message_if_possible(msg): json_ok = True logdebug(LOGGER, 'Message was transformed to json.') except ValueError as e: - - # Invalid string message - loginfo(LOGGER, 'Message seems to be invalid json: %s', msg) - msg_string = str(msg) - json_ok = False + + # Try again, in case there was ' instead of " + if "'" in msg: + msg_string = msg + msg_temp = msg.replace("'", '"') + msg_json = json.loads(msg_temp) + json_ok = True + logdebug(LOGGER, 'Message was transformed to json (after replacing single quotes with double quotes).') + + else: + # Invalid string message + loginfo(LOGGER, 'Message seems to be invalid json: %s', msg) + msg_string = str(msg) + json_ok = False else: try: # Message is json already. diff --git a/tests/testcases/rabbit/rabbitutil_tests.py b/tests/testcases/rabbit/rabbitutil_tests.py index <HASH>..<HASH> 100644 --- a/tests/testcases/rabbit/rabbitutil_tests.py +++ b/tests/testcases/rabbit/rabbitutil_tests.py @@ -47,6 +47,21 @@ class RabbitUtilsTestCase(unittest.TestCase): self.assertEquals(received_key, 'roukey', 'Wrong routing key: %s' % received_key) self.assertEquals(expected_message, received_message, 'Wrong message.\nExpected: %s\nReceived: %s' % (expected_message, received_message)) + def test_get_message_and_routing_key_string_singlequtoes_ok(self): + + # Test variables: + passed_message = "{'bla':'foo', 'ROUTING_KEY':'roukey'}" + LOGGER.info('Message: %s' % passed_message) + + # Run code to be checked: + received_key, received_message = rutils.get_routing_key_and_string_message_from_message_if_possible(passed_message) + + # Check result: + received_message = received_message.replace("'", '"') + expected_message = str(passed_message).replace("'", '"') + self.assertEquals(received_key, 'roukey', 'Wrong routing key: %s' % received_key) + self.assertEquals(expected_message, received_message, 'Wrong message.\nExpected: %s\nReceived: %s' % (expected_message, received_message)) + def test_get_message_and_routing_key_string_no_key(self): # Test variables:
Detail: Messages with single quotes instead of double are recognized as JSON (incl. unit test).
IS-ENES-Data_esgf-pid
train
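The esgf-pid change retries a failed json.loads after swapping single quotes for double quotes. The heuristic is deliberately naive (it also rewrites apostrophes inside values), and the sketch below keeps that behavior while flagging the caveat:

```python
import json

def loads_lenient(msg):
    """Parse msg as JSON; on failure, retry once with ' swapped for ".
    Note: the replace is naive and will corrupt apostrophes inside values."""
    try:
        return json.loads(msg)
    except ValueError:
        if "'" in msg:
            return json.loads(msg.replace("'", '"'))
        raise

print(loads_lenient("{'bla': 'foo', 'ROUTING_KEY': 'roukey'}"))
# {'bla': 'foo', 'ROUTING_KEY': 'roukey'}
```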
ab81963705e5b0a2e5d4bbeee4a8a53e9a2101cf
diff --git a/test/integration/tests/regression/new_api_pixelization.py b/test/integration/tests/regression/new_api_pixelization.py index <HASH>..<HASH> 100644 --- a/test/integration/tests/regression/new_api_pixelization.py +++ b/test/integration/tests/regression/new_api_pixelization.py @@ -19,9 +19,13 @@ def make_pipeline( phase_name="phase_1__lens_sersic", phase_folders=phase_folders, galaxies=dict( - lens=al.GalaxyModel( + source=al.GalaxyModel( redshift=0.5, light=al.light_profiles.SphericalExponential + ), + lens=al.GalaxyModel( + redshift=0.5, + light=al.mass_profiles.SphericalExponential ) ), optimizer_class=optimizer_class, @@ -41,7 +45,7 @@ def make_pipeline( galaxies=dict( lens=al.GalaxyModel( redshift=0.5, - mass=al.mass_profiles.EllipticalIsothermal(), + mass=al.mass_profiles.SphericalExponential(), hyper_galaxy=phase1.result.hyper_combined.constant.galaxies.lens.hyper_galaxy, ), source=al.GalaxyModel( @@ -72,7 +76,7 @@ def make_pipeline( galaxies=dict( lens=al.GalaxyModel( redshift=0.5, - mass=al.mass_profiles.EllipticalIsothermal(), + mass=al.mass_profiles.SphericalExponential(), hyper_galaxy=phase6.result.hyper_combined.constant.galaxies.lens.hyper_galaxy, ), source=al.GalaxyModel(
Ensure lens and source are present in phase
Jammy2211_PyAutoLens
train
73cb7de4b826ff669ccf0c9e5af114e158ef1966
diff --git a/shardingsphere-jdbc/shardingsphere-jdbc-orchestration/src/main/java/org/apache/shardingsphere/driver/orchestration/internal/datasource/OrchestrationShardingSphereDataSource.java b/shardingsphere-jdbc/shardingsphere-jdbc-orchestration/src/main/java/org/apache/shardingsphere/driver/orchestration/internal/datasource/OrchestrationShardingSphereDataSource.java index <HASH>..<HASH> 100644 --- a/shardingsphere-jdbc/shardingsphere-jdbc-orchestration/src/main/java/org/apache/shardingsphere/driver/orchestration/internal/datasource/OrchestrationShardingSphereDataSource.java +++ b/shardingsphere-jdbc/shardingsphere-jdbc-orchestration/src/main/java/org/apache/shardingsphere/driver/orchestration/internal/datasource/OrchestrationShardingSphereDataSource.java @@ -110,6 +110,9 @@ public class OrchestrationShardingSphereDataSource extends AbstractOrchestration @Subscribe @SneakyThrows public final synchronized void renew(final RuleConfigurationsChangedEvent ruleConfigurationsChangedEvent) { + if (!ruleConfigurationsChangedEvent.getShardingSchemaName().contains(DefaultSchema.LOGIC_NAME)) { + return; + } dataSource = new ShardingSphereDataSource(dataSource.getDataSourceMap(), ruleConfigurationsChangedEvent.getRuleConfigurations(), dataSource.getSchemaContexts().getProperties().getProps()); } @@ -122,6 +125,9 @@ public class OrchestrationShardingSphereDataSource extends AbstractOrchestration @Subscribe @SneakyThrows public final synchronized void renew(final DataSourceChangedEvent dataSourceChangedEvent) { + if (!dataSourceChangedEvent.getShardingSchemaName().contains(DefaultSchema.LOGIC_NAME)) { + return; + } Map<String, DataSourceConfiguration> dataSourceConfigurations = dataSourceChangedEvent.getDataSourceConfigurations(); dataSource.close(getDeletedDataSources(dataSourceConfigurations)); dataSource.close(getModifiedDataSources(dataSourceConfigurations).keySet());
Optimize renew() for OrchestrationShardingSphereDataSource (#<I>)
apache_incubator-shardingsphere
train
dc4fcfd92a30c7a0761c1103f0b103f1988e5989
diff --git a/werkzeug/urls.py b/werkzeug/urls.py index <HASH>..<HASH> 100644 --- a/werkzeug/urls.py +++ b/werkzeug/urls.py @@ -16,31 +16,12 @@ from werkzeug.datastructures import MultiDict, iter_multi_items from collections import namedtuple -# XXX XXX XXX -# Parts of this module were originally based on the Lib/urllib/parse.py module -# of the Python 3.x standard library, licensed under the `PSF 2 License`_ using the -# following copyright notice:: -# -# Copyright © 2001-2013 Python Software Foundation; All Rights Reserved -# -# .. _PSF 2 License: http://docs.python.org/3/license.html -# -# The goal is to diverge enough from that implementation that we can remove that -# copyright notice and be a pure BSD library. Considering there are currently -# limitations with the way the parsing and joining works we would prefer on -# dropping that part anyways. - - # A regular expression for what a valid schema looks like _scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$') -#: Characters that are safe in any part of an URL. -ALWAYS_SAFE = ( - b'abcdefghijklmnopqrstuvwxyz' - b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' - b'0123456789' - b'_.-+' -) +# Characters that are safe in any part of an URL. +_always_safe = (b'abcdefghijklmnopqrstuvwxyz' + b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+') _hexdigits = '0123456789ABCDEFabcdef' _hextobyte = dict( @@ -382,7 +363,7 @@ def url_quote(string, charset='utf-8', errors='strict', safe='/:'): string = string.encode(charset, errors) if isinstance(safe, text_type): safe = safe.encode(charset, errors) - safe = frozenset(bytearray(safe) + ALWAYS_SAFE) + safe = frozenset(bytearray(safe) + _always_safe) rv = bytearray() for char in bytearray(string): if char in safe:
Clean up and remove the PSF copyright notice, since no significant code from it remains any more
pallets_werkzeug
train
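For context, a hedged sketch (in Java, for consistency with the other examples here) of the percent-encoding rule behind werkzeug's _always_safe set: bytes in the unreserved set pass through untouched and everything else becomes %XX. This illustrates the concept only; it is not a port of url_quote.

import java.nio.charset.StandardCharsets;

final class PercentEncode {
   // Characters that are safe in any part of a URL (mirrors _always_safe).
   private static final String ALWAYS_SAFE =
         "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+";

   static String quote(String s, String extraSafe) {
      String safe = ALWAYS_SAFE + extraSafe;
      StringBuilder out = new StringBuilder();
      for (byte b : s.getBytes(StandardCharsets.UTF_8)) {
         char c = (char) (b & 0xFF);
         if (safe.indexOf(c) >= 0) {
            out.append(c); // safe byte: copy as-is
         } else {
            out.append(String.format("%%%02X", b & 0xFF)); // escape as %XX
         }
      }
      return out.toString();
   }

   public static void main(String[] args) {
      System.out.println(quote("a b/c", "/:")); // prints a%20b/c
   }
}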
7be3fa25c097eb7490b0d5b4acad028e4c75df81
diff --git a/src/main/java/eu/hansolo/medusa/Gauge.java b/src/main/java/eu/hansolo/medusa/Gauge.java index <HASH>..<HASH> 100644 --- a/src/main/java/eu/hansolo/medusa/Gauge.java +++ b/src/main/java/eu/hansolo/medusa/Gauge.java @@ -2127,7 +2127,7 @@ public class Gauge extends Control { } public ObjectProperty<TickLabelLocation> tickLabelLocationProperty() { if (null == tickLabelLocation) { - tickLabelLocation = new ObjectPropertyBase<TickLabelLocation>() { + tickLabelLocation = new ObjectPropertyBase<TickLabelLocation>(_tickLabelLocation) { @Override protected void invalidated() { if(null == get()) set(TickLabelLocation.INSIDE); fireUpdateEvent(REDRAW_EVENT); @@ -3428,7 +3428,7 @@ public class Gauge extends Control { } public ObjectProperty<Color> valueColorProperty() { if (null == valueColor) { - valueColor = new ObjectPropertyBase<Color>() { + valueColor = new ObjectPropertyBase<Color>(_valueColor) { @Override protected void invalidated() { fireUpdateEvent(REDRAW_EVENT); } @Override public Object getBean() { return Gauge.this; } @Override public String getName() { return "valueColor"; } @@ -3461,7 +3461,7 @@ public class Gauge extends Control { } public ObjectProperty<Color> thresholdColorProperty() { if (null == thresholdColor) { - thresholdColor = new ObjectPropertyBase<Color>() { + thresholdColor = new ObjectPropertyBase<Color>(_thresholdColor) { @Override protected void invalidated() { fireUpdateEvent(REDRAW_EVENT); } @Override public Object getBean() { return Gauge.this; } @Override public String getName() { return "thresholdColor"; }
Fixed some minor problems related to shadow fields
HanSolo_Medusa
train
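For context, a hedged sketch of the lazy-property pattern this commit repairs, assuming JavaFX on the classpath: Medusa keeps a plain "shadow" field until the property object is first requested, and the fix passes that field's current value to the ObjectPropertyBase constructor so it is not silently reset to null. This is a simplified single-property version of the pattern, not the Gauge class itself.

import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ObjectPropertyBase;
import javafx.scene.paint.Color;

final class LazyColorHolder {
   // "Shadow" field: holds the value until someone asks for the property object.
   private Color _valueColor = Color.BLACK;
   private ObjectProperty<Color> valueColor;

   public Color getValueColor() {
      return null == valueColor ? _valueColor : valueColor.get();
   }

   public void setValueColor(Color color) {
      if (null == valueColor) { _valueColor = color; } else { valueColor.set(color); }
   }

   public ObjectProperty<Color> valueColorProperty() {
      if (null == valueColor) {
         // Passing _valueColor here is the fix: without it the lazily created
         // property would start at null and drop the shadow field's value.
         valueColor = new ObjectPropertyBase<Color>(_valueColor) {
            @Override public Object getBean() { return LazyColorHolder.this; }
            @Override public String getName() { return "valueColor"; }
         };
         _valueColor = null; // the property object is now the single source of truth
      }
      return valueColor;
   }
}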
9cedcede951870f72d8ec3f57e0133d75bc28e05
diff --git a/core/src/main/java/org/infinispan/executors/LimitedExecutor.java b/core/src/main/java/org/infinispan/executors/LimitedExecutor.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/executors/LimitedExecutor.java
+++ b/core/src/main/java/org/infinispan/executors/LimitedExecutor.java
@@ -69,7 +69,7 @@ public class LimitedExecutor implements Executor {
    public void shutdownNow() {
       log.tracef("Stopping limited executor %s", name);
       running = false;
-      lock.lock();
+      acquireLock();
       try {
          queue.clear();
 
@@ -77,7 +77,7 @@ public class LimitedExecutor implements Executor {
             t.interrupt();
          }
       } finally {
-         lock.unlock();
+         unlockLock();
       }
    }
 
@@ -110,11 +110,11 @@ public class LimitedExecutor implements Executor {
    }
 
    private void executeInternal(Runnable command) {
-      lock.lock();
+      acquireLock();
       try {
          queue.add(command);
       } finally {
-         lock.unlock();
+         unlockLock();
       }
       tryExecute();
    }
@@ -139,14 +139,14 @@ public class LimitedExecutor implements Executor {
 
    private void tryExecute() {
       boolean addRunner = false;
-      lock.lock();
+      acquireLock();
       try {
          if (availablePermits > 0) {
            availablePermits--;
            addRunner = true;
         }
      } finally {
-        lock.unlock();
+        unlockLock();
      }
      if (addRunner) {
         executor.execute(runner);
@@ -157,7 +157,7 @@
      runnerStarting();
      while (running) {
         Runnable runnable = null;
-        lock.lock();
+        acquireLock();
         try {
            // If the previous task was asynchronous, we can't execute a new one on the same thread
            if (availablePermits >= 0) {
@@ -168,7 +168,7 @@
               break;
            }
         } finally {
-           lock.unlock();
+           unlockLock();
         }
 
         try {
@@ -193,41 +193,41 @@
   }
 
   private void runnerStarting() {
-      lock.lock();
+      acquireLock();
      try {
         Thread thread = Thread.currentThread();
         threads.put(thread, thread);
      } finally {
-        lock.unlock();
+        unlockLock();
      }
   }
 
   private void runnerFinished() {
-      lock.lock();
+      acquireLock();
      try {
         Thread thread = Thread.currentThread();
         threads.remove(thread);
         taskFinishedCondition.signalAll();
      } finally {
-        lock.unlock();
+        unlockLock();
      }
   }
 
   private void removePermit() {
-      lock.lock();
+      acquireLock();
      try {
         availablePermits--;
      } finally {
-        lock.unlock();
+        unlockLock();
      }
   }
 
   private void addPermit() {
-      lock.lock();
+      acquireLock();
      try {
         availablePermits++;
      } finally {
-        lock.unlock();
+        unlockLock();
      }
   }
 
@@ -243,4 +243,14 @@
         tryExecute();
      }
   }
+
+   // Here for instrumentation of blockhound
+   private void acquireLock() {
+      lock.lock();
+   }
+
+   // Here for symmetry of acquireLock
+   private void unlockLock() {
+      lock.unlock();
+   }
 }
diff --git a/core/src/main/java/org/infinispan/util/CoreBlockHoundIntegration.java b/core/src/main/java/org/infinispan/util/CoreBlockHoundIntegration.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/util/CoreBlockHoundIntegration.java
+++ b/core/src/main/java/org/infinispan/util/CoreBlockHoundIntegration.java
@@ -40,10 +40,7 @@ public class CoreBlockHoundIntegration implements BlockHoundIntegration {
       CommonsBlockHoundIntegration.allowPublicMethodsToBlock(builder, StateTransferLockImpl.class);
 
       // LimitedExecutor just submits a task to another thread pool
-      builder.allowBlockingCallsInside(LimitedExecutor.class.getName(), "execute");
-      builder.allowBlockingCallsInside(LimitedExecutor.class.getName(), "addPermit");
-      builder.allowBlockingCallsInside(LimitedExecutor.class.getName(), "removePermit");
-      builder.allowBlockingCallsInside(LimitedExecutor.class.getName(), "runTasks");
+      builder.allowBlockingCallsInside(LimitedExecutor.class.getName(), "acquireLock");
 
       // This invokes the actual runnable - we have to make sure it doesn't block as normal
       builder.disallowBlockingCallsInside(LimitedExecutor.class.getName(), "actualRun");
ISPN-<I> LimitedExecutor.tryExecute blocking call causing random test failures
infinispan_infinispan
train
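For context, a hedged sketch of the single-choke-point idea behind this commit: every lock.lock() call is funnelled through one named private method, so an instrumentation tool such as BlockHound can be told once that blocking inside that method is acceptable (the builder call quoted in the comment mirrors the API used in the diff) instead of whitelisting every caller. The class below is an illustration, not Infinispan's LimitedExecutor.

import java.util.concurrent.locks.ReentrantLock;

final class InstrumentedCounter {
   private final ReentrantLock lock = new ReentrantLock();
   private int value;

   void increment() {
      acquireLock(); // single call site a tool can whitelist
      try {
         value++;
      } finally {
         unlockLock();
      }
   }

   // One named wrapper around the blocking call. A BlockHound integration can
   // then register it once, e.g.
   // builder.allowBlockingCallsInside(InstrumentedCounter.class.getName(), "acquireLock");
   private void acquireLock() {
      lock.lock();
   }

   // Kept for symmetry with acquireLock (unlock itself does not block).
   private void unlockLock() {
      lock.unlock();
   }
}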
45a4c813cff32066f232d0703ac59ae1a09be3a4
diff --git a/ChangeLog.md b/ChangeLog.md index <HASH>..<HASH> 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -1,6 +1,11 @@ ChangeLog ========= +2.0.2 (2014-05-??) +------------------ + +* Fixed a problem in examples/calendarserver.php + 2.0.1 (2014-05-28) ------------------ diff --git a/examples/calendarserver.php b/examples/calendarserver.php index <HASH>..<HASH> 100644 --- a/examples/calendarserver.php +++ b/examples/calendarserver.php @@ -57,7 +57,7 @@ $server->addPlugin($caldavPlugin); /* Calendar subscription support */ $server->addPlugin( - new Sabre\CalDAV\Subscription\Plugin() + new Sabre\CalDAV\Subscriptions\Plugin() ); diff --git a/lib/DAV/Version.php b/lib/DAV/Version.php index <HASH>..<HASH> 100644 --- a/lib/DAV/Version.php +++ b/lib/DAV/Version.php @@ -14,6 +14,6 @@ class Version { /** * Full version number */ - const VERSION = '2.0.1'; + const VERSION = '2.0.2'; }
Fix mistake in example: Subscription should be Subscriptions.
sabre-io_dav
train