column   dtype           length / values
hash     stringlengths   40 - 40
diff     stringlengths   131 - 114k
message  stringlengths   7 - 980
project  stringlengths   5 - 67
split    stringclasses   1 value
07a8e094451a0460b8e9705bde0ac35f4d014e24
diff --git a/vent/helpers/meta.py b/vent/helpers/meta.py index <HASH>..<HASH> 100644 --- a/vent/helpers/meta.py +++ b/vent/helpers/meta.py @@ -330,6 +330,9 @@ def Jobs(): json.dump(new_file, outfile) outfile.write("\n") + if 'vent-plugin' in container.attrs['Config']['Labels']: + container.remove() + # add extra one to account for file that just finished if the file was # just created since file_names is processed near the beginning if file_status == 'w' and len(file_names) == 1:
no need for a second loop. Just use the one already being used:
CyberReboot_vent
train
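A rough Python sketch of the single-pass cleanup the commit above describes, using a docker-py style client. The `handle_output` helper and the exited-container filter are illustrative assumptions; only the label check and `container.remove()` come from the diff.

```python
import docker  # assumes the docker-py package and a reachable Docker daemon

def clean_finished_plugins():
    client = docker.from_env()
    # one pass: the per-container work and the cleanup share the same loop
    for container in client.containers.list(all=True, filters={'status': 'exited'}):
        handle_output(container)  # hypothetical work the loop was already doing
        if 'vent-plugin' in container.attrs['Config']['Labels']:
            container.remove()

def handle_output(container):
    print(container.name)
```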
eb687825591d68c84363a31d544de69fc2d28a2a
diff --git a/bootstrap.php b/bootstrap.php index <HASH>..<HASH> 100644 --- a/bootstrap.php +++ b/bootstrap.php @@ -49,12 +49,12 @@ $COCKPIT_BASE_ROUTE = $COCKPIT_BASE_URL; * SYSTEM DEFINES */ if (!defined('COCKPIT_ADMIN')) define('COCKPIT_ADMIN' , 0); -if (!defined('COCKPIT_API_REQUEST')) define('COCKPIT_API_REQUEST' , COCKPIT_ADMIN && strpos($_SERVER['REQUEST_URI'], $COCKPIT_BASE_URL.'/api/')!==false ? 1:0); +if (!defined('COCKPIT_BASE_URL')) define('COCKPIT_BASE_URL' , $COCKPIT_BASE_URL); +if (!defined('COCKPIT_API_REQUEST')) define('COCKPIT_API_REQUEST' , COCKPIT_ADMIN && strpos($_SERVER['REQUEST_URI'], COCKPIT_BASE_URL.'/api/')!==false ? 1:0); if (!defined('COCKPIT_DIR')) define('COCKPIT_DIR' , $COCKPIT_DIR); if (!defined('COCKPIT_SITE_DIR')) define('COCKPIT_SITE_DIR' , $COCKPIT_DIR == $COCKPIT_DOCS_ROOT ? $COCKPIT_DIR : dirname($COCKPIT_DIR)); if (!defined('COCKPIT_CONFIG_DIR')) define('COCKPIT_CONFIG_DIR' , COCKPIT_DIR.'/config'); if (!defined('COCKPIT_DOCS_ROOT')) define('COCKPIT_DOCS_ROOT' , $COCKPIT_DOCS_ROOT); -if (!defined('COCKPIT_BASE_URL')) define('COCKPIT_BASE_URL' , $COCKPIT_BASE_URL); if (!defined('COCKPIT_BASE_ROUTE')) define('COCKPIT_BASE_ROUTE' , $COCKPIT_BASE_ROUTE); if (!defined('COCKPIT_STORAGE_FOLDER')) define('COCKPIT_STORAGE_FOLDER' , COCKPIT_DIR.'/storage'); if (!defined('COCKPIT_PUBLIC_STORAGE_FOLDER')) define('COCKPIT_PUBLIC_STORAGE_FOLDER' , COCKPIT_DIR.'/storage');
moved the order of defined constants so that COCKPIT_BASE_URL is defined before the API request check that uses it
agentejo_cockpit
train
26455f2019c449f4604e097bd22aaa2ce79cf2ed
diff --git a/aws/resource_aws_sagemaker_app_image_config_test.go b/aws/resource_aws_sagemaker_app_image_config_test.go index <HASH>..<HASH> 100644 --- a/aws/resource_aws_sagemaker_app_image_config_test.go +++ b/aws/resource_aws_sagemaker_app_image_config_test.go @@ -77,6 +77,7 @@ func TestAccAWSSagemakerAppImageConfig_basic(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, sagemaker.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSagemakerAppImageConfigDestroy, Steps: []resource.TestStep{ @@ -106,6 +107,7 @@ func TestAccAWSSagemakerAppImageConfig_kernelGatewayImageConfig_kernalSpecs(t *t resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, sagemaker.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSagemakerAppImageConfigDestroy, Steps: []resource.TestStep{ @@ -148,6 +150,7 @@ func TestAccAWSSagemakerAppImageConfig_kernelGatewayImageConfig_fileSystemConfig resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, sagemaker.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSagemakerAppImageConfigDestroy, Steps: []resource.TestStep{ @@ -193,6 +196,7 @@ func TestAccAWSSagemakerAppImageConfig_disappears(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, sagemaker.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSagemakerAppImageConfigDestroy, Steps: []resource.TestStep{
tests/r/sagemaker_app_image_config: Add ErrorCheck
terraform-providers_terraform-provider-aws
train
d3fffd65d254a3d40276a4984786473866ef9c1a
diff --git a/lib/vagrant/util/curl_helper.rb b/lib/vagrant/util/curl_helper.rb index <HASH>..<HASH> 100644 --- a/lib/vagrant/util/curl_helper.rb +++ b/lib/vagrant/util/curl_helper.rb @@ -4,6 +4,7 @@ module Vagrant # Hosts that do not require notification on redirect SILENCED_HOSTS = [ + "vagrantcloud-files-production.s3-accelerate.amazonaws.com".freeze, "vagrantcloud.com".freeze, "vagrantup.com".freeze ].freeze @@ -21,6 +22,7 @@ module Vagrant # Accumulate progress_data progress_data << data + redirect_notify = false while true # If the download has been redirected and we are no longer downloading # from the original host, notify the user that the target host has @@ -30,16 +32,15 @@ module Vagrant if !location.empty? location_uri = URI.parse(location) - unless location_uri.host.nil? - redirect_notify = false + if !location_uri.host.nil? && !redirect_notify logger.info("download redirected to #{location}") source_uri = URI.parse(source) source_host = source_uri.host.to_s.split(".", 2).last location_host = location_uri.host.to_s.split(".", 2).last - if !redirect_notify && location_host != source_host && !SILENCED_HOSTS.include?(location_host) - ui.rewriting do |ui| - ui.clear_line - ui.detail "Download redirected to host: #{location_uri.host}" + if location_host != source_host && !SILENCED_HOSTS.include?(location_host) && !SILENCED_HOSTS.include?(location_uri.host.to_s) + ui.rewriting do |_ui| + _ui.clear_line + _ui.detail "Download redirected to host: #{location_uri.host}" end end redirect_notify = true
Prevent notifications on redirect to default store. This also checks if the redirect notification has been displayed before inspecting the source and location to prevent repeat checks after the notification has been sent.
hashicorp_vagrant
train
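The notify-once guard from the commit above, sketched in Python. `base_domain` is a naive stand-in for the host comparison, and the silenced-host list mirrors the diff; everything else is illustrative.

```python
from urllib.parse import urlparse

SILENCED_HOSTS = {"vagrantcloud.com", "vagrantup.com"}  # mirrors the diff's allowlist

def base_domain(host: str) -> str:
    return ".".join(host.split(".")[-2:])  # naive registrable-domain check (assumption)

def notify_redirects(source: str, locations: list[str]) -> None:
    notified = False
    src = base_domain(urlparse(source).hostname or "")
    for location in locations:
        host = urlparse(location).hostname or ""
        if not host or notified:
            continue
        if base_domain(host) != src and host not in SILENCED_HOSTS:
            print(f"Download redirected to host: {host}")
        notified = True  # set after the first check, so later redirects stay quiet

notify_redirects("https://example.com/box.box",
                 ["https://mirror-a.net/box.box", "https://mirror-b.net/box.box"])
# prints only once, for mirror-a.net
```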
84a16b74b9e77daa109c2f1c200de8f1fae5277f
diff --git a/src/Ftven/Build/Common/Service/Base/AbstractInteractiveService.php b/src/Ftven/Build/Common/Service/Base/AbstractInteractiveService.php index <HASH>..<HASH> 100644 --- a/src/Ftven/Build/Common/Service/Base/AbstractInteractiveService.php +++ b/src/Ftven/Build/Common/Service/Base/AbstractInteractiveService.php @@ -64,4 +64,18 @@ abstract class AbstractService implements ServiceInterface { return $this->output; } + /** + * @param string $msg + */ + protected function outln($msg) + { + $this->getOutput()->writeln(call_user_func_array('sprintf', func_get_args())); + } + /** + * @param string $msg + */ + protected function out($msg) + { + $this->getOutput()->write(call_user_func_array('sprintf', func_get_args())); + } } \ No newline at end of file
add shorthand methods out() and outln()
francetv_php-cli-common
train
35e87de7995d5ae21edb265cfa7412c000913af1
diff --git a/lib/falkorlib/version.rb b/lib/falkorlib/version.rb index <HASH>..<HASH> 100644 --- a/lib/falkorlib/version.rb +++ b/lib/falkorlib/version.rb @@ -19,7 +19,7 @@ module FalkorLib #:nodoc: # MAJOR: Defines the major version # MINOR: Defines the minor version # PATCH: Defines the patch version - MAJOR, MINOR, PATCH = 0, 5, 6 + MAJOR, MINOR, PATCH = 0, 5, 7 module_function
bump to version '<I>'
Falkor_falkorlib
train
46fd9f4a535b6bdd32716e29961744aa86240661
diff --git a/internal/config/storageclass/storage-class.go b/internal/config/storageclass/storage-class.go index <HASH>..<HASH> 100644 --- a/internal/config/storageclass/storage-class.go +++ b/internal/config/storageclass/storage-class.go @@ -234,7 +234,7 @@ func (sCfg Config) GetParityForSC(sc string) (parity int) { } // Update update storage-class with new config -func (sCfg Config) Update(newCfg Config) { +func (sCfg *Config) Update(newCfg Config) { ConfigLock.Lock() defer ConfigLock.Unlock() sCfg.RRS = newCfg.RRS
fix: update storage-class properly. Fixes #<I>
minio_minio
train
b925791ce81abe64d5d83edae86bc5c9ae6e3905
diff --git a/app/network/o-auth-2/refresh-token.js b/app/network/o-auth-2/refresh-token.js index <HASH>..<HASH> 100644 --- a/app/network/o-auth-2/refresh-token.js +++ b/app/network/o-auth-2/refresh-token.js @@ -1,7 +1,7 @@ import * as querystring from '../../common/querystring'; import * as c from './constants'; import {responseToObject} from './misc'; -import {getBasicAuthHeader} from '../../common/misc'; +import {getBasicAuthHeader, setDefaultProtocol} from '../../common/misc'; export default async function (accessTokenUrl, credentialsInBody, @@ -36,7 +36,9 @@ export default async function (accessTokenUrl, headers: headers }; - const response = await window.fetch(accessTokenUrl, config); + const url = setDefaultProtocol(accessTokenUrl); + + const response = await window.fetch(url, config); const body = await response.text(); const results = responseToObject(body, [ c.P_ACCESS_TOKEN,
Fixed OAuth <I> Refresh Token (#<I>)
getinsomnia_insomnia
train
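A minimal Python sketch of what a `setDefaultProtocol`-style helper could look like. The helper name comes from the diff above; the body and the choice of default scheme are assumptions.

```python
def set_default_protocol(url: str, default_scheme: str = "https") -> str:
    """Prepend a scheme when the URL has none, so bare hosts still parse."""
    url = url.strip()
    if not url or "://" in url:
        return url  # already fully qualified (assumption: leave such URLs alone)
    return f"{default_scheme}://{url}"

assert set_default_protocol("api.example.com/token") == "https://api.example.com/token"
assert set_default_protocol("http://api.example.com") == "http://api.example.com"
```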
ef177575471ce30083b7b3dd7f4368cb47f24c6a
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -14,7 +14,7 @@ var parallel = require('run-parallel') /** * Create a torrent. - * @param {string|File|FileList|Array.<File>} input + * @param {string|File|FileList|Array.<File>|Blob|Array.<Blob>} input * @param {Object} opts * @param {string=} opts.name * @param {Date=} opts.creationDate @@ -34,14 +34,14 @@ function createTorrent (input, opts, cb) { } var files - if (isFile(input)) { + if (isBlob(input)) { input = [ input ] } if (Array.isArray(input) && input.length > 0) { opts.name = opts.name || input[0].name files = input.map(function (item) { - if (isFile(item)) { + if (isBlob(item)) { return { length: item.size, path: [ item.name ], @@ -212,12 +212,12 @@ function sumLength (sum, file) { } /** - * Check if `obj` is a W3C File object + * Check if `obj` is a W3C Blob object (which is the superclass of W3C File) * @param {*} obj * @return {boolean} */ -function isFile (obj) { - return typeof File !== 'undefined' && obj instanceof File +function isBlob (obj) { + return typeof Blob !== 'undefined' && obj instanceof Blob } /** diff --git a/test/browser/basic.js b/test/browser/basic.js index <HASH>..<HASH> 100644 --- a/test/browser/basic.js +++ b/test/browser/basic.js @@ -6,9 +6,7 @@ var test = require('tape') function makeFileShim (buf, name) { var file = new Blob([ buf ]) - file.__proto__ = File.prototype file.name = name - file.size = file.length return file } @@ -18,9 +16,9 @@ function sha1 (buf) { var leaves = makeFileShim(fs.readFileSync(__dirname + '/../content/Leaves of Grass by Walt Whitman.epub'), 'Leaves of Grass by Walt Whitman.epub') -// HACK: Using utf8 explicitly here workaround a node 0.10 bug with base64. +// HACK: Using utf8 explicitly here workaround a node 0.10.29 bug with base64. // Apparrently if you call fs.createReadStream(file, { encoding: 'base64' }) on a -// very short file (1 or 2 chars) in node 0.10 then no data is ever emitted. +// very short file (1 or 2 chars), then no data is ever emitted. var numbers1 = makeFileShim(fs.readFileSync(__dirname + '/../content/numbers/1.txt', 'utf8'), '1.txt') var numbers2 = makeFileShim(fs.readFileSync(__dirname + '/../content/numbers/2.txt', 'utf8'), '2.txt')
check for Blob, not File, so testing is easier
webtorrent_create-torrent
train
cb1d7146f37004088984c5832bc82b08b4c1d7f4
diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/db/entitymanager/DbEntityManager.java b/engine/src/main/java/org/camunda/bpm/engine/impl/db/entitymanager/DbEntityManager.java index <HASH>..<HASH> 100644 --- a/engine/src/main/java/org/camunda/bpm/engine/impl/db/entitymanager/DbEntityManager.java +++ b/engine/src/main/java/org/camunda/bpm/engine/impl/db/entitymanager/DbEntityManager.java @@ -25,7 +25,6 @@ import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperation import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.UPDATE_BULK; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.List; @@ -282,7 +281,7 @@ public class DbEntityManager implements Session, EntityLoadListener { protected void flushDbOperationManager() { // obtain totally ordered operation list from operation manager List<DbOperation> operationsToFlush = dbOperationManager.calculateFlush(); - logFlushSummary(operationsToFlush); + LOG.databaseFlushSummary(operationsToFlush); // execute the flush for (DbOperation dbOperation : operationsToFlush) { @@ -307,26 +306,6 @@ public class DbEntityManager implements Session, EntityLoadListener { flushDbOperationManager(); } - @Deprecated - /** - * See {EnginePersistenceLogger.flushDbOperationException} for string formation - */ - protected String formatExceptionMessage(Exception e, DbOperation dbOperation, List<DbOperation> operationsToFlush) { - StringBuilder exceptionMessage = new StringBuilder(); - exceptionMessage.append("Exception while executing Database Operation: "); - exceptionMessage.append(dbOperation.toString()); - exceptionMessage.append(":"); - exceptionMessage.append(e.getMessage()); - exceptionMessage.append("\nFlush summary:\n[\n"); - for (DbOperation op : operationsToFlush) { - exceptionMessage.append(" "); - exceptionMessage.append(op.toString()); - exceptionMessage.append("\n"); - } - exceptionMessage.append("]"); - return exceptionMessage.toString(); - } - protected void handleOptimisticLockingException(DbOperation dbOperation) { boolean isHandled = false; @@ -460,11 +439,6 @@ public class DbEntityManager implements Session, EntityLoadListener { dbOperationManager.addOperation(dbOperation); } - @Deprecated - protected void logFlushSummary(Collection<DbOperation> operations) { - LOG.databaseFlushSummary(operations); - } - public void close() { } diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/db/sql/DbSqlSession.java b/engine/src/main/java/org/camunda/bpm/engine/impl/db/sql/DbSqlSession.java index <HASH>..<HASH> 100644 --- a/engine/src/main/java/org/camunda/bpm/engine/impl/db/sql/DbSqlSession.java +++ b/engine/src/main/java/org/camunda/bpm/engine/impl/db/sql/DbSqlSession.java @@ -43,7 +43,6 @@ import org.camunda.bpm.engine.impl.db.EnginePersistenceLogger; import org.camunda.bpm.engine.impl.db.HasDbRevision; import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbBulkOperation; import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbEntityOperation; -import org.camunda.bpm.engine.impl.util.ClassNameUtil; import org.camunda.bpm.engine.impl.util.IoUtil; import org.camunda.bpm.engine.impl.util.ReflectUtil; @@ -249,24 +248,6 @@ public class DbSqlSession extends AbstractPersistenceSession { executeUpdate(statement, parameter); } - // utils ///////////////////////////////////////// - - - /** - * this is now done in {EnginePersistenceLogger.executeDatabaseOperation} - */ - @Deprecated - protected String toString(Object object) { - if(object == null) { - return "null"; - } - if(object instanceof DbEntity) { - DbEntity dbEntity = (DbEntity) object; - return ClassNameUtil.getClassNameWithoutPackage(dbEntity)+"["+dbEntity.getId()+"]"; - } - return object.toString(); - } - // flush //////////////////////////////////////////////////////////////////// public void flush() { @@ -287,7 +268,6 @@ public class DbSqlSession extends AbstractPersistenceSession { // schema operations //////////////////////////////////////////////////////// - @Deprecated public void dbSchemaCheckVersion() { try { String dbVersion = getDbVersion();
fix(logging): remove deprecated methods from internal api. Related to: #CAM-<I>
camunda_camunda-bpm-platform
train
4b26635b3df9b5e7213ded640bca9af35a1d8a1d
diff --git a/visidata/settings.py b/visidata/settings.py index <HASH>..<HASH> 100644 --- a/visidata/settings.py +++ b/visidata/settings.py @@ -161,7 +161,7 @@ class OptionsObject: vd.cmdlog.set_option(k, v, obj) else: curval = None - warning('setting unknown option %s' % k) + vd.warning('setting unknown option %s' % k) return self._set(k, v, obj) @@ -218,7 +218,7 @@ def addCommand(cls, keystrokes, longname, execstr, helpstr='', **kwargs): def bindkey(cls, keystrokes, longname): oldlongname = bindkeys._get(keystrokes, cls) if oldlongname: - warning('%s was already bound to %s' % (keystrokes, oldlongname)) + vd.warning('%s was already bound to %s' % (keystrokes, oldlongname)) bindkeys.set(keystrokes, longname, cls) @BaseSheet.class_api
[settings] warning() is part of the VisiData.global_api
saulpw_visidata
train
a6ea244f99c4f59b946cc7a25eee9461c42fb990
diff --git a/examples/flax/summarization/run_summarization_flax.py b/examples/flax/summarization/run_summarization_flax.py index <HASH>..<HASH> 100644 --- a/examples/flax/summarization/run_summarization_flax.py +++ b/examples/flax/summarization/run_summarization_flax.py @@ -769,6 +769,14 @@ def main(): cur_step = epoch * (len(train_dataset) // train_batch_size) write_metric(summary_writer, train_metrics, eval_metrics, train_time, cur_step) + # save checkpoint after each epoch and push checkpoint to the hub + if jax.process_index() == 0: + params = jax.device_get(jax.tree_map(lambda x: x[0], state.params)) + model.save_pretrained(training_args.output_dir, params=params) + tokenizer.save_pretrained(training_args.output_dir) + if training_args.push_to_hub: + repo.push_to_hub(commit_message=f"Saving weights and logs of epoch {epoch}", blocking=False) + # ======================== Prediction loop ============================== if training_args.do_predict: logger.info("*** Predict ***") @@ -808,14 +816,6 @@ def main(): desc = f"Predict Loss: {pred_metrics['loss']} | {rouge_desc})" logger.info(desc) - # save checkpoint after each epoch and push checkpoint to the hub - if jax.process_index() == 0: - params = jax.device_get(jax.tree_map(lambda x: x[0], state.params)) - model.save_pretrained(training_args.output_dir, params=params) - tokenizer.save_pretrained(training_args.output_dir) - if training_args.push_to_hub: - repo.push_to_hub(commit_message=f"Saving weights and logs of epoch {epoch}", blocking=False) - if __name__ == "__main__": main()
Fix: save checkpoint after each epoch and push checkpoint to the hub (#<I>)
huggingface_pytorch-pretrained-BERT
train
fe8d7474cce3818da5fba907a79017c853b9f31c
diff --git a/spec/pidgin2adium_spec.rb b/spec/pidgin2adium_spec.rb index <HASH>..<HASH> 100644 --- a/spec/pidgin2adium_spec.rb +++ b/spec/pidgin2adium_spec.rb @@ -8,11 +8,7 @@ $-w=nil describe "Pidgin2Adium" do before(:all) do - @current_dir = File.dirname(__FILE__) - @aliases = %w{gabebw gabeb-w gbw me}.join(',') - @nonexistent_logfile_path = "./nonexistent_logfile_path/" - @logfile_path = File.join(@current_dir, "logfiles/") @text_logfile_path = "#{@logfile_path}/2006-12-21.223606.txt" @htm_logfile_path = "#{@logfile_path}/2008-01-15.071445-0500PST.htm" diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -25,6 +25,12 @@ prefork_block = lambda do end constant.configure do |config| + config.before(:all) do + @current_dir = File.dirname(__FILE__) + @aliases = %w{gabebw gabeb-w gbw me}.join(',') + + @logfile_path = File.join(@current_dir, "logfiles/") + end end end
Move some useful RSpec instance variables into global before block
gabebw_pipio
train
c24cc6ab8ddcf76e723c2d9485c74c17134666d0
diff --git a/lib/index_for/builder.rb b/lib/index_for/builder.rb index <HASH>..<HASH> 100644 --- a/lib/index_for/builder.rb +++ b/lib/index_for/builder.rb @@ -73,7 +73,7 @@ module IndexFor def format_content content, options = {}, &block # We need to convert content to_a because when dealing with ActiveRecord # Array proxies, the follow statement Array# === content return false - if block && block.arity == 1 + if block && block.arity <= 1 content = block elsif content.respond_to?(:to_ary) content = content.to_a
fix issue where blocks with arguments.length < 1 were not working
bbtfr_index_for
train
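A rough Python counterpart to the Ruby `block.arity <= 1` check above, using `inspect`. Ruby reports a negative arity for splat blocks, which the varargs branch here approximates.

```python
import inspect

def takes_at_most_one_positional(fn) -> bool:
    """True when fn declares at most one positional parameter (or *args)."""
    params = inspect.signature(fn).parameters.values()
    positional = [p for p in params
                  if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD)]
    has_varargs = any(p.kind == p.VAR_POSITIONAL for p in params)
    return len(positional) <= 1 or has_varargs

assert takes_at_most_one_positional(lambda: None)
assert takes_at_most_one_positional(lambda x: x)
assert takes_at_most_one_positional(lambda *args: args)
assert not takes_at_most_one_positional(lambda x, y: x)
```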
178d43ed19ebafce7cab4c320515d917e710b3d4
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ extras_require = { "docs": ["Sphinx>=1.4.4", "sphinx-rtd-theme>=0.1.9",], "tests": tests_require, "kubernetes": ["kubernetes>=11.0.0,<12.0.0",], - "yadage": ["yadage==0.20.1", "yadage-schemas==0.10.6",], + "yadage": ["adage==0.10.1", "yadage==0.20.1", "yadage-schemas==0.10.6",], } extras_require["all"] = []
setup: pin `adage` in yadage extra. Closes reanahub/reana-workflow-engine-yadage#<I>
reanahub_reana-commons
train
d9f42182623d4473d605a387805c533011b04aee
diff --git a/lib/url_parser.js b/lib/url_parser.js index <HASH>..<HASH> 100644 --- a/lib/url_parser.js +++ b/lib/url_parser.js @@ -58,18 +58,22 @@ module.exports = function(url, options, callback) { let connectionString = connectionStrings.join(',') + '/?'; - dns.resolveTxt(result.host, function(err, records) { + dns.resolveTxt(result.host, function(err, record) { if (err && err.code !== 'ENODATA') return callback(err); - if (err && err.code === 'ENODATA') records = null; + if (err && err.code === 'ENODATA') record = null; + if (record) { + if (record.length > 1) { + return callback(new Error('multiple text records not allowed')); + } + record = record[0]; + if (record.length > 1) record = record.join(''); + else record = record[0]; - if (records) { - let concatRecords = records.map(function(record) { - // A single record with multiple strings gets concatenated - if (record.length > 1) return record.join(''); - else return record; - }); + if (!record.includes('authSource') && !record.includes('replicaSet')) { + return callback(new Error('text record must only set `authSource` or `replicaSet`')); + } - connectionString += concatRecords.join('&'); + connectionString += record; } parseHandler(connectionString, options, callback); diff --git a/test/functional/url_parser_tests.js b/test/functional/url_parser_tests.js index <HASH>..<HASH> 100644 --- a/test/functional/url_parser_tests.js +++ b/test/functional/url_parser_tests.js @@ -1118,13 +1118,10 @@ describe('Url SRV Parser', function() { }, test: function(done) { // This text record contains two options - // connectTimeoutMS=300000&socketTimeoutMS=300000 parse('mongodb+srv://test5.test.build.10gen.cc', {}, function(err, object) { - var serverOptions = { - socketOptions: { connectTimeoutMS: 300000, socketTimeoutMS: 300000 } - }; expect(err).to.be.null; - expect(object.server_options).to.deep.equal(serverOptions); + expect(object.rs_options.rs_name).to.equal('repl0'); + expect(object.db_options.authSource).to.equal('thisDB'); done(); }); } @@ -1133,7 +1130,7 @@ describe('Url SRV Parser', function() { /** * @ignore */ - it('should build a connection string based on a SRV with multiple TXT records', { + it('should fail if multiple TXT records', { metadata: { requires: { topology: ['single'] } }, test: function(done) { // This url has a text record with multiple records // mongodb://localhost.build.10gen.cc:27017/?connectTimeoutMS=200000&socketTimeoutMS=200000 parse('mongodb+srv://test6.test.build.10gen.cc', {}, function(err, object) { - expect(err).to.be.null; - expect(object).to.exist; - expect(object.servers[0].host).to.equal('localhost.test.build.10gen.cc'); - expect(object.servers[0].port).to.equal(27017); - expect(object.server_options.socketOptions.connectTimeoutMS).to.equal(200000); - expect(object.server_options.socketOptions.socketTimeoutMS).to.equal(200000); + expect(err).to.exist; + expect(err.message).to.equal('multiple text records not allowed'); done(); }); } @@ -1155,11 +1148,13 @@ describe('Url SRV Parser', function() { /** * @ignore */ - it('should build a connection string based on SRV, TXT records and options override', { + it.skip('should build a connection string based on SRV, TXT records and options override', { metadata: { requires: { topology: ['single'] } }, test: function(done) { + // TODO this url should error because of multiple text records but need a + // test to check options override // This url has srv and txt records and options passed in through api parse('mongodb+srv://test6.test.build.10gen.cc', { connectTimeoutMS: 250000 }, function( err, @@ -1189,10 +1184,10 @@ describe('Url SRV Parser', function() { }, test: function(done) { // This text record contains a key with no value - // readPreference + // authSource parse('mongodb+srv://test8.test.build.10gen.cc', {}, function(err) { expect(err).to.exist; - expect(err.message).to.equal('query parameter readPreference is an incomplete value pair'); + expect(err.message).to.equal('query parameter authSource is an incomplete value pair'); done(); }); } @@ -1229,13 +1224,10 @@ describe('Url SRV Parser', function() { }, test: function(done) { // This text record contains multiple strings - // "connectTime" "outMS=150000" "&socketT" "imeoutMS" "=" "250000" + // 'replicaS' 'et=rep' 'l0' parse('mongodb+srv://test11.test.build.10gen.cc', function(err, object) { - var serverOptions = { - socketOptions: { connectTimeoutMS: 150000, socketTimeoutMS: 250000 } - }; expect(err).to.be.null; - expect(object.server_options).to.deep.equal(serverOptions); + expect(object.rs_options.rs_name).to.equal('repl0'); done(); }); }
fix(url parser): only 1 txt record allowed with 2 possible options
mongodb_node-mongodb-native
train
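A Python sketch of the same TXT-record validation, assuming the dnspython package; the error strings and the allowed option names come from the diff above, everything else is illustrative.

```python
import dns.resolver  # assumes the dnspython package

ALLOWED_TXT_OPTIONS = ("authSource", "replicaSet")

def srv_txt_options(host: str) -> str:
    """Resolve the single TXT record allowed for a mongodb+srv host."""
    try:
        answers = dns.resolver.resolve(host, "TXT")
    except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN):
        return ""
    # a single record split into multiple strings gets concatenated
    records = ["".join(part.decode() for part in rdata.strings) for rdata in answers]
    if len(records) > 1:
        raise ValueError("multiple text records not allowed")
    record = records[0]
    if not any(option in record for option in ALLOWED_TXT_OPTIONS):
        raise ValueError("text record must only set `authSource` or `replicaSet`")
    return record
```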
85f3954c80bc68feb0de23c186d06c716016faae
diff --git a/isso/js/app/isso.js b/isso/js/app/isso.js index <HASH>..<HASH> 100644 --- a/isso/js/app/isso.js +++ b/isso/js/app/isso.js @@ -36,9 +36,9 @@ var Postbox = function(parent) { var localStorage = utils.localStorageImpl, el = $.htmlify(template.render("postbox", { - "author": JSON.parse(localStorage.getItem("author")), - "email": JSON.parse(localStorage.getItem("email")), - "website": JSON.parse(localStorage.getItem("website")), + "author": JSON.parse(localStorage.getItem("isso-author")), + "email": JSON.parse(localStorage.getItem("isso-email")), + "website": JSON.parse(localStorage.getItem("isso-website")), "preview": '' })); @@ -119,9 +119,9 @@ var Postbox = function(parent) { email = $("[name=email]", el).value || null, website = $("[name=website]", el).value || null; - localStorage.setItem("author", JSON.stringify(author)); - localStorage.setItem("email", JSON.stringify(email)); - localStorage.setItem("website", JSON.stringify(website)); + localStorage.setItem("isso-author", JSON.stringify(author)); + localStorage.setItem("isso-email", JSON.stringify(email)); + localStorage.setItem("isso-website", JSON.stringify(website)); api.create($("#isso-thread").getAttribute("data-isso-id"), { author: author, email: email, website: website,
js: app/isso: Prefix localstorage keys w/ `isso-`. This way, they are not interfering with other elements on the "host" page. Note that the LocalStorage is scoped inside the page isso is embedded on, not its API endpoint. Suggested by @adroste in <URL>
posativ_isso
train
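The key-prefixing idea from the commit above, sketched in Python against a dict-backed store; the class and method names are illustrative.

```python
class PrefixedStorage:
    """Wrap a key-value store so every key is namespaced, e.g. 'author' -> 'isso-author'."""

    def __init__(self, backend: dict, prefix: str = "isso-"):
        self.backend = backend
        self.prefix = prefix

    def set_item(self, key, value):
        self.backend[self.prefix + key] = value

    def get_item(self, key, default=None):
        return self.backend.get(self.prefix + key, default)

shared = {"author": "someone-else"}  # another widget's key on the host page
store = PrefixedStorage(shared)
store.set_item("author", "me")
assert shared == {"author": "someone-else", "isso-author": "me"}  # no collision
```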
9c0f291b272b1c639ea8566db8b7c9bf5302b690
diff --git a/test/PBS/test_pbs_queue.py b/test/PBS/test_pbs_queue.py index <HASH>..<HASH> 100644 --- a/test/PBS/test_pbs_queue.py +++ b/test/PBS/test_pbs_queue.py @@ -563,30 +563,6 @@ run: self.assertTrue(os.path.isfile('test_file1.txt')) @unittest.skipIf(not has_docker, "Docker container not usable") - def testRemoteSectionOption(self): - '''Test remote target''' - FileTarget('test1.txt').remove('both') - script = SoS_Script(''' -[10: remote] -input: 'test_file.txt' -output: local('test1.txt') -task: -run: - echo ${input} >> ${output} -''') - wf = script.workflow() - Base_Executor(wf, config={ - 'config_file': '~/docker.yml', - # do not wait for jobs - 'wait_for_task': True, - 'default_queue': 'docker', - 'sig_mode': 'force', - }).run() - # - self.assertFalse(os.path.isfile('test_file.txt')) - self.assertTrue(os.path.isfile('test1.txt')) - - @unittest.skipIf(not has_docker, "Docker container not usable") def testDelayedInterpolation(self): '''Test delayed interpolation with expression involving remote objects''' # purge all previous tasks diff --git a/test/test_task.py b/test/test_task.py index <HASH>..<HASH> 100644 --- a/test/test_task.py +++ b/test/test_task.py @@ -436,7 +436,6 @@ sh: 'default_queue': None, 'workflow': 'default', 'workdir': '.', - 'remote_targets': False }).run() self.assertEqual(len(res['pending_tasks']), 2) subprocess.call('sos resume -w', shell=True) @@ -470,7 +469,6 @@ sh: 'default_queue': None, 'workflow': 'default', 'workdir': '.', - 'remote_targets': False }).run() self.assertEqual(len(res['pending_tasks']), 2) subprocess.call('sos resume -w', shell=True) @@ -479,68 +477,5 @@ sh: FileTarget('{}.txt'.format(i)).remove('both') FileTarget('test_trunkworker.sos').remove() - - def testLocalTarget(self): - '''Test the use of local target in remote mode''' - # this file does not exist on remote machine - shutil.copy(__file__, 'test_task.py.tmp') - script = SoS_Script(''' -[10] -input: local('test_task.py.tmp') -output: local('size.txt') -sh: - wc -l ${input} > ${output} -''') - wf = script.workflow() - Base_Executor(wf, config={ - 'wait_for_task': False, - 'sig_mode': 'force', - 'script': 'test_trunkworker.sos', - 'max_running_jobs': 10, - 'bin_dirs': [], - 'workflow_args': [], - 'output_dag': '', - 'targets': [], - 'max_procs': 4, - 'default_queue': None, - 'workflow': 'default', - 'workdir': '.', - 'remote_targets': True - }).run() - self.assertTrue(os.path.isfile('size.txt')) - FileTarget('size.txt').remove() - FileTarget('test_task.py.tmp') - - def testLocalSectionOption(self): - '''Test the use of local target in remote mode''' - # this file does not exist on remote machine - shutil.copy(__file__, 'test_task.py.tmp') - script = SoS_Script(''' -[10: local] -input: 'test_task.py.tmp' -output: 'size.txt' -sh: - wc -l ${input} > ${output} -''') - wf = script.workflow() - Base_Executor(wf, config={ - 'wait_for_task': False, - 'sig_mode': 'force', - 'script': 'test_trunkworker.sos', - 'max_running_jobs': 10, - 'bin_dirs': [], - 'workflow_args': [], - 'output_dag': '', - 'targets': [], - 'max_procs': 4, - 'default_queue': None, - 'workflow': 'default', - 'workdir': '.', - 'remote_targets': True - }).run() - self.assertTrue(os.path.isfile('size.txt')) - FileTarget('size.txt').remove() - FileTarget('test_task.py.tmp') - if __name__ == '__main__': unittest.main()
Remove tests for local and remote section options #<I>
vatlab_SoS
train
b3fecadd20d0d02f56ac79a8068d6263d661b122
diff --git a/src/toil/batchSystems/mesos/batchSystem.py b/src/toil/batchSystems/mesos/batchSystem.py index <HASH>..<HASH> 100644 --- a/src/toil/batchSystems/mesos/batchSystem.py +++ b/src/toil/batchSystems/mesos/batchSystem.py @@ -351,7 +351,7 @@ class MesosBatchSystem(AbstractBatchSystem, mesos.interface.Scheduler): log.info("...launching Mesos task %s" % task.task_id.value) if len(tasks) == 0: - log.info("Offer not large enough to run any tasks. Required: %s Offered: %s" % (job_types[-1], (offerMem/ 1000000, offerCpus, offerStor/ 1000000))) + log.info("Offer not large enough to run any tasks. Required: %s Offered: %s" % (job_types[-1], (offerMem*1000000, offerCpus, offerStor*1000000))) def _createTask(self, jt_job, offer): """
Fix log message - resources were being logged incorrectly due to an incorrect unit conversion
DataBiosphere_toil
train
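The unit conversion from the fix above, made explicit in a small Python sketch. The assumption that Mesos offers report memory in MB while job requirements are tracked in bytes is inferred from the diff's conversion factor.

```python
MB = 1_000_000  # the conversion factor used in the diff

def mb_to_bytes(mem_mb: float) -> int:
    return int(mem_mb * MB)

# Logging an offer (MB, assumed) next to a requirement (bytes, assumed) needs a
# multiply; the old code divided, shrinking the offer by 10**12 in the message.
offer_mem_mb = 2048
print("Offered:", mb_to_bytes(offer_mem_mb), "bytes")
```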
cb1a4034a3e0817316b4ddf023658e177fe3c864
diff --git a/app/models/effective/attribute.rb b/app/models/effective/attribute.rb index <HASH>..<HASH> 100644 --- a/app/models/effective/attribute.rb +++ b/app/models/effective/attribute.rb @@ -32,6 +32,7 @@ module Effective when :datetime ; :datetime when :decimal ; :decimal when :duration ; :duration + when :email ; :email when :integer ; :integer when :percentage ; :percentage when :price ; :price @@ -80,14 +81,14 @@ module Effective when :effective_obfuscation klass.respond_to?(:deobfuscate) ? klass.deobfuscate(value) : value.to_s when :effective_roles - EffectiveRoles.roles_for(value) + EffectiveRoles.roles.include?(value.to_sym) ? value : EffectiveRoles.roles_for(value) when :integer, :percentage (value.kind_of?(String) ? value.gsub(/\D/, '') : value).to_i when :nil value.presence when :price (value.kind_of?(Integer) ? value : (value.to_s.gsub(/[^0-9|\-|\.]/, '').to_f * 100.0)).to_i - when :string, :text + when :string, :text, :email value.to_s when :belongs_to_polymorphic value.to_s diff --git a/app/models/effective/resources/relation.rb b/app/models/effective/resources/relation.rb index <HASH>..<HASH> 100644 --- a/app/models/effective/resources/relation.rb +++ b/app/models/effective/resources/relation.rb @@ -110,7 +110,7 @@ module Effective relation.where("#{sql_column} = ?", term) when :price relation.where("#{sql_column} = ?", term) - when :string, :text + when :string, :text, :email if fuzzy relation.where("#{sql_column} #{ilike} ?", "%#{term}%") else
effective_roles and email fields
code-and-effect_effective_resources
train
f4caaf94996b03979a818357e1600dad53085bab
diff --git a/src/Phlexible/Bundle/FrontendMediaBundle/Controller/MediaController.php b/src/Phlexible/Bundle/FrontendMediaBundle/Controller/MediaController.php index <HASH>..<HASH> 100644 --- a/src/Phlexible/Bundle/FrontendMediaBundle/Controller/MediaController.php +++ b/src/Phlexible/Bundle/FrontendMediaBundle/Controller/MediaController.php @@ -65,6 +65,10 @@ class MediaController extends Controller } } + if (!file_exists($filePath)) { + return $this->createNotFoundException("File not found."); + } + $extension = pathinfo($filePath, PATHINFO_EXTENSION); return $this->get('igorw_file_serve.response_factory') @@ -93,6 +97,11 @@ class MediaController extends Controller $file = $volume->findFile($fileId); $filePath = $file->getPhysicalPath(); + + if (!file_exists($filePath)) { + return $this->createNotFoundException("File not found."); + } + $mimeType = $file->getMimeType(); return $this->get('igorw_file_serve.response_factory') @@ -122,6 +131,11 @@ class MediaController extends Controller $file = $volume->findFile($fileId); $filePath = $file->getPhysicalPath(); + + if (!file_exists($filePath)) { + return $this->createNotFoundException("File not found."); + } + $mimeType = $file->getMimeType(); return $this->get('igorw_file_serve.response_factory')
send <I> on file not found
phlexible_phlexible
train
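A hedged Flask sketch of the same guard; Flask and the `resolve_physical_path` helper are assumptions, and only the existence check before serving comes from the diff above.

```python
import os
from flask import Flask, abort, send_file  # assumes Flask is available

app = Flask(__name__)

def resolve_physical_path(file_id: str) -> str:
    # hypothetical lookup, standing in for findFile()->getPhysicalPath()
    return os.path.join("storage", file_id)

@app.route("/media/<path:file_id>")
def media(file_id):
    file_path = resolve_physical_path(file_id)
    if not os.path.exists(file_path):
        abort(404)  # turn a missing file into a 404 instead of a server error
    return send_file(file_path)
```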
ccd7cc4f83ea0f7e772e4cd0bb107874867b0acb
diff --git a/vstutils/static/js/guiElements.js b/vstutils/static/js/guiElements.js index <HASH>..<HASH> 100644 --- a/vstutils/static/js/guiElements.js +++ b/vstutils/static/js/guiElements.js @@ -618,12 +618,18 @@ guiElements.autocomplete = function() let list = []; + let url_vars = {} + if (options.dynamic_properties && options.dynamic_properties.url_vars) + { + url_vars = options.dynamic_properties.url_vars + } + if(props['obj']) { for (let i in props['obj']) { list.push(new guiObjectFactory(props['obj'][i], - options.dynamic_properties.url_vars) + url_vars) ); } }
Small fix for autocomplete url_vars [ci skip]
vstconsulting_vstutils
train
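The same guard against a missing nested option collapses into dictionary lookups with defaults in Python; the function name is illustrative.

```python
def autocomplete_url_vars(options: dict) -> dict:
    """Return dynamic_properties.url_vars when present, else an empty mapping."""
    return (options.get("dynamic_properties") or {}).get("url_vars") or {}

assert autocomplete_url_vars({}) == {}
assert autocomplete_url_vars({"dynamic_properties": {}}) == {}
assert autocomplete_url_vars(
    {"dynamic_properties": {"url_vars": {"id": 1}}}) == {"id": 1}
```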
55f6d5d3fe481fbe26f32dfd06041baa55a4ffd4
diff --git a/liquibase-cli/src/main/java/liquibase/integration/commandline/LiquibaseCommandLine.java b/liquibase-cli/src/main/java/liquibase/integration/commandline/LiquibaseCommandLine.java index <HASH>..<HASH> 100644 --- a/liquibase-cli/src/main/java/liquibase/integration/commandline/LiquibaseCommandLine.java +++ b/liquibase-cli/src/main/java/liquibase/integration/commandline/LiquibaseCommandLine.java @@ -655,7 +655,7 @@ public class LiquibaseCommandLine { String argName = argNames[i]; String camelCaseArg = StringUtil.toCamelCase(argName.substring(2)); if (! argName.equals("--" + camelCaseArg)) { - legacyArgDisplayPrefix = "--" + camelCaseArg + " [deprecated]"; + legacyArgDisplayPrefix = "\n--" + camelCaseArg + " [deprecated]"; legacyArgDisplaySuffix = "\n(legacy: " + camelCaseArg + ")"; }
Fix missing new line in help. DAT-<I>
liquibase_liquibase
train
6e2f627b009f7e27e4110b72c0c08f7a0772bbcd
diff --git a/cli/lib/kontena/cli/common.rb b/cli/lib/kontena/cli/common.rb index <HASH>..<HASH> 100644 --- a/cli/lib/kontena/cli/common.rb +++ b/cli/lib/kontena/cli/common.rb @@ -270,7 +270,7 @@ module Kontena exit_with_error 'Command requires --force' unless $stdout.tty? && $stdin.tty? puts "Destructive command. To proceed, type \"#{name}\" or re-run this command with --force option." - ask("Enter '#{name}' to confirm: ") == name || error("Confirmation did not match #{name}. Aborted command.") + ask("Enter '#{name}' to confirm: ") == name.to_s || error("Confirmation did not match #{name}. Aborted command.") end def confirm(message = 'Destructive command. You can skip this prompt by running this command with --force option. Are you sure?') diff --git a/cli/spec/kontena/cli/common_spec.rb b/cli/spec/kontena/cli/common_spec.rb index <HASH>..<HASH> 100644 --- a/cli/spec/kontena/cli/common_spec.rb +++ b/cli/spec/kontena/cli/common_spec.rb @@ -137,8 +137,15 @@ describe Kontena::Cli::Common do it 'returns true if input matches' do allow(subject).to receive(:ask).and_return('name-to-confirm') - expect(subject.confirm_command('name-to-confirm')).to be_truthy expect{subject.confirm_command('name-to-confirm')}.to_not raise_error + expect(subject.confirm_command('name-to-confirm')).to be_truthy + end + + it 'returns true if input matches and param is not a string' do + allow(subject).to receive(:ask).and_return('123') + + expect{subject.confirm_command(123)}.to_not raise_error + expect(subject.confirm_command(123)).to be_truthy end it 'raises error unless input matches' do
Fix confirmation dialog in stack related commands (#<I>)
kontena_kontena
train
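The Python equivalent of the `name.to_s` fix above: normalize the expected value to a string before comparing it with user input.

```python
def confirm_command(name) -> bool:
    """Ask the user to retype `name` to confirm a destructive action."""
    answer = input(f"Enter '{name}' to confirm: ")
    # str() mirrors the Ruby fix's name.to_s: '123' == 123 is False in Python too
    return answer == str(name)
```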
d3f23eadc4cc5da72c520f2306251e4c5f52458b
diff --git a/tests/transfer.py b/tests/transfer.py index <HASH>..<HASH> 100644 --- a/tests/transfer.py +++ b/tests/transfer.py @@ -129,9 +129,9 @@ class Transfer_: def accepts_local_and_remote_kwargs(self, sftp_objs): transfer, client = sftp_objs - transfer.put(remote="path1", local="path2") + transfer.put(local="path2", remote="path1") client.put.assert_called_with( - remotepath="/remote/path1", localpath="/local/path2" + localpath="/local/path2", remotepath="/remote/path1" ) def returns_rich_Result_object(self, transfer):
These args were in an unintuitive order
fabric_fabric
train
ad9b97664fc932265db1cd87bb2fceda679f5ecf
diff --git a/addon/affinity-engine/configs/stage/scene.js b/addon/affinity-engine/configs/stage/scene.js index <HASH>..<HASH> 100644 --- a/addon/affinity-engine/configs/stage/scene.js +++ b/addon/affinity-engine/configs/stage/scene.js @@ -1,27 +1,29 @@ export default { priority: 2, - component: { - stage: { - direction: { - scene: { - attrs: { - layer: 'stage.windows', - transitionIn: { - effect: { opacity: [1, 0] }, - duration: 250 - }, - transitionOut: { - effect: { opacity: 0 }, - duration: 250 + default: { + component: { + stage: { + direction: { + scene: { + attrs: { + layer: 'stage.windows', + transitionIn: { + effect: { opacity: [1, 0] }, + duration: 250 + }, + transitionOut: { + effect: { opacity: 0 }, + duration: 250 + } } } - } - }, - layer: { - stage: { - windows: { - attrs: { - zIndex: 1000 + }, + layer: { + stage: { + windows: { + attrs: { + zIndex: 1000 + } } } } diff --git a/addon/affinity-engine/stage/direction.js b/addon/affinity-engine/stage/direction.js index <HASH>..<HASH> 100644 --- a/addon/affinity-engine/stage/direction.js +++ b/addon/affinity-engine/stage/direction.js @@ -47,6 +47,7 @@ export default Ember.Object.extend(Evented, { }, _applyEngineConfig() { + this._applyConfigSource(get(this, 'engineConfig.attrs.default')); this._applyConfigSource(get(this, 'engineConfig.attrs')); }, diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "affinity-engine-stage", - "version": "0.2.3", + "version": "0.2.4", "description": "Where the magic happens", "keywords": [ "ember-addon",
prevent default config conflicts with user config
affinity-engine_affinity-engine-stage
train
b4ae1a6ff870433dcafaa20353d7ac82ee3fe47f
diff --git a/src/main/java/org/inferred/freebuilder/processor/PropertyCodeGenerator.java b/src/main/java/org/inferred/freebuilder/processor/PropertyCodeGenerator.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/inferred/freebuilder/processor/PropertyCodeGenerator.java +++ b/src/main/java/org/inferred/freebuilder/processor/PropertyCodeGenerator.java @@ -15,14 +15,20 @@ */ package org.inferred.freebuilder.processor; +import com.google.common.base.Objects; +import com.google.common.base.Objects.ToStringHelper; import com.google.common.base.Optional; import com.google.common.base.Predicate; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.inferred.freebuilder.processor.Metadata.Property; import org.inferred.freebuilder.processor.util.Excerpt; import org.inferred.freebuilder.processor.util.SourceBuilder; +import java.lang.reflect.Field; +import java.util.Map; import java.util.Set; import javax.lang.model.util.Elements; @@ -135,4 +141,47 @@ public abstract class PropertyCodeGenerator { return input.isTemplateRequiredInClear(); } }; + + @Override + public boolean equals(final Object obj) { + if (obj == null || !getClass().isInstance(obj)) { + return false; + } + PropertyCodeGenerator other = (PropertyCodeGenerator) obj; + return fieldValues().equals(other.fieldValues()); + } + + @Override + public int hashCode() { + return ImmutableList.copyOf(fieldValues().values()).hashCode(); + } + + @Override + public String toString() { + ToStringHelper stringHelper = Objects.toStringHelper(this); + for (Map.Entry<String, Object> fieldValue : fieldValues().entrySet()) { + stringHelper.add(fieldValue.getKey(), fieldValue.getValue()); + } + return stringHelper.toString(); + } + + private Map<String, Object> fieldValues() { + ImmutableMap.Builder<String, Object> valuesBuilder = ImmutableMap.builder(); + addFieldValues(getClass(), valuesBuilder); + return valuesBuilder.build(); + } + + private void addFieldValues(Class<?> cls, ImmutableMap.Builder<String, Object> valuesBuilder) { + try { + if (cls.getSuperclass() != null) { + addFieldValues(cls.getSuperclass(), valuesBuilder); + } + for (Field field : cls.getDeclaredFields()) { + field.setAccessible(true); + valuesBuilder.put(field.getName(), field.get(this)); + } + } catch (IllegalAccessException e) { + throw new AssertionError(e); + } + } }
Define equals on PropertyCodeGenerator for tests. Add a reflection-based implementation of hashCode, equals and toString to PropertyCodeGenerator for use in tests.
inferred_FreeBuilder
train
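A Python analogue of the reflection-based equals/hashCode/toString helper above, deriving all three from instance fields via `vars()` (assumes the field values are hashable).

```python
class ReflectiveMixin:
    """Equality, hash and repr derived from instance fields, for use in tests."""

    def _field_values(self):
        return tuple(sorted(vars(self).items()))  # fields must be hashable

    def __eq__(self, other):
        return isinstance(other, type(self)) and self._field_values() == other._field_values()

    def __hash__(self):
        return hash(self._field_values())

    def __repr__(self):
        fields = ", ".join(f"{k}={v!r}" for k, v in sorted(vars(self).items()))
        return f"{type(self).__name__}({fields})"

class Generator(ReflectiveMixin):
    def __init__(self, name, ordinal):
        self.name, self.ordinal = name, ordinal

assert Generator("x", 1) == Generator("x", 1)
assert Generator("x", 1) != Generator("x", 2)
```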
d71b0ef8d3a3730c2d83515a23fe38866f901a59
diff --git a/examples/index.js b/examples/index.js index <HASH>..<HASH> 100644 --- a/examples/index.js +++ b/examples/index.js @@ -85,18 +85,14 @@ var c = new b.$Constructor({ } }) - - console.log('OBJECTS','a:',a.$toString(),'\n\nb:',b.$toString(),'\n\nc:',c.$toString()) - // console.log('--check for enums...') // for(var i in c.c) { // //this is fucked --- how to do non-enum??? // console.log('field:',i) // } - /* some standards --- $field is very important just like operators and getters this all comes from the basic Fibre (no value, data, object) diff --git a/lib/base/index.js b/lib/base/index.js index <HASH>..<HASH> 100644 --- a/lib/base/index.js +++ b/lib/base/index.js @@ -55,26 +55,19 @@ define( proto, '$setKey', { value:function( key, value ) { - // if(key[]) - - //if not like this it does not store shit -- pretty bad check into setter its flawed now - //seems to get the wrong this var added = '_'+key + var field if(this[added]) { - // // window.keyCnt++ - // //dit add letterlijk 0.2 sec (evenveel als al het andere doen...) - // //vershil tussen dingen opslaan op _ is x3 in speed (op 500k) - // //dit wat de slowness verooraakt key = added - // //deze moeten stored worden in __ denk ik.... of # zoiets } - if(this[key] && this[key]._$parent !== this) { - // console.info(key, this[key] && this[key]._$parent) - this[key] = new this[key].$Constructor( value, this ) + field = this[key] + + if(field && field._$parent !== this) { + this[key] = new field.$Constructor( value, this ) } else if(this[key]) { - this[key].$set( value ) + field.$set( value ) } else { this[key] = new this.$children.$Constructor( value, this ) this[key]._$name = key
made setKey a little bit faster
vigour-io_vjs
train
67375413618784ee21c36bf1732eaaf24d672f23
diff --git a/sharding-core/sharding-core-common/src/test/java/org/apache/shardingsphere/core/strategy/keygen/SnowflakeShardingKeyGeneratorTest.java b/sharding-core/sharding-core-common/src/test/java/org/apache/shardingsphere/core/strategy/keygen/SnowflakeShardingKeyGeneratorTest.java index <HASH>..<HASH> 100644 --- a/sharding-core/sharding-core-common/src/test/java/org/apache/shardingsphere/core/strategy/keygen/SnowflakeShardingKeyGeneratorTest.java +++ b/sharding-core/sharding-core-common/src/test/java/org/apache/shardingsphere/core/strategy/keygen/SnowflakeShardingKeyGeneratorTest.java @@ -153,7 +153,7 @@ public final class SnowflakeShardingKeyGeneratorTest { public void assertSetWorkerIdFailureWhenTooMuch() { SnowflakeShardingKeyGenerator keyGenerator = new SnowflakeShardingKeyGenerator(); Properties properties = new Properties(); - properties.setProperty("worker.id", String.valueOf(-Long.MAX_VALUE)); + properties.setProperty("worker.id", String.valueOf(Long.MIN_VALUE)); keyGenerator.setProperties(properties); keyGenerator.generateKey(); }
Update SnowflakeShardingKeyGeneratorTest.java: change `properties.setProperty("worker.id", String.valueOf(-Long.MAX_VALUE));` to `properties.setProperty("worker.id", String.valueOf(Long.MIN_VALUE));`
apache_incubator-shardingsphere
train
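Why the change above matters: in two's complement, negating `Long.MAX_VALUE` lands one above the true minimum, so `Long.MIN_VALUE` is the stricter out-of-range test input. A quick check with the 64-bit bounds written out:

```python
LONG_MAX = 2**63 - 1   # Java Long.MAX_VALUE
LONG_MIN = -2**63      # Java Long.MIN_VALUE

assert -LONG_MAX == LONG_MIN + 1  # negating MAX_VALUE misses the minimum by one
assert LONG_MIN < -LONG_MAX       # MIN_VALUE probes the boundary the old value missed
```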
5ca0d3bed7eaab9f6a5c725eb2d0baa51a0e9595
diff --git a/public/javascripts/typo.js b/public/javascripts/typo.js index <HASH>..<HASH> 100644 --- a/public/javascripts/typo.js +++ b/public/javascripts/typo.js @@ -9,7 +9,12 @@ function get_local_time_for_date(time) { user_date = new Date(); delta_minutes = Math.round((user_date - system_date) / (60 * 1000)); if (Math.abs(delta_minutes) <= (8*7*24*60)) { // eight weeks... I'm lazy to count days for longer than that - return distance_of_time_in_words(delta_minutes) + ' ago'; + distance = distance_of_time_in_words(delta_minutes); + if (delta_minutes < 0) { + return distance + ' from now'; + } else { + return distance + ' ago'; + } } else { return 'on ' + system_date.toLocaleDateString(); }
Fixed the display of future time deltas (closes #<I>) [<EMAIL>] NOTE: I'm committing this for now; however, a better approach might be to not display articles with a future timestamp at all in order to enable the blog author to effectively pre-post articles to appear at certain times. I'll create a separate ticket for this. git-svn-id: <URL>
publify_publify
train
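The sign-aware wording fix above, sketched in Python with the distance phrase simplified to minutes (`distance_of_time_in_words` itself is not reproduced).

```python
def human_time_delta(delta_minutes: int) -> str:
    """Negative deltas are in the future; positive deltas are in the past."""
    distance = f"{abs(delta_minutes)} minutes"  # stand-in for distance_of_time_in_words
    return f"{distance} from now" if delta_minutes < 0 else f"{distance} ago"

assert human_time_delta(90) == "90 minutes ago"
assert human_time_delta(-15) == "15 minutes from now"
```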
569d8fcf061717fabd9e65ce237d5b232d2e0dd4
diff --git a/lib/node_modules/@stdlib/math/base/random/minstd-shuffle/test/test.js b/lib/node_modules/@stdlib/math/base/random/minstd-shuffle/test/test.js index <HASH>..<HASH> 100644 --- a/lib/node_modules/@stdlib/math/base/random/minstd-shuffle/test/test.js +++ b/lib/node_modules/@stdlib/math/base/random/minstd-shuffle/test/test.js @@ -3,11 +3,19 @@ // MODULES // var tape = require( 'tape' ); +var kstest = require( '@stdlib/math/generics/statistics/kstest' ); var INT32_MAX = require( '@stdlib/math/constants/int32-max' ); var isPositiveInteger = require( '@stdlib/math/base/utils/is-positive-integer' ); var minstd = require( './../lib' ); +// VARIABLES // + +var opts = { + 'skip': ( process.env.TEST_MODE === 'coverage' ) +}; + + // TESTS // tape( 'main export is a function', function test( t ) { @@ -69,6 +77,36 @@ tape( 'the `normalized` method returns pseudorandom numbers strictly between 0 ( t.end(); }); +tape( 'the `normalized` methods returns pseudorandom numbers from a uniform distribution', opts, function test( t ) { + var rejected; + var pValue; + var N; + var i; + var j; + var x; + + rejected = 0; + x = new Array( 1e3 ); + N = 300; + for ( i = 0; i < N; i++ ) { + for ( j = 0; j < x.length; j++ ) { + x[ j ] = minstd.normalized(); + if ( x[ j ] < 0.0 || x[ j ] > 1.0 ) { + t.ok( false, 'returned a number outside support: '+x[ j ] ); + } + } + // Test using Kolmogorov-Smirnov goodness-of-fit test: + pValue = kstest( x, 'uniform', 0.0, 1.0 ).pValue; + t.equal( typeof pValue, 'number', 'returns a p-value' ); + if ( pValue < 0.05 ) { + rejected += 1; + } + } + // Account for small sample sizes and few repeats... + t.ok( rejected / N < 0.07, 'null hypothesis (i.e., that numbers are drawn from Uniform(0,1)) is rejected in less than 7% of cases ('+rejected+' of '+N+')' ); + t.end(); +}); + tape( 'attached to the `normalized` method is the generator name', function test( t ) { t.equal( minstd.normalized.NAME, 'minstd-shuffle', 'has `NAME` property' ); t.end();
Add kstest to test that numbers are uniformly distributed
stdlib-js_stdlib
train
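The same goodness-of-fit check in Python, assuming SciPy is available; the 300 trials and the 7% rejection bound mirror the test added in the diff above.

```python
import random
from scipy import stats  # assumes scipy is installed

random.seed(42)
trials, rejected = 300, 0
for _ in range(trials):
    sample = [random.random() for _ in range(1000)]
    # Kolmogorov-Smirnov test against Uniform(0, 1)
    if stats.kstest(sample, "uniform").pvalue < 0.05:
        rejected += 1

# Under the null, about 5% of trials reject at alpha = 0.05; allow up to 7%.
assert rejected / trials < 0.07
```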
82cd13ca0cffa01a83e9133b3d21742c5c1b3b39
diff --git a/recipe/deploy/lock.php b/recipe/deploy/lock.php index <HASH>..<HASH> 100644 --- a/recipe/deploy/lock.php +++ b/recipe/deploy/lock.php @@ -23,8 +23,5 @@ task('deploy:lock', function () { desc('Unlock deploy'); task('deploy:unlock', function () { - try { - run("rm {{deploy_path}}/.dep/deploy.lock");//always success - } catch (\Exception $e) { - } + run("rm -f {{deploy_path}}/.dep/deploy.lock");//always success });
Use -f to avoid an error when the file does not exist.
deployphp_deployer
train
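The Python counterpart of `rm -f` from the fix above:

```python
from pathlib import Path

Path("deploy.lock").unlink(missing_ok=True)  # Python 3.8+: no error when absent
```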
e936f94fc5dbf36035c1e26272b74b9fe54a5b61
diff --git a/lib/friendly_id.rb b/lib/friendly_id.rb index <HASH>..<HASH> 100644 --- a/lib/friendly_id.rb +++ b/lib/friendly_id.rb @@ -51,19 +51,6 @@ module FriendlyId autoload :Finders, "friendly_id/finders" autoload :SequentiallySlugged, "friendly_id/sequentially_slugged" - # Instances of these classes will never be considered a friendly id. - # @see FriendlyId::ObjectUtils#friendly_id - UNFRIENDLY_CLASSES = [ - ActiveRecord::Base, - Array, - FalseClass, - Hash, - NilClass, - Numeric, - Symbol, - TrueClass - ] - # FriendlyId takes advantage of `extended` to do basic model setup, primarily # extending {FriendlyId::Base} to add {FriendlyId::Base#friendly_id # friendly_id} as a class method. diff --git a/lib/friendly_id/object_utils.rb b/lib/friendly_id/object_utils.rb index <HASH>..<HASH> 100644 --- a/lib/friendly_id/object_utils.rb +++ b/lib/friendly_id/object_utils.rb @@ -1,4 +1,17 @@ module FriendlyId + # Instances of these classes will never be considered a friendly id. + # @see FriendlyId::ObjectUtils#friendly_id + UNFRIENDLY_CLASSES = [ + ActiveRecord::Base, + Array, + FalseClass, + Hash, + NilClass, + Numeric, + Symbol, + TrueClass + ] + # Utility methods for determining whether any object is a friendly id. # # Monkey-patching Object is a somewhat extreme measure not to be taken lightly
Moved UNFRIENDLY_CLASSES from friendly_id to object_utils. We are using FriendlyId::UNFRIENDLY_CLASSES only to determine classes which are definitely "unfriendly". It happens in the FriendlyId::ObjectUtils module. So I think it's a good idea to put the constant close to the only place in the project where it is used =)
norman_friendly_id
train
b7a197968c3645fe6eeef0c0b82116946c0dbbc9
diff --git a/datasketch/experimental/aio/storage.py b/datasketch/experimental/aio/storage.py index <HASH>..<HASH> 100644 --- a/datasketch/experimental/aio/storage.py +++ b/datasketch/experimental/aio/storage.py @@ -150,6 +150,8 @@ if motor is not None and ReturnDocument is not None: self._batch_size = 1000 self._mongo_client = motor.motor_asyncio.AsyncIOMotorClient(dsn, **additional_args) self._collection = self._mongo_client.get_default_database(db_lsh).get_collection(self._collection_name) + self._collection.create_index("key", background=True) + self._initialized = True self._buffer = AsyncMongoBuffer(self._collection, self._batch_size)
Fixes #<I>; MinhashLSH now creates the Mongo index on `key`. (#<I>)
ekzhu_datasketch
train
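The equivalent index creation with pymongo (assumes a MongoDB server is reachable on localhost; the `create_index` call in the diff above is the async motor variant of the same API).

```python
from pymongo import MongoClient  # assumes the pymongo package

collection = MongoClient()["lsh_test"]["hashtables"]
# idempotent: builds the index in the background so key lookups stay fast
collection.create_index("key", background=True)
```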
53c735d6d9fa23bbbecda43b16d212c1ae28b9d2
diff --git a/components/popup/popup.js b/components/popup/popup.js index <HASH>..<HASH> 100644 --- a/components/popup/popup.js +++ b/components/popup/popup.js @@ -248,7 +248,7 @@ export default class Popup extends RingComponentWithShortcuts { }} > <div - data-test="ring-popup" + data-test={this.props['data-test'] || 'ring-popup'} style={this.position()} ref={el => { this.popup = el; diff --git a/components/query-assist/query-assist.js b/components/query-assist/query-assist.js index <HASH>..<HASH> 100644 --- a/components/query-assist/query-assist.js +++ b/components/query-assist/query-assist.js @@ -738,6 +738,7 @@ export default class QueryAssist extends RingComponentWithShortcuts { className={this.props.popupClassName} directions={[PopupMenu.PopupProps.Directions.BOTTOM_RIGHT]} data={this.renderSuggestions()} + data-test="ring-query-assist-popup" hint={this.props.hint} hintOnSelection={this.props.hintOnSelection} left={this.getPopupOffset(this.state.suggestions)}
RG-<I> Query assist popup always present in DOM with data-test='ring-popup' Former-commit-id: <I>b<I>b<I>fcb<I>c<I>e<I>a<I>
JetBrains_ring-ui
train
6bc0c7f79a224daec25e214df3efd1a448e30aa3
diff --git a/umi_tools/extract.py b/umi_tools/extract.py index <HASH>..<HASH> 100644 --- a/umi_tools/extract.py +++ b/umi_tools/extract.py @@ -246,9 +246,10 @@ class Extractor: read1_format = read1.guessFormat() assert self.encoding in read1_format, ( "specified quality encoding (%s) does not match possible " - "format for read (%s)" % (self.encoding, read1_format)) + "format for read: \n\n%s\n\npossible format(s)=%s)" % ( + self.encoding, read1, read1_format)) - umi_qual = map(ord, bc_qual1) + umi_qual = map(ord, [bc_qual1[x] for x in self.umi_bases]) umi_qual = [x - RANGES[self.encoding][0] for x in umi_qual] below_threshold = len([x for x in umi_qual if x < self.thresh]) @@ -274,7 +275,7 @@ class Extractor: "specified quality encoding (%s) does not match possible " "format(s) for read 2 (%s)" % (self.encoding, read2_format)) - umi_qual2 = map(ord, bc_qual2) + umi_qual2 = map(ord, [bc_qual2[x] for x in self.umi_bases2]) umi_qual2 = [x - RANGES[self.encoding][0] for x in umi_qual2] below_threshold = len([x for x in umi_qual2 if x < self.thresh])
corrects quality checking so only UMI bases in bc-pattern are checked
CGATOxford_UMI-tools
train
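A hedged Python sketch of the corrected check above: score only the UMI positions of the barcode quality string. The offset table is an assumption modeled on umi_tools' RANGES.

```python
QUALITY_OFFSET = {"phred33": 33, "phred64": 64}  # assumed encoding offsets

def umi_bases_below_threshold(bc_qual: str, umi_positions, thresh: int,
                              encoding: str = "phred33") -> int:
    """Count low-quality bases, looking only at the UMI positions of the pattern."""
    offset = QUALITY_OFFSET[encoding]
    quals = [ord(bc_qual[i]) - offset for i in umi_positions]
    return sum(q < thresh for q in quals)

# pattern CCNNNN: cell barcode at positions 0-1, UMI at positions 2-5 (illustrative)
assert umi_bases_below_threshold("II##II", umi_positions=[2, 3, 4, 5], thresh=10) == 2
```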
f05f85fcfe9fd46f37f79d014f2544bc96bb16da
diff --git a/framework/yii/db/Connection.php b/framework/yii/db/Connection.php index <HASH>..<HASH> 100644 --- a/framework/yii/db/Connection.php +++ b/framework/yii/db/Connection.php @@ -251,16 +251,7 @@ class Connection extends Component */ private $_schema; - /** - * Closes the connection when this component is being serialized. - * @return array - */ - public function __sleep() - { - $this->close(); - return array_keys(get_object_vars($this)); - } - + /** * Returns a value indicating whether the DB connection is established. * @return boolean whether the DB connection is established
Removed Connection::__sleep.
yiisoft_yii2-bootstrap4
train
ece66e1d4ad15616235d51fa1de8426eb61c5edd
diff --git a/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java b/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java index <HASH>..<HASH> 100644 --- a/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java +++ b/spring-boot-test/src/main/java/org/springframework/boot/test/json/JsonContentAssert.java @@ -961,9 +961,9 @@ public class JsonContentAssert extends AbstractAssert<JsonContentAssert, CharSeq * Extract the map value at the given JSON path for further object assertions. * @param expression the {@link JsonPath} expression * @param args arguments to parameterize the {@code JsonPath} expression with, using + * formatting specifiers defined in {@link String#format(String, Object...)} * @param <K> key type - * @param <V> value type formatting specifiers defined in - * {@link String#format(String, Object...)} + * @param <V> value type * @return a new assertion object whose object under test is the extracted item * @throws AssertionError if the path is not valid or does not result in a map */
Polish javadoc in JsonContentAssert See gh-<I>
spring-projects_spring-boot
train
f6e63a458970e0b67daa7bc570770cbf8d8f0fd3
diff --git a/src/system/modules/metamodelsfilter_checkbox/MetaModelFilterSettingCheckbox.php b/src/system/modules/metamodelsfilter_checkbox/MetaModelFilterSettingCheckbox.php index <HASH>..<HASH> 100644 --- a/src/system/modules/metamodelsfilter_checkbox/MetaModelFilterSettingCheckbox.php +++ b/src/system/modules/metamodelsfilter_checkbox/MetaModelFilterSettingCheckbox.php @@ -99,7 +99,7 @@ class MetaModelFilterSettingCheckbox extends MetaModelFilterSettingSimpleLookup /** * {@inheritdoc} */ - public function getParameterFilterWidgets($arrIds, $arrFilterUrl, $arrJumpTo, $blnAutoSubmit, $blnHideClearFilter) + public function getParameterFilterWidgets($arrIds, $arrFilterUrl, $arrJumpTo, MetaModelFrontendFilterOptions $objFrontendFilterOptions) { $objAttribute = $this->getMetaModel()->getAttributeById($this->get('attr_id')); @@ -146,7 +146,7 @@ class MetaModelFilterSettingCheckbox extends MetaModelFilterSettingSimpleLookup return array ( - $this->getParamName() => $this->prepareFrontendFilterWidget($arrWidget, $arrFilterUrl, $arrJumpTo, $blnAutoSubmit) + $this->getParamName() => $this->prepareFrontendFilterWidget($arrWidget, $arrFilterUrl, $arrJumpTo, $objFrontendFilterOptions) ); } } \ No newline at end of file
Add a container for fe filter options.
MetaModels_filter_checkbox
train
583800ee3836a22e1f18af405a0c9511b4d7d385
diff --git a/go/cmd/vtcombo/tablet_map.go b/go/cmd/vtcombo/tablet_map.go index <HASH>..<HASH> 100644 --- a/go/cmd/vtcombo/tablet_map.go +++ b/go/cmd/vtcombo/tablet_map.go @@ -165,7 +165,9 @@ type internalTabletConn struct { } // Execute is part of tabletconn.TabletConn +// We need to copy the bind variables as tablet server will change them. func (itc *internalTabletConn) Execute(ctx context.Context, query string, bindVars map[string]interface{}, transactionID int64) (*mproto.QueryResult, error) { + bindVars = tproto.Proto3ToBindVariables(tproto.BindVariablesToProto3(bindVars)) reply := &mproto.QueryResult{} if err := itc.tablet.qsc.QueryService().Execute(ctx, &pbq.Target{ Keyspace: itc.tablet.keyspace, @@ -182,14 +184,20 @@ func (itc *internalTabletConn) Execute(ctx context.Context, query string, bindVa } // ExecuteBatch is part of tabletconn.TabletConn +// We need to copy the bind variables as tablet server will change them. func (itc *internalTabletConn) ExecuteBatch(ctx context.Context, queries []tproto.BoundQuery, asTransaction bool, transactionID int64) (*tproto.QueryResultList, error) { + q := make([]tproto.BoundQuery, len(queries)) + for i, query := range queries { + q[i].Sql = query.Sql + q[i].BindVariables = tproto.Proto3ToBindVariables(tproto.BindVariablesToProto3(query.BindVariables)) + } reply := &tproto.QueryResultList{} if err := itc.tablet.qsc.QueryService().ExecuteBatch(ctx, &pbq.Target{ Keyspace: itc.tablet.keyspace, Shard: itc.tablet.shard, TabletType: itc.tablet.tabletType, }, &tproto.QueryList{ - Queries: queries, + Queries: q, AsTransaction: asTransaction, TransactionId: transactionID, }, reply); err != nil { @@ -199,7 +207,9 @@ func (itc *internalTabletConn) ExecuteBatch(ctx context.Context, queries []tprot } // StreamExecute is part of tabletconn.TabletConn +// We need to copy the bind variables as tablet server will change them. func (itc *internalTabletConn) StreamExecute(ctx context.Context, query string, bindVars map[string]interface{}, transactionID int64) (<-chan *mproto.QueryResult, tabletconn.ErrFunc, error) { + bindVars = tproto.Proto3ToBindVariables(tproto.BindVariablesToProto3(bindVars)) result := make(chan *mproto.QueryResult, 10) var finalErr error
Now vtcombo fake vtgate -> vttablet connection copies the bind variables, as tabletserver will change them, and that messes up vtgate.
vitessio_vitess
train
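
The commit above copies the bind variables by round-tripping them through their proto3 encoding, purely because the tablet server mutates whatever map it is handed. A rough Python analogue of that defensive-copy pattern, with copy.deepcopy standing in for the proto round-trip; the function names are illustrative, not Vitess APIs.

import copy

def execute(query, bind_vars, run_query):
    # Same intent as Proto3ToBindVariables(BindVariablesToProto3(...)):
    # the callee gets a private copy, so the caller's dict survives intact.
    private_vars = copy.deepcopy(bind_vars)
    return run_query(query, private_vars)

def mutating_callee(query, bind_vars):
    bind_vars["rewritten"] = True  # simulates the tablet server's rewriting
    return query

original = {"id": 42}
execute("SELECT * FROM t WHERE id = :id", original, mutating_callee)
print(original)  # {'id': 42}, unchanged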
8a7caab14bd331bb3f6e4b93bc649c5b4b534f95
diff --git a/packages/ui-tabs/src/Tabs/Tab/styles.js b/packages/ui-tabs/src/Tabs/Tab/styles.js index <HASH>..<HASH> 100644 --- a/packages/ui-tabs/src/Tabs/Tab/styles.js +++ b/packages/ui-tabs/src/Tabs/Tab/styles.js @@ -32,9 +32,6 @@ const selectedTab = keyframes` transform: translate3d(0, 0, 0) scaleX(1); }` -const defaultTabVerticalPadding = '1.25rem' -const secondaryTabVerticalPadding = '1rem' - /** * Generates the style object from the theme and provided additional information * @param {Object} componentTheme The theme variable object. @@ -49,7 +46,7 @@ const generateStyle = (componentTheme, props, state) => { const variants = { default: { - padding: `1rem ${defaultTabVerticalPadding}`, + padding: '1rem 1.25rem', // if horizontal padding changes, update `scrollOverlayWidthDefault` in `Tabs/theme.js` lineHeight: 1, position: 'relative', zIndex: componentTheme.zIndex, @@ -83,7 +80,7 @@ const generateStyle = (componentTheme, props, state) => { } }, secondary: { - padding: `0.75rem ${secondaryTabVerticalPadding}`, + padding: '0.75rem 1rem', // if horizontal padding changes, update `scrollOverlayWidthSecondary` in `Tabs/theme.js` color: componentTheme.secondaryColor, marginInlineEnd: '0.2em', marginBottom: '-0.0625rem', @@ -138,4 +135,3 @@ const generateStyle = (componentTheme, props, state) => { } export default generateStyle -export { generateStyle, defaultTabVerticalPadding, secondaryTabVerticalPadding } diff --git a/packages/ui-tabs/src/Tabs/index.js b/packages/ui-tabs/src/Tabs/index.js index <HASH>..<HASH> 100644 --- a/packages/ui-tabs/src/Tabs/index.js +++ b/packages/ui-tabs/src/Tabs/index.js @@ -169,10 +169,6 @@ class Tabs extends Component { componentDidMount() { if (this.props.tabOverflow === 'scroll' && this._tabList) { this.startScrollOverflow() - - // make sure active tab is always visible - const activeTabEl = this._tabList.querySelector('[aria-selected="true"]') - this.showActiveTabIfOverlayed(activeTabEl) } if (this.props.focus || this.props.shouldFocusOnRender) { @@ -214,6 +210,18 @@ class Tabs extends Component { this.handleResize() } + // when tabList is set as overflown, + // make sure active tab is always visible + if ( + this.props.tabOverflow === 'scroll' && + this._tabList && + !prevState.withTabListOverflow && + this.state.withTabListOverflow + ) { + const activeTabEl = this._tabList.querySelector('[aria-selected="true"]') + this.showActiveTabIfOverlayed(activeTabEl) + } + this.props.makeStyles() } diff --git a/packages/ui-tabs/src/Tabs/theme.js b/packages/ui-tabs/src/Tabs/theme.js index <HASH>..<HASH> 100644 --- a/packages/ui-tabs/src/Tabs/theme.js +++ b/packages/ui-tabs/src/Tabs/theme.js @@ -22,11 +22,6 @@ * SOFTWARE. */ -import { - defaultTabVerticalPadding, - secondaryTabVerticalPadding -} from './Tab/styles' - /** * Generates the theme object for the component from the theme and provided additional information * @param {Object} theme The actual theme object. @@ -46,8 +41,8 @@ const generateComponentTheme = (theme, themeOverride = {}) => { small: breakpoints?.small, medium: breakpoints?.medium, large: breakpoints?.large, - scrollOverlayWidthDefault: `calc(4 * ${defaultTabVerticalPadding})`, - scrollOverlayWidthSecondary: `calc(3 * ${secondaryTabVerticalPadding})` + scrollOverlayWidthDefault: '5rem', // has to be 4 times the horizontal padding of the default style tab + scrollOverlayWidthSecondary: '3rem' // has to be 3 times the horizontal padding of the secondary style tab } return {
refactor(ui-tabs): make selected tab always visible Closes: INSTUI-<I>
instructure_instructure-ui
train
a8a64f3712e1d51c9f316548ce020d3e050b28c9
diff --git a/tests/frontend/org/voltdb/MockVoltDB.java b/tests/frontend/org/voltdb/MockVoltDB.java index <HASH>..<HASH> 100644 --- a/tests/frontend/org/voltdb/MockVoltDB.java +++ b/tests/frontend/org/voltdb/MockVoltDB.java @@ -77,6 +77,7 @@ public class MockVoltDB implements VoltDBInterface private SiteTracker m_siteTracker; private final Map<MailboxType, List<MailboxNodeContent>> m_mailboxMap = new HashMap<MailboxType, List<MailboxNodeContent>>(); + private boolean m_replicationActive = false; public MockVoltDB() { this(VoltDB.DEFAULT_PORT, VoltDB.DEFAULT_ADMIN_PORT, -1, VoltDB.DEFAULT_DR_PORT); @@ -473,12 +474,13 @@ public class MockVoltDB implements VoltDBInterface @Override public void setReplicationActive(boolean active) { + m_replicationActive = active; } @Override public boolean getReplicationActive() { - return false; + return m_replicationActive; } @Override
ENG-<I> DR blocked for elastic test.
VoltDB_voltdb
train
efb102b1765dae40f350d20ed1025386f1e25c9f
diff --git a/mailparser/mailparser.py b/mailparser/mailparser.py index <HASH>..<HASH> 100644 --- a/mailparser/mailparser.py +++ b/mailparser/mailparser.py @@ -237,6 +237,7 @@ class MailParser(object): self._attachments = [] self._text_plain = [] + self._text_html = [] self._defects = [] self._defects_categories = set() self._has_defects = False @@ -378,7 +379,10 @@ class MailParser(object): payload = ported_string( p.get_payload(decode=True), encoding=charset) if payload: - self._text_plain.append(payload) + if p.get_content_subtype() == 'html': + self._text_html.append(payload) + else: + self._text_plain.append(payload) # Parsed object mail self._make_mail() @@ -497,10 +501,10 @@ class MailParser(object): @property def body(self): """ - Return all text plain parts of mail delimited from string + Return all text plain and text html parts of mail delimited from string "--- mail_boundary ---" """ - return "\n--- mail_boundary ---\n".join(self.text_plain) + return "\n--- mail_boundary ---\n".join(self.text_plain + self.text_html) @property def headers(self): @@ -527,6 +531,13 @@ class MailParser(object): return self._text_plain @property + def text_html(self): + """ + Return a list of all text html parts of email. + """ + return self._text_html + + @property def date(self): """ Return the mail date in datetime.datetime format and UTC.
Add text_html property (#<I>)
SpamScope_mail-parser
train
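
The patch above routes each decoded payload into text_plain or text_html based on the MIME part's content subtype. A small sketch of the same branching using only the standard library's email package; the inline message is made-up example data.

import email
from email import policy

raw = (
    "From: a@example.com\r\nSubject: demo\r\nMIME-Version: 1.0\r\n"
    'Content-Type: multipart/alternative; boundary="B"\r\n\r\n'
    "--B\r\nContent-Type: text/plain\r\n\r\nplain body\r\n"
    "--B\r\nContent-Type: text/html\r\n\r\n<p>html body</p>\r\n"
    "--B--\r\n"
)

msg = email.message_from_string(raw, policy=policy.default)
text_plain, text_html = [], []

for part in msg.walk():
    if part.get_content_maintype() != "text":
        continue  # skips the multipart container itself
    # Same branching as the patch: the subtype picks the bucket.
    if part.get_content_subtype() == "html":
        text_html.append(part.get_content())
    else:
        text_plain.append(part.get_content())

print(len(text_plain), len(text_html))  # 1 1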
8117106b26d260a1e7a214ae99a4002344303006
diff --git a/server/databases.go b/server/databases.go
index <HASH>..<HASH> 100644
--- a/server/databases.go
+++ b/server/databases.go
@@ -10,8 +10,8 @@ import (
 )
 
 type dbLinks struct {
-	Self string `json:"self"` // Self link mapping to this resource
-	RPs  string `json:"rps"`  // URL for retention policies for this database
+	Self string `json:"self"`              // Self link mapping to this resource
+	RPs  string `json:"retentionPolicies"` // URL for retention policies for this database
 }
 
 type dbResponse struct {
@@ -22,6 +22,16 @@ type dbResponse struct {
 	Links dbLinks `json:"links"` // Links are URI locations related to the database
 }
 
+func NewDBResponse(srcID int, name string) dbResponse {
+	base := "/chronograf/v1/sources"
+	return dbResponse{
+		Name: name,
+		Links: dbLinks{
+			Self: fmt.Sprintf("%s/%d/dbs/%s", base, srcID, name),
+		},
+	}
+}
+
 type dbsResponse struct {
 	Databases []dbResponse `json:"databases"`
 }
@@ -39,6 +49,13 @@ type rpResponse struct {
 	Links rpLinks `json:"links"` // Links are URI locations related to the database
 }
 
+func (r *rpResponse) WithLinks(srcID int, dbName string) {
+	base := "/chronograf/v1/sources"
+	r.Links = rpLinks{
+		Self: fmt.Sprintf("%s/%d/dbs/%s/rps/%s", base, srcID, dbName, r.Name),
+	}
+}
+
 type rpsResponse struct {
 	RetentionPolicies []rpResponse `json:"retentionPolicies"`
 }
@@ -60,7 +77,6 @@ func (h *Service) GetDatabases(w http.ResponseWriter, r *http.Request) {
 	}
 
 	db := h.Databases
-
 	if err = db.Connect(ctx, &src); err != nil {
 		msg := fmt.Sprintf("Unable to connect to source %d: %v", srcID, err)
 		Error(w, http.StatusBadRequest, msg, h.Logger)
@@ -75,9 +91,7 @@ func (h *Service) GetDatabases(w http.ResponseWriter, r *http.Request) {
 
 	dbs := make([]dbResponse, len(databases))
 	for i, d := range databases {
-		dbs[i] = dbResponse{
-			Name: d.Name,
-		}
+		dbs[i] = NewDBResponse(srcID, d.Name)
 	}
 
 	res := dbsResponse{
@@ -127,7 +141,7 @@ func (h *Service) NewDatabase(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	res := dbResponse{Name: database.Name}
+	res := NewDBResponse(srcID, database.Name)
 	encodeJSON(w, http.StatusCreated, res, h.Logger)
 }
 
@@ -198,13 +212,15 @@ func (h *Service) RetentionPolicies(w http.ResponseWriter, r *http.Request) {
 
 	rps := make([]rpResponse, len(allRP))
 	for i, rp := range allRP {
-		rps[i] = rpResponse{
+		rp := rpResponse{
 			Name:          rp.Name,
 			Duration:      rp.Duration,
 			Replication:   rp.Replication,
 			ShardDuration: rp.ShardDuration,
 			Default:       rp.Default,
 		}
+		rp.WithLinks(srcID, dbID)
+		rps[i] = rp
 	}
 
 	res := rpsResponse{
@@ -302,7 +318,7 @@ func (h *Service) UpdateRetentionPolicy(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	// TODO: this needs to be the actual RP information
+	// TODO: this needs to be the actual RP information 
 	res := rpResponse{Name: rp.Name}
 	encodeJSON(w, http.StatusCreated, res, h.Logger)
 }
diff --git a/server/sources.go b/server/sources.go
index <HASH>..<HASH> 100644
--- a/server/sources.go
+++ b/server/sources.go
@@ -18,6 +18,7 @@ type sourceLinks struct {
 	Permissions string `json:"permissions"`     // URL for all allowed permissions for this source
 	Users       string `json:"users"`           // URL for all users associated with this source
 	Roles       string `json:"roles,omitempty"` // URL for all users associated with this source
+	Databases   string `json:"databases"`       // URL for the databases contained within this soure
 }
 
 type sourceResponse struct {
@@ -43,6 +44,7 @@ func newSourceResponse(src chronograf.Source) sourceResponse {
 			Kapacitors:  fmt.Sprintf("%s/%d/kapacitors", httpAPISrcs, src.ID),
 			Permissions: fmt.Sprintf("%s/%d/permissions", httpAPISrcs, src.ID),
 			Users:       fmt.Sprintf("%s/%d/users", httpAPISrcs, src.ID),
+			Databases:   fmt.Sprintf("%s/%d/dbs", httpAPISrcs, src.ID),
 		},
 	}
Update databases/retention policy link responses
influxdata_influxdb
train
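
The commit above attaches self and retention-policy links to each resource it returns. The link-building pattern is simple enough to show in a few lines of Python; the route shapes follow the diff, but the function itself is an illustrative stand-in for NewDBResponse.

BASE = "/chronograf/v1/sources"

def db_response(src_id, name):
    # Build the resource together with its navigation links.
    return {
        "name": name,
        "links": {
            "self": f"{BASE}/{src_id}/dbs/{name}",
            "retentionPolicies": f"{BASE}/{src_id}/dbs/{name}/rps",
        },
    }

print(db_response(1, "telegraf")["links"]["retentionPolicies"])
# /chronograf/v1/sources/1/dbs/telegraf/rps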
0348ef32b00caab82588eef656ed01fbcf788350
diff --git a/upload/catalog/controller/extension/payment/pp_braintree.php b/upload/catalog/controller/extension/payment/pp_braintree.php index <HASH>..<HASH> 100644 --- a/upload/catalog/controller/extension/payment/pp_braintree.php +++ b/upload/catalog/controller/extension/payment/pp_braintree.php @@ -897,7 +897,7 @@ class ControllerExtensionPaymentPPBraintree extends Controller { } if ($product['image']) { - $image = $this->model_tool_image->resize($product['image'], $this->config->get($this->config->get('config_theme') . '_image_cart_width'), $this->config->get($this->config->get('config_theme') . '_image_cart_height')); + $image = $this->model_tool_image->resize($product['image'], $this->config->get('theme_' . $this->config->get('config_theme') . '_image_cart_width'), $this->config->get('theme_' . $this->config->get('config_theme') . '_image_cart_height')); } else { $image = ''; }
Fix Braintree image resize bug - due to new format of config options
opencart_opencart
train
2bf01a66115bfce1f43befb8f087c2a0243c048e
diff --git a/src/Illuminate/View/Compilers/Concerns/CompilesConditionals.php b/src/Illuminate/View/Compilers/Concerns/CompilesConditionals.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/View/Compilers/Concerns/CompilesConditionals.php +++ b/src/Illuminate/View/Compilers/Concerns/CompilesConditionals.php @@ -5,7 +5,7 @@ namespace Illuminate\View\Compilers\Concerns; trait CompilesConditionals { /** - * Identifier for the first case in switch statement + * Identifier for the first case in switch statement. * * @var bool */ @@ -139,7 +139,7 @@ trait CompilesConditionals */ protected function compileDefault() { - return "<?php default: ?>"; + return '<?php default: ?>'; } /** @@ -149,6 +149,6 @@ trait CompilesConditionals */ protected function compileEndSwitch() { - return "<?php endswitch; ?>"; + return '<?php endswitch; ?>'; } }
corrections as per the styleCI check
laravel_framework
train
c15889b6b679706f068bd975a4e5c1deeefeb6ff
diff --git a/src/accounts/search_indexes.py b/src/accounts/search_indexes.py index <HASH>..<HASH> 100644 --- a/src/accounts/search_indexes.py +++ b/src/accounts/search_indexes.py @@ -32,7 +32,6 @@ class UserIndex(indexes.SearchIndex, indexes.Indexable): def prepare_description(self, obj): return u'{}\n{}\n{}\n{}\n{}\n{}'.format( obj.institution, obj.role, obj.username, obj.get_full_name(), - obj.google_talk, obj.webpage ) def prepare_icon_name(self, obj):
Removing unnecessary fields from accounts search indexes
colab_colab
train
8c98570041759dfa76a46c1b32bc4e882c2fa22a
diff --git a/lib/pragmatic_segmenter/languages/arabic.rb b/lib/pragmatic_segmenter/languages/arabic.rb index <HASH>..<HASH> 100644 --- a/lib/pragmatic_segmenter/languages/arabic.rb +++ b/lib/pragmatic_segmenter/languages/arabic.rb @@ -18,11 +18,6 @@ module PragmaticSegmenter # Rubular: http://rubular.com/r/kPRgApNHUg ReplaceNonSentenceBoundaryCommaRule = Rule.new(/،(?=\s\S+،)/, '♬') - class Process < Process - private - - end - class AbbreviationReplacer < AbbreviationReplacer private diff --git a/lib/pragmatic_segmenter/languages/persian.rb b/lib/pragmatic_segmenter/languages/persian.rb index <HASH>..<HASH> 100644 --- a/lib/pragmatic_segmenter/languages/persian.rb +++ b/lib/pragmatic_segmenter/languages/persian.rb @@ -9,11 +9,6 @@ module PragmaticSegmenter ReplaceColonBetweenNumbersRule = Rule.new(/(?<=\d):(?=\d)/, '♭') ReplaceNonSentenceBoundaryCommaRule = Rule.new(/،(?=\s\S+،)/, '♬') - class Process < Process - private - - end - class AbbreviationReplacer < AbbreviationReplacer private
Remove Process classes from langs * Arabic * Persian
diasks2_pragmatic_segmenter
train
de3916fe2a7115ecb5f152c84819cc17274d7a45
diff --git a/docs/developer/.scripts/33_render_status.py b/docs/developer/.scripts/33_render_status.py index <HASH>..<HASH> 100644 --- a/docs/developer/.scripts/33_render_status.py +++ b/docs/developer/.scripts/33_render_status.py @@ -323,7 +323,8 @@ def render_recommended_monitors_progress(): def render_manifest_v2_progress(): - valid_checks = get_valid_integrations() + valid_checks = sorted(get_valid_integrations()) + total_checks = len(valid_checks) checks_v2_manifest = 0
Sort integrations when rendering (#<I>) * Sort integrations when rendering * Update docs/developer/.scripts/<I>_render_status.py
DataDog_integrations-core
train
20ba844667b2b7e7916953f1407670faf0160f5c
diff --git a/tensorflow_probability/python/bijectors/real_nvp_test.py b/tensorflow_probability/python/bijectors/real_nvp_test.py index <HASH>..<HASH> 100644 --- a/tensorflow_probability/python/bijectors/real_nvp_test.py +++ b/tensorflow_probability/python/bijectors/real_nvp_test.py @@ -105,37 +105,36 @@ class RealNVPTest(test_util.VectorDistributionTestHelpers, tf.test.TestCase): 'b': tf.reshape(tf.range(N * 2, dtype=tf.float32), (N, 2)), } - with self.test_session() as sess: - nvp = tfb.RealNVP( - num_masked=4, validate_args=True, **self._real_nvp_kwargs) - x = tf.constant(x_) - - forward_x = nvp.forward(x, **conditions) - # Use identity to invalidate cache. - inverse_y = nvp.inverse(tf.identity(forward_x), **conditions) - forward_inverse_y = nvp.forward(inverse_y, **conditions) - fldj = nvp.forward_log_det_jacobian(x, event_ndims=1, **conditions) - # Use identity to invalidate cache. - ildj = nvp.inverse_log_det_jacobian( - tf.identity(forward_x), event_ndims=1, **conditions) - tf.global_variables_initializer().run() - [ - forward_x_, - inverse_y_, - forward_inverse_y_, - ildj_, - fldj_, - ] = sess.run([ - forward_x, - inverse_y, - forward_inverse_y, - ildj, - fldj, - ]) - self.assertEqual("real_nvp", nvp.name) - self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-1, atol=0.) - self.assertAllClose(x_, inverse_y_, rtol=1e-1, atol=0.) - self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.) + nvp = tfb.RealNVP( + num_masked=4, validate_args=True, **self._real_nvp_kwargs) + x = tf.constant(x_) + + forward_x = nvp.forward(x, **conditions) + # Use identity to invalidate cache. + inverse_y = nvp.inverse(tf.identity(forward_x), **conditions) + forward_inverse_y = nvp.forward(inverse_y, **conditions) + fldj = nvp.forward_log_det_jacobian(x, event_ndims=1, **conditions) + # Use identity to invalidate cache. + ildj = nvp.inverse_log_det_jacobian( + tf.identity(forward_x), event_ndims=1, **conditions) + self.evaluate(tf.global_variables_initializer()) + [ + forward_x_, + inverse_y_, + forward_inverse_y_, + ildj_, + fldj_, + ] = self.evaluate([ + forward_x, + inverse_y, + forward_inverse_y, + ildj, + fldj, + ]) + self.assertEqual("real_nvp", nvp.name) + self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-1, atol=0.) + self.assertAllClose(x_, inverse_y_, rtol=1e-1, atol=0.) + self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.) def testMutuallyConsistent(self): dims = 4
Use new test evaluate pattern in conditional real nvp tests
tensorflow_probability
train
321903d06e196704bce5b190648ce3aebb0e89de
diff --git a/django-openstack/django_openstack/syspanel/views/flavors.py b/django-openstack/django_openstack/syspanel/views/flavors.py index <HASH>..<HASH> 100644 --- a/django-openstack/django_openstack/syspanel/views/flavors.py +++ b/django-openstack/django_openstack/syspanel/views/flavors.py @@ -42,7 +42,7 @@ class DeleteFlavor(forms.SelfHandlingForm): try: flavor_id = data['flavorid'] flavor = api.flavor_get(request, flavor_id) - api.flavor_delete(request, flavor_id, True) + api.flavor_delete(request, flavor_id, False) messages.info(request, 'Successfully deleted flavor: %s' % flavor.name) except api_exceptions.ApiException, e:
don't purge flavor by default!!!
openstack_horizon
train
cbc72cb888f52d138def0319b49287f53f8a4dda
diff --git a/lib/down.rb b/lib/down.rb index <HASH>..<HASH> 100644 --- a/lib/down.rb +++ b/lib/down.rb @@ -127,12 +127,12 @@ module Down end request_headers = options.select { |key, value| key.is_a?(String) } + get = Net::HTTP::Get.new(uri.request_uri, request_headers) + get.basic_auth(uri.user, uri.password) if uri.user || uri.password request = Fiber.new do http.start do - req = Net::HTTP::Get.new(uri.request_uri, request_headers) - req.basic_auth(uri.user, uri.password) if uri.user || uri.password - http.request(req) do |response| + http.request(get) do |response| Fiber.yield response response.instance_variable_set("@read", true) end
Move creating request object outside of the Fiber
janko_down
train
a01226326f31b7b1f336ef643079ad30614899db
diff --git a/classic.py b/classic.py index <HASH>..<HASH> 100644 --- a/classic.py +++ b/classic.py @@ -176,6 +176,11 @@ class ClassicCollection(Collection): for tmp in self.db.list_users(): self.users[tmp[0]] = ClassicUser(self, *tmp) self.on_keys_changed() + with self.lock: + if len(self.media) > 0: + self.got_media_event.set() + else: + self.got_media_event.clear() def list_media(self): if self.media is None: @@ -193,6 +198,9 @@ class ClassicCollection(Collection): def _user_by_key(self, key): return self.users[key] + def stop(self): + self.got_media_event.set() + class ClassicRandom(Random): def __init__(self, settings, logger): super(ClassicRandom, self).__init__(settings, logger)
classic: properly stop and add got_media_event
bwesterb_mirte
train
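
The change above keeps an event flag in step with the media list, setting it when media exist, clearing it otherwise, and setting it unconditionally on stop so blocked waiters wake up. A compact Python sketch of that gate pattern; the class is a simplified stand-in, not mirte's Collection.

import threading

class Collection:
    def __init__(self):
        self.lock = threading.Lock()
        self.media = []
        self.got_media_event = threading.Event()

    def refresh(self, media):
        with self.lock:
            self.media = list(media)
            # The event tracks whether any media are present.
            if self.media:
                self.got_media_event.set()
            else:
                self.got_media_event.clear()

    def stop(self):
        # Wake any thread blocked on wait() so it can notice shutdown.
        self.got_media_event.set()

c = Collection()
waiter = threading.Thread(target=c.got_media_event.wait)
waiter.start()
c.refresh(["song.ogg"])  # releases the waiter
waiter.join()
print("waiter released")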
a7275f43101dcb4f42c2ad9643fc151af3de3bf4
diff --git a/lib/zk/client/threaded.rb b/lib/zk/client/threaded.rb
index <HASH>..<HASH> 100644
--- a/lib/zk/client/threaded.rb
+++ b/lib/zk/client/threaded.rb
@@ -15,23 +15,11 @@ module ZK
     # unconfigurable, event dispatch thread. In 1.0 the number of event
     # delivery threads is configurable, but still defaults to 1.
     #
-    # The configurability is intended to allow users to easily dispatch events to
-    # event handlers that will perform (application specific) work. Be aware,
-    # the default will give you the guarantee that only one event will be delivered
-    # at a time. The advantage to this is that you can be sure that no event will
-    # be delivered "behind your back" while you're in an event handler. If you're
-    # comfortable with dealing with threads and concurrency, then feel free to
-    # set the `:threadpool_size` option to the constructor to a value you feel is
-    # correct for your app.
-    #
    # If you use the threadpool/event callbacks to perform work, you may be
    # interested in registering an `on_exception` callback that will receive
    # all exceptions that occur on the threadpool that are not handled (i.e.
    # that bubble up to top of a block).
    #
-    # It is recommended that you not run any possibly long-running work on the
-    # event threadpool, as `close!` will attempt to shutdown the threadpool, and
-    # **WILL NOT WAIT FOREVER**. (TODO: more on this)
     #
     # @example Register on_connected callback.
     #
@@ -77,16 +65,28 @@
     # case of an expired session, we will keep trying to reestablish the
     # connection.
     #
-    # @option opts [Fixnum] :threadpool_size (1) the size of the threadpool that
-    #   should be used to deliver events. As of 1.0, this is the number of
-    #   event delivery threads and controls the amount of concurrency in your
-    #   app if you're doing work in the event callbacks.
+    # @option opts [:per_event,:single] :thread (:single) choose your event
+    #   delivery model:
     #
-    # @option opts [true,false] :actor (false) if true, use the new (experimental)
-    #   Actor style callback dispatching code. This should be compatible with most
-    #   existing code, and presents a safer alternative to adjusting the `:threadpool_size`
-    #   option. see {ZK::EventHandlerSubscription::Actor Actor} for a discussion about
-    #   the relative advantages of this strategy.
+    #   * `:single`: There is one thread, and only one callback is called at
+    #     a time. This is the default mode (for now), and will provide the most
+    #     safety for your app. All events will be delivered as received, to
+    #     callbacks in the order they were registered. This safety has the
+    #     tradeoff that if one of your callbacks performs some action that blocks
+    #     the delivery thread, you will not recieve other events until it returns.
+    #     You're also limiting the concurrency of your app. This should be fine
+    #     for most simple apps, and is a good choice to start with when
+    #     developing your application
+    #
+    #   * `:per_callback`: This option will use a new-style Actor model (inspired by
+    #     [Celluloid](https://github.com/celluloid/celluloid)) that uses a
+    #     per-callback queue and thread to allow for greater concurrency in
+    #     your app, whille still maintaining some kind of sanity. By choosing
+    #     this option your callbacks will receive events in order, and will
+    #     receive only one at a time, but in parallel with other callbacks.
+    #     This model has the advantage you can have all of your callbacks
+    #     making progress in parallel, and if one of them happens to block,
+    #     it will not affect the others. 
     #
     # @option opts [Fixnum] :timeout how long we will wait for the connection
     #   to be established. If timeout is nil, we will wait forever *use
document the two :thread options {:per_callback,:single}
zk-ruby_zk
train
fe774666e26bff49350a7e8cbd0def29a622cf7a
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -51,7 +51,7 @@ "proxyquire": "^2.1.0", "simple-get": "^4.0.0", "snazzy": "^9.0.0", - "standard": "^16.0.2", + "standard": "^17.0.0", "tap": "^16.0.0", "tsd": "^0.20.0", "typescript": "^4.0.2" diff --git a/test/dir-list.test.js b/test/dir-list.test.js index <HASH>..<HASH> 100644 --- a/test/dir-list.test.js +++ b/test/dir-list.test.js @@ -1,6 +1,6 @@ 'use strict' -/* eslint node/no-deprecated-api: "off" */ +/* eslint n/no-deprecated-api: "off" */ const fs = require('fs') const path = require('path') diff --git a/test/static.test.js b/test/static.test.js index <HASH>..<HASH> 100644 --- a/test/static.test.js +++ b/test/static.test.js @@ -1,6 +1,6 @@ 'use strict' -/* eslint node/no-deprecated-api: "off" */ +/* eslint n/no-deprecated-api: "off" */ const path = require('path') const fs = require('fs') @@ -1270,7 +1270,7 @@ t.test('root not found warning', (t) => { }, destination ) - const fastify = Fastify({ logger: logger }) + const fastify = Fastify({ logger }) fastify.register(fastifyStatic, pluginOptions) fastify.listen(0, (err) => { t.error(err)
build(deps-dev): bump standard from <I> to <I> (#<I>)
fastify_fastify-static
train
5acc87cfd360820f161f1d3803fda269e9c05399
diff --git a/openxc/sources/base.py b/openxc/sources/base.py index <HASH>..<HASH> 100644 --- a/openxc/sources/base.py +++ b/openxc/sources/base.py @@ -4,7 +4,7 @@ from openxc.formats.json import JsonFormatter class DataSource(object): - def __init__(self, callback): + def __init__(self, callback=None): self.callback = callback self.bytes_received = 0 @@ -27,7 +27,8 @@ class DataSource(object): message_buffer) if message is not None: self.bytes_received += byte_count - self.callback(message) + if callback is not None: + self.callback(message) else: break diff --git a/openxc/sources/serial.py b/openxc/sources/serial.py index <HASH>..<HASH> 100644 --- a/openxc/sources/serial.py +++ b/openxc/sources/serial.py @@ -11,7 +11,7 @@ class SerialDataSource(DataSource): DEFAULT_PORT = "/dev/ttyUSB0" DEFAULT_BAUDRATE = 115200 - def __init__(self, callback, port=None, baudrate=None): + def __init__(self, callback=None, port=None, baudrate=None): super(SerialDataSource, self).__init__(callback) port = port or self.DEFAULT_PORT baudrate = baudrate or self.DEFAULT_BAUDRATE diff --git a/openxc/sources/trace.py b/openxc/sources/trace.py index <HASH>..<HASH> 100644 --- a/openxc/sources/trace.py +++ b/openxc/sources/trace.py @@ -11,7 +11,7 @@ class TraceDataSource(DataSource): DEFAULT_PORT = "/dev/ttyUSB0" DEFAULT_BAUDRATE = 115200 - def __init__(self, callback, filename=None, realtime=True, loop=True): + def __init__(self, callback=None, filename=None, realtime=True, loop=True): super(TraceDataSource, self).__init__(callback) self.realtime = realtime self.loop = loop @@ -61,7 +61,8 @@ class TraceDataSource(DataSource): continue if self.realtime and 'timestamp' in message: self.wait(starting_time, message['timestamp']) - self.callback(message) + if self.callback is not None: + self.callback(message) self.trace_file.close() self.trace_file = None diff --git a/openxc/sources/usb.py b/openxc/sources/usb.py index <HASH>..<HASH> 100644 --- a/openxc/sources/usb.py +++ b/openxc/sources/usb.py @@ -15,7 +15,7 @@ class UsbDataSource(DataSource): DEFAULT_READ_REQUEST_SIZE = 512 DEFAULT_READ_TIMEOUT = 1000000 - def __init__(self, callback, vendor_id=None): + def __init__(self, callback=None, vendor_id=None): super(UsbDataSource, self).__init__(callback) if vendor_id is not None and not isinstance(vendor_id, int): vendor_id = int(vendor_id, 0)
Allow a data source to have no callback.
openxc_openxc-python
train
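
The commit above makes the callback optional and guards every delivery. The pattern is small but easy to get wrong; a sketch, with the guard testing the instance attribute self.callback:

class DataSource:
    def __init__(self, callback=None):
        self.callback = callback
        self.messages_received = 0

    def _deliver(self, message):
        self.messages_received += 1
        # Deliver only when a callback was supplied; message counting
        # still works for callers that merely poll the counters.
        if self.callback is not None:
            self.callback(message)

silent = DataSource()                  # no callback: delivery is a no-op
silent._deliver({"name": "speed"})
printer = DataSource(callback=print)   # with callback: message is printed
printer._deliver({"name": "speed"})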
66de3998646334b81e59ce68a3e8eacc3280c349
diff --git a/lib/args.js b/lib/args.js index <HASH>..<HASH> 100644 --- a/lib/args.js +++ b/lib/args.js @@ -32,17 +32,18 @@ function version() { } var map = { - help : help, - version : version, - watch : bool, - cover : bool, - node : bool, - wd : bool, - reporter : string, - ui : string, - timeout : int, - port : int, - yields : int + help : help, + version : version, + watch : bool, + cover : bool, + node : bool, + wd : bool, + reporter : string, + ui : string, + phantomjs : string, + timeout : int, + port : int, + yields : int }; var alias = { diff --git a/lib/launch.js b/lib/launch.js index <HASH>..<HASH> 100644 --- a/lib/launch.js +++ b/lib/launch.js @@ -85,9 +85,10 @@ module.exports = function (context, opts, out) { function launchPhantom(callback) { phantomic(context.ps, { - debug : opts.debug, - port : opts.port, - brout : true + debug : opts.debug, + port : opts.port, + brout : true, + phantomjs : opts.phantomjs }, launcherCallback(callback)) .pipe(tracebackFormatter()) .pipe(out()); diff --git a/test/args-test.js b/test/args-test.js index <HASH>..<HASH> 100644 --- a/test/args-test.js +++ b/test/args-test.js @@ -106,6 +106,12 @@ describe('args', function () { assert.equal(opts.yields, 123); }); + it('parses --phantomjs', function () { + var opts = args(['--phantomjs', '/foo/bar']); + + assert.equal(opts.phantomjs, '/foo/bar'); + }); + it('quits with usage', function (done) { run('passes', ['--unknown'], function (code, stdout) { assert.equal(code, 1); diff --git a/test/phantom-test.js b/test/phantom-test.js index <HASH>..<HASH> 100644 --- a/test/phantom-test.js +++ b/test/phantom-test.js @@ -81,4 +81,12 @@ describe('phantom', function () { }); }); + it('uses custom phantomjs', function (done) { + run('passes', ['--phantomjs', 'some/path'], function (code, stdout) { + assert.equal(stdout.indexOf('Cannot find phantomjs'), 0); + assert.equal(code, 1); + done(); + }); + }); + });
Add support for custom phantomjs install path with --phantomjs
mantoni_mochify.js
train
b9e8bb59707d3d0f0cc3847c52e0f4aa29b84c22
diff --git a/parsl/dataflow/dflow.py b/parsl/dataflow/dflow.py index <HASH>..<HASH> 100644 --- a/parsl/dataflow/dflow.py +++ b/parsl/dataflow/dflow.py @@ -81,6 +81,8 @@ class DataFlowKernel(object): logger.info("Parsl version: {}".format(get_version())) logger.info("Libsubmit version: {}".format(libsubmit.__version__)) + self.checkpoint_lock = threading.Lock() + # Update config with defaults self._config = update_config(config, self.rundir) @@ -613,71 +615,70 @@ class DataFlowKernel(object): By default the checkpoints are written to the RUNDIR of the current run under RUNDIR/checkpoints/{tasks.pkl, dfk.pkl} """ + with self.checkpoint_lock: + checkpoint_queue = None + if tasks: + checkpoint_queue = tasks + else: + checkpoint_queue = self.tasks - checkpoint_queue = None - if tasks: - checkpoint_queue = tasks - else: - checkpoint_queue = self.tasks - - checkpoint_dir = '{0}/checkpoint'.format(self.rundir) - checkpoint_dfk = checkpoint_dir + '/dfk.pkl' - checkpoint_tasks = checkpoint_dir + '/tasks.pkl' - - if not os.path.exists(checkpoint_dir): - try: - os.makedirs(checkpoint_dir) - except FileExistsError as e: - pass - - with open(checkpoint_dfk, 'wb') as f: - state = {'config': self.config, - 'rundir': self.rundir, - 'task_count': self.task_count - } - pickle.dump(state, f) - - count = 0 - - with open(checkpoint_tasks, 'ab') as f: - for task_id in checkpoint_queue: - if not self.tasks[task_id]['checkpoint'] and \ - self.tasks[task_id]['status'] == States.done: - - hashsum = self.tasks[task_id]['hashsum'] - if not hashsum: - continue - t = {'hash': hashsum, - 'exception': None, - 'result': None} - try: - # Asking for the result will raise an exception if - # the app had failed. Should we even checkpoint these? - # TODO : Resolve this question ? - r = self.memoizer.hash_lookup(hashsum).result() - except Exception as e: - t['exception'] = e - else: - t['result'] = r - - # We are using pickle here since pickle dumps to a file in 'ab' - # mode behave like a incremental log. - pickle.dump(t, f) - count += 1 - self.tasks[task_id]['checkpoint'] = True - logger.debug("Task {} checkpointed".format(task_id)) - - self.checkpointed_tasks += count + checkpoint_dir = '{0}/checkpoint'.format(self.rundir) + checkpoint_dfk = checkpoint_dir + '/dfk.pkl' + checkpoint_tasks = checkpoint_dir + '/tasks.pkl' - if count == 0: - if self.checkpointed_tasks == 0: - logger.warn("No tasks checkpointed, please ensure caching is enabled") + if not os.path.exists(checkpoint_dir): + try: + os.makedirs(checkpoint_dir) + except FileExistsError as e: + pass + + with open(checkpoint_dfk, 'wb') as f: + state = {'config': self.config, + 'rundir': self.rundir, + 'task_count': self.task_count + } + pickle.dump(state, f) + + count = 0 + + with open(checkpoint_tasks, 'ab') as f: + for task_id in checkpoint_queue: + if not self.tasks[task_id]['checkpoint'] and \ + self.tasks[task_id]['status'] == States.done: + hashsum = self.tasks[task_id]['hashsum'] + if not hashsum: + continue + t = {'hash': hashsum, + 'exception': None, + 'result': None} + try: + # Asking for the result will raise an exception if + # the app had failed. Should we even checkpoint these? + # TODO : Resolve this question ? + r = self.memoizer.hash_lookup(hashsum).result() + except Exception as e: + t['exception'] = e + else: + t['result'] = r + + # We are using pickle here since pickle dumps to a file in 'ab' + # mode behave like a incremental log. 
+ pickle.dump(t, f) + count += 1 + self.tasks[task_id]['checkpoint'] = True + logger.debug("Task {} checkpointed".format(task_id)) + + self.checkpointed_tasks += count + + if count == 0: + if self.checkpointed_tasks == 0: + logger.warn("No tasks checkpointed, please ensure caching is enabled") + else: + logger.debug("No tasks checkpointed") else: - logger.debug("No tasks checkpointed") - else: - logger.info("Done checkpointing {} tasks".format(count)) + logger.info("Done checkpointing {} tasks".format(count)) - return checkpoint_dir + return checkpoint_dir def _load_checkpoints(self, checkpointDirs): """Load a checkpoint file into a lookup table.
Use a lock to serialise checkpointing. Prior to this, parsl/tests/test_checkpointing/test_regression_<I>.py was failing on benc's laptop fairly regularly - within <I> iterations of while pytest parsl/tests/test_checkpointing/test_regression_<I>.py; do echo TICK ; done
Parsl_parsl
train
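
The fix above serialises the whole checkpoint routine behind a threading.Lock so concurrent triggers queue up instead of interleaving appends to the same file. A stripped-down sketch of that shape; the task fields and the in-memory list standing in for the pickle file are simplified placeholders.

import threading

class Checkpointer:
    def __init__(self):
        self.checkpoint_lock = threading.Lock()
        self.tasks = {}        # task_id -> {"status": str, "checkpoint": bool}
        self.checkpointed = []

    def checkpoint(self, task_ids=None):
        # One writer at a time: a second caller waits rather than
        # double-writing tasks the first caller is already flushing.
        with self.checkpoint_lock:
            queue = task_ids if task_ids is not None else list(self.tasks)
            for task_id in queue:
                task = self.tasks[task_id]
                if task["status"] == "done" and not task["checkpoint"]:
                    self.checkpointed.append(task_id)  # stands in for pickle.dump
                    task["checkpoint"] = True

cp = Checkpointer()
cp.tasks = {1: {"status": "done", "checkpoint": False}}
threads = [threading.Thread(target=cp.checkpoint) for _ in range(4)]
for t in threads: t.start()
for t in threads: t.join()
print(cp.checkpointed)  # [1], checkpointed exactly once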
f7769a6a1170885327b838bda854a135eb10297c
diff --git a/scripts/generate_address_data.php b/scripts/generate_address_data.php index <HASH>..<HASH> 100644 --- a/scripts/generate_address_data.php +++ b/scripts/generate_address_data.php @@ -114,14 +114,14 @@ function file_put_json($filename, $data) */ function file_put_php($filename, $data) { - $data = var_export($data, true); + $data = var_export($data, true) . ';'; // The var_export output is terrible, so try to get it as close as possible // to the final result. $array_keys = [ '0 => ', '1 => ', '2 => ', '3 => ', '4 => ', '5 => ', '6 => ', '7 => ', '8 => ', '9 => ', '10 => ', '11 => ', ]; - $data = str_replace(['array (', "),\n", "=> \n "], ['[', "],\n", '=> '], $data); + $data = str_replace(['array (', "),\n", ');', "=> \n "], ['[', "],\n", '];', '=> '], $data); $data = str_replace('=> [', '=> [', $data); $data = str_replace($array_keys, '', $data); // Put fields into one row. @@ -138,7 +138,7 @@ function file_put_php($filename, $data) $data = str_replace(' ', ' ', $data); // Unescape backslashes. $data = str_replace('\\\\', '\\', $data); - $data = '<?php' . "\n\n" . '$data = ' . $data . ';'; + $data = '<?php' . "\n\n" . '$data = ' . $data; file_put_contents($filename, $data); }
Improve address format generation, to skip a syntax error in address_formats.php.
commerceguys_addressing
train
7039cc47dcb84ceec34f76ede47e397b89cff07f
diff --git a/voice.go b/voice.go index <HASH>..<HASH> 100644 --- a/voice.go +++ b/voice.go @@ -320,7 +320,7 @@ func (v *VoiceConnection) wsListen(wsConn *websocket.Conn, close <-chan struct{} v.RUnlock() if sameConnection { - v.log(LogError, "voice endpoint %s websocket closed unexpectantly,i %s", v.endpoint, err) + v.log(LogError, "voice endpoint %s websocket closed unexpectantly, %s", v.endpoint, err) // Start reconnect goroutine then exit. go v.reconnect() @@ -835,16 +835,29 @@ func (v *VoiceConnection) reconnect() { } if v.session.DataReady == false { + v.log(LogInformational, "cannot reconenct with unready session") + } else { v.log(LogInformational, "trying to reconnect to voice") - _, err := v.session.ChannelVoiceJoin(v.GuildID, v.ChannelID, v.mute, v.deaf) + /* + // TODO: Move this to a 2nd stage + _, err := v.session.ChannelVoiceJoin(v.GuildID, v.ChannelID, v.mute, v.deaf) + if err == nil { + v.log(LogInformational, "successfully reconnected to voice") + return + } + */ + + err := v.open() if err == nil { v.log(LogInformational, "successfully reconnected to voice") return } + + v.log(LogError, "error reconnecting to voice, %s", err) } <-time.After(wait * time.Second)
Reconnect to voice gateway instead of..
bwmarrin_discordgo
train
c594d1fb55a50f4dc39a7a3c2b6a6d8d87f072d7
diff --git a/selendroid-standalone/src/main/java/io/selendroid/android/impl/DefaultAndroidEmulator.java b/selendroid-standalone/src/main/java/io/selendroid/android/impl/DefaultAndroidEmulator.java index <HASH>..<HASH> 100644 --- a/selendroid-standalone/src/main/java/io/selendroid/android/impl/DefaultAndroidEmulator.java +++ b/selendroid-standalone/src/main/java/io/selendroid/android/impl/DefaultAndroidEmulator.java @@ -278,10 +278,12 @@ public class DefaultAndroidEmulator extends AbstractDevice implements AndroidEmu log.info("Emulator start took: " + (System.currentTimeMillis() - start) / 1000 + " seconds"); log.info("Please have in mind, starting an emulator takes usually about 45 seconds."); - unlockEmulatorScreen(); + + while (!unlockEmulatorScreen()) { + } } - private void unlockEmulatorScreen() throws AndroidDeviceException { + private boolean unlockEmulatorScreen() throws AndroidDeviceException { List<String> event82 = new ArrayList<String>(); event82.add(AndroidSdk.adb()); if (isSerialConfigured()) { @@ -315,6 +317,25 @@ public class DefaultAndroidEmulator extends AbstractDevice implements AndroidEmu } catch (ShellCommandException e) { throw new AndroidDeviceException(e); } + + List<String> event = new ArrayList<String>(); + event.add(AndroidSdk.adb()); + if (isSerialConfigured()) { + event.add("-s"); + event.add(serial); + } + event.add("shell"); + event.add("ps"); + String homeScreenLaunched = null; + try { + homeScreenLaunched = ShellCommand.exec(event); + } catch (ShellCommandException e) { + throw new AndroidDeviceException(e); + } + if (homeScreenLaunched != null && homeScreenLaunched.contains("S com.android.launcher")) { + return true; + } + return false; } @Override
fix for input keyevent 4 and checking if home screen is launched.
selendroid_selendroid
train
f2c34379100c02a8ab944a7174113ac07b6ecb5e
diff --git a/lib/sensu/cli.rb b/lib/sensu/cli.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/cli.rb +++ b/lib/sensu/cli.rb @@ -13,16 +13,16 @@ module Sensu puts VERSION exit end - opts.on('-c', '--config FILE', 'Sensu JSON config FILE. Default is /etc/sensu/config.json') do |file| + opts.on('-c', '--config FILE', 'Sensu JSON config FILE. Default: /etc/sensu/config.json') do |file| options[:config_file] = file end - opts.on('-d', '--config_dir DIR', 'DIR for supplemental Sensu JSON config files. Default is /etc/sensu/conf.d/') do |dir| + opts.on('-d', '--config_dir DIR', 'DIR for supplemental Sensu JSON config files. Default: /etc/sensu/conf.d/') do |dir| options[:config_dir] = dir end opts.on('-e', '--extension_dir DIR', 'DIR for Sensu extensions (experimental)') do |dir| options[:extension_dir] = dir end - opts.on('-l', '--log FILE', 'Log to a given FILE. Default is to log to STDOUT') do |file| + opts.on('-l', '--log FILE', 'Log to a given FILE. Default: STDOUT') do |file| options[:log_file] = file end opts.on('-v', '--verbose', 'Enable verbose logging') do diff --git a/lib/sensu/logstream.rb b/lib/sensu/logstream.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/logstream.rb +++ b/lib/sensu/logstream.rb @@ -38,7 +38,7 @@ module Sensu end if Signal.list.include?('USR2') Signal.trap('USR2') do - if @logfile + if @log_file reopen(@log_file) end end @@ -55,6 +55,7 @@ module Sensu class NullLogger [:debug, :info, :warn, :error, :fatal].each do |method| define_method(method) do |*arguments| + true end end
[refactoring] adjusted logstream and cli
sensu_sensu
train
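
The NullLogger in the diff is a textbook Null Object: every log level exists and does nothing, and the patch makes each call return true so callers that test the result keep working. The same idea in Python, generating the level methods in a loop:

class NullLogger:
    """Log sink that accepts any standard level call and discards it."""

for level in ("debug", "info", "warn", "error", "fatal"):
    # Each generated method swallows its arguments and reports success,
    # like the Ruby methods returning true.
    setattr(NullLogger, level, lambda self, *args, **kwargs: True)

log = NullLogger()
print(log.warn("disk almost full"))  # True, and nothing is emitted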
73a7a656131a230ded55992219d55da097cc7fe7
diff --git a/src/Ssess.php b/src/Ssess.php index <HASH>..<HASH> 100644 --- a/src/Ssess.php +++ b/src/Ssess.php @@ -6,12 +6,9 @@ class Ssess implements \SessionHandlerInterface { private $savePath; private $cipher = 'aes128'; - private $initializationVectorLength; public function open($save_path, $name) { - $this->initializationVectorLength = openssl_cipher_iv_length($this->cipher); - $this->savePath = $save_path; if (!is_dir($this->savePath)) { mkdir($this->savePath, 0777); @@ -28,16 +25,20 @@ class Ssess implements \SessionHandlerInterface public function read($session_id) { $file_name = 'ssess_'.sha1($session_id); - return (string)@file_get_contents("$this->savePath/$file_name"); + $encrypted_data = @file_get_contents("$this->savePath/$file_name"); + + if (!$encrypted_data) { + return ''; + } + + return openssl_decrypt($encrypted_data, $this->cipher, $session_id, 0, $session_id); } public function write($session_id, $session_data) { $file_name = 'ssess_'.sha1($session_id); - $text_data = json_encode($session_data); - $iv = openssl_random_pseudo_bytes($this->initializationVectorLength); - $encrypted_data = openssl_encrypt($text_data, $this->cipher, $session_id, 0, $iv); + $encrypted_data = openssl_encrypt($session_data, $this->cipher, $session_id, 0, $session_id); return file_put_contents("$this->savePath/$file_name", $encrypted_data) !== false; }
Decode encrypted data in read and stop generating a random iv
phpsess_session-handler
train
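
The commit above encrypts session payloads with material derived from the session id (this revision reuses the id as both key and IV). Here is an analogous pattern in Python built on the third-party cryptography package's Fernet; it is a sketch of the general idea rather than a reproduction of the PHP scheme, and a real deployment would mix in a server-side secret and a proper KDF instead of a bare hash of the id.

import base64
import hashlib
from cryptography.fernet import Fernet  # pip install cryptography

def fernet_for(session_id):
    # Derive a 32-byte urlsafe key from the session id (illustrative only).
    digest = hashlib.sha256(session_id.encode()).digest()
    return Fernet(base64.urlsafe_b64encode(digest))

def write(session_id, session_data):
    return fernet_for(session_id).encrypt(session_data.encode())

def read(session_id, token):
    if not token:
        return ""  # a missing session file maps to an empty string
    return fernet_for(session_id).decrypt(token).decode()

token = write("abc123", "cart=3|user=7")
print(read("abc123", token))  # cart=3|user=7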
ac358659c4ecc54551cede5c6316bc3cefd251ac
diff --git a/wpull/http/stream.py b/wpull/http/stream.py index <HASH>..<HASH> 100644 --- a/wpull/http/stream.py +++ b/wpull/http/stream.py @@ -1,5 +1,6 @@ # encoding=utf8 '''HTML protocol streamers.''' +import functools import gettext import http.client import itertools @@ -30,6 +31,16 @@ DEFAULT_NO_CONTENT_CODES = frozenset(itertools.chain( '''Status codes where a response body is prohibited.''' +def close_stream_on_error(func): + '''Decorator to close stream on error.''' + @trollius.coroutine + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + with wpull.util.close_on_error(self.close): + raise Return((yield From(func(self, *args, **kwargs)))) + return wrapper + + class Stream(object): '''HTTP stream reader/writer. @@ -62,6 +73,7 @@ class Stream(object): return self._data_observer @trollius.coroutine + @close_stream_on_error def write_request(self, request): '''Send the request's HTTP status line and header fields. @@ -86,6 +98,7 @@ class Stream(object): yield From(self._connection.write(data, drain=False)) @trollius.coroutine + @close_stream_on_error def write_body(self, file, length=None): '''Send the request's content body. @@ -133,6 +146,7 @@ class Stream(object): bytes_left -= len(data) @trollius.coroutine + @close_stream_on_error def read_response(self, response=None): '''Read the response's HTTP status line and header fields. @@ -167,6 +181,7 @@ class Stream(object): raise Return(response) @trollius.coroutine + @close_stream_on_error def read_body(self, request, response, file=None, raw=False): '''Read the response's content body. diff --git a/wpull/util.py b/wpull/util.py index <HASH>..<HASH> 100644 --- a/wpull/util.py +++ b/wpull/util.py @@ -4,6 +4,7 @@ import calendar import codecs import contextlib import datetime +import functools import os.path import re import sys @@ -154,3 +155,16 @@ def is_ascii(text): return False else: return True + + [email protected] +def close_on_error(close_func): + '''Context manager to close object on error.''' + try: + yield + except (ArithmeticError, ArithmeticError, AssertionError, AttributeError, + BufferError, EOFError, ImportError, LookupError, MemoryError, + NameError, OSError, ReferenceError, RuntimeError, SyntaxError, + SystemError, TypeError, ValueError): + close_func() + raise diff --git a/wpull/util_test.py b/wpull/util_test.py index <HASH>..<HASH> 100644 --- a/wpull/util_test.py +++ b/wpull/util_test.py @@ -3,7 +3,7 @@ import sys import unittest from wpull.util import (datetime_str, python_version, filter_pem, - parse_iso8601_str, is_ascii) + parse_iso8601_str, is_ascii, close_on_error) DEFAULT_TIMEOUT = 30 @@ -50,3 +50,19 @@ class TestUtil(unittest.TestCase): def test_is_acsii(self): self.assertTrue(is_ascii('abc')) self.assertFalse(is_ascii('😤')) + + def test_close_on_error(self): + class MyObject(object): + def __init__(self): + self.closed = False + + def close(self): + self.closed = True + + def oops(self): + with close_on_error(self.close): + raise ValueError() + + my_object = MyObject() + self.assertRaises(ValueError, my_object.oops) + self.assertTrue(my_object.closed)
http.stream: Close the connection on errors to avoid bad state by reuse.
ArchiveTeam_wpull
train
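
The commit pairs a context manager with a decorator so that any stream method which raises also closes the connection, keeping a half-read socket from being reused. A synchronous Python sketch of that pairing; the original catches a specific whitelist of exception types and wraps coroutines, both simplified away here.

import contextlib
import functools

@contextlib.contextmanager
def close_on_error(close_func):
    # Run the block; on failure, close the resource, then re-raise.
    try:
        yield
    except Exception:
        close_func()
        raise

def close_stream_on_error(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        with close_on_error(self.close):
            return func(self, *args, **kwargs)
    return wrapper

class Stream:
    def __init__(self):
        self.closed = False

    def close(self):
        self.closed = True

    @close_stream_on_error
    def read_response(self):
        raise ValueError("malformed status line")

stream = Stream()
try:
    stream.read_response()
except ValueError:
    pass
print(stream.closed)  # True: the broken connection cannot be reused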
fb57e21fe26c39749d4594c1208fca0dd66b47b3
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java index <HASH>..<HASH> 100644 --- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java +++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMaster.java @@ -971,14 +971,20 @@ public class JobMaster extends FencedRpcEndpoint<JobMasterId> implements JobMast @Override public CompletableFuture<String> triggerSavepoint( - @Nullable final String targetDirectory, - final Time timeout) { - try { - return executionGraph.getCheckpointCoordinator() + @Nullable final String targetDirectory, + final Time timeout) { + final CheckpointCoordinator checkpointCoordinator = executionGraph.getCheckpointCoordinator(); + + if (checkpointCoordinator != null) { + return checkpointCoordinator .triggerSavepoint(System.currentTimeMillis(), targetDirectory) .thenApply(CompletedCheckpoint::getExternalPointer); - } catch (Exception e) { - return FutureUtils.completedExceptionally(e); + } else { + return FutureUtils.completedExceptionally( + new FlinkException( + String.format( + "Cannot trigger a savepoint because the job %s is not a streaming job.", + jobGraph.getJobID()))); } }
[hotfix] [flip6] Harden JobMaster#triggerSavepoint Check first whether the CheckpointCoordinator has been set before triggering a savepoint. If it has not been set, then return a failure message.
apache_flink
train
54060402050a0649b91efa9ad9f27b43578eedc8
diff --git a/lib/resque/data_store.rb b/lib/resque/data_store.rb index <HASH>..<HASH> 100644 --- a/lib/resque/data_store.rb +++ b/lib/resque/data_store.rb @@ -44,6 +44,7 @@ module Resque :heartbeat!, :remove_heartbeat, :all_heartbeats, + :acquire_pruning_dead_worker_lock, :set_worker_payload, :worker_start_time, :worker_done_working @@ -275,6 +276,10 @@ module Resque @redis.hgetall(HEARTBEAT_KEY) end + def acquire_pruning_dead_worker_lock(worker, expiry) + @redis.set(redis_key_for_worker_pruning, worker.to_s, :ex => expiry, :nx => true) + end + def set_worker_payload(worker, data) @redis.set(redis_key_for_worker(worker), data) end @@ -299,6 +304,10 @@ module Resque def redis_key_for_worker_start_time(worker) "#{redis_key_for_worker(worker)}:started" end + + def redis_key_for_worker_pruning + "pruning_dead_workers_in_progress" + end end class StatsAccess diff --git a/lib/resque/worker.rb b/lib/resque/worker.rb index <HASH>..<HASH> 100644 --- a/lib/resque/worker.rb +++ b/lib/resque/worker.rb @@ -585,6 +585,8 @@ module Resque # By checking the current Redis state against the actual # environment, we can determine if Redis is old and clean it up a bit. def prune_dead_workers + return unless data_store.acquire_pruning_dead_worker_lock(self, Resque.heartbeat_interval) + all_workers = Worker.all unless all_workers.empty? diff --git a/test/worker_test.rb b/test/worker_test.rb index <HASH>..<HASH> 100644 --- a/test/worker_test.rb +++ b/test/worker_test.rb @@ -669,6 +669,42 @@ describe "Resque::Worker" do assert_equal [], Resque::Worker.all_workers_with_expired_heartbeats end + it "does not prune if another worker has pruned (started pruning) recently" do + now = Time.now + workerA = Resque::Worker.new(:jobs) + workerA.to_s = 'workerA:1:jobs' + workerA.register_worker + workerA.heartbeat!(now - Resque.prune_interval - 1) + assert_equal 1, Resque.workers.size + assert_equal [workerA], Resque::Worker.all_workers_with_expired_heartbeats + + workerB = Resque::Worker.new(:jobs) + workerB.to_s = 'workerB:1:jobs' + workerB.register_worker + workerB.heartbeat!(now) + assert_equal 2, Resque.workers.size + + workerB.prune_dead_workers + assert_equal [], Resque::Worker.all_workers_with_expired_heartbeats + + workerC = Resque::Worker.new(:jobs) + workerC.to_s = "workerC:1:jobs" + workerC.register_worker + workerC.heartbeat!(now - Resque.prune_interval - 1) + assert_equal 2, Resque.workers.size + assert_equal [workerC], Resque::Worker.all_workers_with_expired_heartbeats + + workerD = Resque::Worker.new(:jobs) + workerD.to_s = 'workerD:1:jobs' + workerD.register_worker + workerD.heartbeat!(now) + assert_equal 3, Resque.workers.size + + # workerC does not get pruned because workerB already pruned recently + workerD.prune_dead_workers + assert_equal [workerC], Resque::Worker.all_workers_with_expired_heartbeats + end + it "does not prune workers that haven't set a heartbeat" do workerA = Resque::Worker.new(:jobs) workerA.to_s = "bar:3:jobs"
Prevent thundering herd when pruning dead workers
resque_resque
train
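
The Redis SET with nx and ex in the diff is a best-effort distributed lock: only the first worker per expiry window acquires it, and the TTL releases it automatically with no unlock step. The same call from Python with redis-py; the key name and TTL are illustrative, and a reachable Redis is assumed.

import redis  # pip install redis

r = redis.Redis()

def acquire_pruning_lock(worker_id, expiry_seconds):
    # SET key value NX EX ttl: True only for the first caller in the window.
    return bool(r.set("pruning_dead_workers_in_progress",
                      worker_id, nx=True, ex=expiry_seconds))

def prune_dead_workers(worker_id):
    if not acquire_pruning_lock(worker_id, expiry_seconds=60):
        return  # another worker pruned (or began pruning) recently
    print(f"{worker_id} is pruning dead workers")

prune_dead_workers("worker-a")  # prunes
prune_dead_workers("worker-b")  # skips until the lock expires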
1a9f27c0f55fac83d3c004c17d81e5617734b38b
diff --git a/test/spec/EditorOptionHandlers-test.js b/test/spec/EditorOptionHandlers-test.js index <HASH>..<HASH> 100644 --- a/test/spec/EditorOptionHandlers-test.js +++ b/test/spec/EditorOptionHandlers-test.js @@ -206,7 +206,7 @@ define(function (require, exports, module) { runs(function () { var editor = EditorManager.getCurrentFullEditor(), - oldEditorSize = SpecRunnerUtils.resizeEditor(editor, testWindow.$, 0, 200); + oldEditorSize = SpecRunnerUtils.resizeEditor(editor, testWindow.$, 200); // Use two cursor positions to detect line wrapping. First position at // the beginning of a long line and the second position to be diff --git a/test/spec/QuickOpen-test.js b/test/spec/QuickOpen-test.js index <HASH>..<HASH> 100644 --- a/test/spec/QuickOpen-test.js +++ b/test/spec/QuickOpen-test.js @@ -155,7 +155,7 @@ define(function (require, exports, module) { runs(function () { if (gotoLineQuery) { var editor = EditorManager.getCurrentFullEditor(); - SpecRunnerUtils.resizeEditor(editor, testWindow.$, 200); + SpecRunnerUtils.resizeEditor(editor, testWindow.$, 0, 600); waitsFor(function () { return getSearchField().val() === gotoLineQuery;
- change the tests to set the correct editor size
adobe_brackets
train
2767e9b88d2c28cb1cbfd6fe39cc832696e996cf
diff --git a/lib/mobility/plugins/active_record/query.rb b/lib/mobility/plugins/active_record/query.rb index <HASH>..<HASH> 100644 --- a/lib/mobility/plugins/active_record/query.rb +++ b/lib/mobility/plugins/active_record/query.rb @@ -223,10 +223,13 @@ enabled for any one attribute on the model. keys, predicates = opts.keys.map(&:to_s), [] + used_keys = [] + query_map = mods.inject(IDENTITY) do |qm, mod| - i18n_keys = mod.names & keys + i18n_keys = mod.names & keys - used_keys next qm if i18n_keys.empty? + used_keys += i18n_keys mod_predicates = i18n_keys.map do |key| build_predicate(scope.backend_node(key.to_sym, locale), opts.delete(key)) end diff --git a/spec/mobility/plugins/active_record/query_spec.rb b/spec/mobility/plugins/active_record/query_spec.rb index <HASH>..<HASH> 100644 --- a/spec/mobility/plugins/active_record/query_spec.rb +++ b/spec/mobility/plugins/active_record/query_spec.rb @@ -192,4 +192,29 @@ describe Mobility::Plugins::ActiveRecord::Query, orm: :active_record, type: :plu expect { query.order(:car_id) }.not_to raise_error end end + + describe "regression for #564" do + it "works if translates is called multiple times" do + stub_const 'Article', Class.new(ActiveRecord::Base) + 2.times { translates Article, :title, backend: :table } + + article = Article.create(title: "Title") + + expect(Article.i18n.where(title: "Title")).to eq([article]) + end + + it "handles intersecting attribute declarations" do + stub_const 'Article', Class.new(ActiveRecord::Base) + translates Article, :title, :content, backend: :key_value, type: :string + + # title defined below clobbers title defined above + translates Article, :title, backend: :table + + article1 = Article.create(title: "Title") + article2 = Article.create(title: "Title", content: "Content") + + expect(Article.i18n.where(title: "Title")).to match_array([article1, article2]) + expect(Article.i18n.where(title: "Title", content: "Content")).to eq([article2]) + end + end end
Avoid querying same attribute more than once Fixes #<I>
shioyama_mobility
train
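
The fix above records which attribute names an earlier backend module already claimed, so an attribute declared twice produces a predicate exactly once. A simplified Python rendering of that bookkeeping, with each module reduced to a backend name plus its attribute list:

def build_predicates(modules, opts):
    keys = set(opts)
    used_keys = set()
    predicates = []
    for backend, names in modules:
        # Subtract the keys a previous module consumed, mirroring
        # `mod.names & keys - used_keys` in the patch.
        i18n_keys = (set(names) & keys) - used_keys
        used_keys |= i18n_keys
        predicates += [(backend, key, opts[key]) for key in sorted(i18n_keys)]
    return predicates

modules = [("key_value", ["title", "content"]), ("table", ["title"])]
print(build_predicates(modules, {"title": "Title", "content": "Content"}))
# [('key_value', 'content', 'Content'), ('key_value', 'title', 'Title')]
# 'title' is not handled a second time by the 'table' backend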
f7f89bc6384bc674b7fb3b0a9fdb7694b3b830be
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -56,8 +56,8 @@ master_doc = 'index' # General information about the project. project = 'Neo4j Python Driver' -copyright = '2002-2020, Neo Technology' -author = 'Neo Technology' +copyright = '2002-2020, Neo4j' +author = 'Neo4j' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -241,7 +241,7 @@ latex_elements = { # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'Neo4jBoltDriverforPython.tex', 'Neo4j Bolt Driver for Python Documentation', - 'Neo Technology', 'manual'), + 'Neo4j', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ setup_args = { "description": "Neo4j Bolt driver for Python", "license": "Apache License, Version 2.0", "long_description": readme, - "author": "Neo Technology", + "author": "Neo4j", "author_email": "[email protected]", "keywords": "neo4j graph database", "url": "https://github.com/neo4j/neo4j-python-driver",
changed author Neo Technology to Neo4j (#<I>)
neo4j_neo4j-python-driver
train
f81ae3ab5aedc5afacf82410417d52e29ac27255
diff --git a/gui/test_is_dock.py b/gui/test_is_dock.py index <HASH>..<HASH> 100644 --- a/gui/test_is_dock.py +++ b/gui/test_is_dock.py @@ -78,6 +78,24 @@ def getUiState(ui): 'Run Button Enabled': myRunButton} +def formattedList(theList): + """Return a string representing a list of layers (in correct order) + but formatted with line breaks between each entry.""" + myListString = '' + for myItem in theList: + myListString += myItem + '\n' + return myListString + + +def canvasList(): + """Return a string representing the list of canvas layers (in correct + order) but formatted with line breaks between each entry.""" + myListString = '' + for myLayer in CANVAS.layers(): + myListString += str(myLayer.name()) + '\n' + return myListString + + def combosToString(ui): """Helper to return a string showing the state of all combos (all their entries""" @@ -674,7 +692,6 @@ class ISDockTest(unittest.TestCase): def test_issue45(self): """Points near the edge of a raster hazard layer are interpolated OK""" - myButton = DOCK.pbnRunStop setCanvasCrs(GEOCRS, True) setYogyaGeoExtent() @@ -863,19 +880,36 @@ class ISDockTest(unittest.TestCase): def test_bubbleLayers(self): """Test the bubbleLayers method works """ + self.tearDown() + # First part of scenario should have enabled run + myFileList = ['Flood_Design_Depth_Jakarta_geographic.asc', + 'Flood_Current_Depth_Jakarta_geographic.asc', + 'Population_Jakarta_geographic.asc'] + loadLayers(myFileList) DOCK.bubbleLayersUpFlag = True DOCK.bubbleLayers() - myExpectedList = [] - myExpectedString = '' - for myLayer in myExpectedList: - myExpectedString += myLayer + '\n' - myListString = '' - for myLayer in CANVAS.layers(): - myListString += str(myLayer.name()) + '\n' - myMessage = '\nGot: \n%s\nExpected: %s\n%s' % ( - myListString, myExpectedString, + myExpectedList = ['Penduduk Jakarta', + 'Banjir Jakarta seperti 2007', + 'Banjir Jakarta Mungkin'] + myExpectedString = formattedList(myExpectedList) + myCanvasListString = canvasList() + myMessage = '\nGot Canvas Layer Order: \n%s\nExpected:\n%s\n\n%s' % ( + myCanvasListString, myExpectedString, + combosToString(DOCK)) + assert myExpectedString == myCanvasListString, myMessage + + # Now select a differnt hazard and check the layers have bubbled + QTest.keyClick(DOCK.cboExposure, QtCore.Qt.Key_Up) + QTest.keyClick(DOCK.cboExposure, QtCore.Qt.Key_Enter) + + myExpectedList = ['Penduduk Jakarta', + 'Banjir Jakarta Mungkin', + 'Banjir Jakarta seperti 2007'] + myExpectedString = formattedList(myExpectedList) + myCanvasListString = canvasList() + myMessage = '\nGot Canvas Layer Order: \n%s\nExpected:\n%s\n\n%s' % ( + myCanvasListString, myExpectedString, combosToString(DOCK)) - assert myExpectedString == myListString, myMessage if __name__ == '__main__': suite = unittest.makeSuite(ISDockTest, 'test')
Added test condition that looks to see if layers have bubbled after changing hazard
inasafe_inasafe
train
df09a0eaa0a6451b5ca3f9d03e780d80da4c5bc9
diff --git a/src/scs_core/estate/package_version.py b/src/scs_core/estate/package_version.py index <HASH>..<HASH> 100644 --- a/src/scs_core/estate/package_version.py +++ b/src/scs_core/estate/package_version.py @@ -205,7 +205,7 @@ class PackageVersions(JSONable): @property def versions(self): - return [self.__versions[package] for package in sorted(self.__versions.keys())] + return {package: self.__versions[package] for package in sorted(self.__versions.keys())} # ----------------------------------------------------------------------------------------------------------------
Fixed a bug in PackageVersions: the versions property now returns a dict keyed by package name instead of a list
south-coast-science_scs_core
train
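A minimal Python sketch of the pattern in the scs_core fix above: the property builds a mapping keyed by the sorted package names instead of a bare list of values, so callers keep the name alongside each version. The constructor and attribute spelling here are illustrative, not the project's real API.

class PackageVersions:
    def __init__(self, versions):
        self.__versions = dict(versions)

    @property
    def versions(self):
        # Before the fix this was a list comprehension over the sorted
        # keys, which silently dropped the package names.
        return {package: self.__versions[package]
                for package in sorted(self.__versions)}

pv = PackageVersions({'scs_host': '2.3.1', 'scs_core': '1.0.17'})
print(pv.versions)   # {'scs_core': '1.0.17', 'scs_host': '2.3.1'}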
3c916133ca59e1a0e3f7abbb5c0d214bd9249987
diff --git a/lib/wlang/compiler.rb b/lib/wlang/compiler.rb index <HASH>..<HASH> 100644 --- a/lib/wlang/compiler.rb +++ b/lib/wlang/compiler.rb @@ -6,7 +6,7 @@ module WLang end def braces - options[:braces] || [ "{", "}" ] + (dialect && dialect.braces) || options[:braces] end def on_template(fn) diff --git a/lib/wlang/dialect.rb b/lib/wlang/dialect.rb index <HASH>..<HASH> 100644 --- a/lib/wlang/dialect.rb +++ b/lib/wlang/dialect.rb @@ -1,8 +1,18 @@ module WLang class Dialect - def dispatch(symbols, *args) - self.class.dispatch(self, symbols, args) + def braces + [ "{", "}" ] + end + + def dispatch(symbols, *fns) + meth = self.class.dispatch_name(symbols) + if respond_to?(meth) + send meth, *fns + else + start, stop = braces + fns.inject("#{symbols}"){|buf,fn| buf << start; fn.call(buf, self); buf << stop} + end end end # class Dialect diff --git a/lib/wlang/dialect/class_methods.rb b/lib/wlang/dialect/class_methods.rb index <HASH>..<HASH> 100644 --- a/lib/wlang/dialect/class_methods.rb +++ b/lib/wlang/dialect/class_methods.rb @@ -31,15 +31,6 @@ module WLang "_dynamic_#{chars}".to_sym end - def dispatch(dialect, symbols, fns) - meth = dispatch_name(symbols) - if dialect.respond_to?(meth) - dialect.send meth, *fns - else - fns.inject(""){|buf,fn| fn.call(buf, dialect)} - end - end - end extend ClassMethods end # class Daialect diff --git a/spec/dialect/test_dispatch.rb b/spec/dialect/test_dispatch.rb index <HASH>..<HASH> 100644 --- a/spec/dialect/test_dispatch.rb +++ b/spec/dialect/test_dispatch.rb @@ -55,8 +55,8 @@ module WLang bar.dispatch("<", nil).should eq('Bar#less') end it 'dispatches correctly on unknown symbols' do - foo.dispatch(">", lambda{|buf,d| d.should eq(foo); "Foo#>"}).should eq('Foo#>') - bar.dispatch(">", lambda{|buf,d| d.should eq(bar); "Bar#>"}).should eq('Bar#>') + foo.dispatch(">", lambda{|buf,d| d.should eq(foo); buf << "foo"}).should eq('>{foo}') + bar.dispatch(">", lambda{|buf,d| d.should eq(bar); buf << "bar"}).should eq('>{bar}') end end diff --git a/spec/test_compiler.rb b/spec/test_compiler.rb index <HASH>..<HASH> 100644 --- a/spec/test_compiler.rb +++ b/spec/test_compiler.rb @@ -45,6 +45,9 @@ module WLang end context "with a dialect" do + def braces + [ '{', '}' ] + end def dispatch_name(symbols) symbols == "!" ? :execution : nil end
Fix expected behavior of Dialect#dispatch on unknown rule
blambeau_wlang
train
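A rough Python analogue of the wlang dispatch above, since the Ruby is terse: look up a handler method named after the symbols, and when none exists fall back to echoing the symbols with each rendered block wrapped in the dialect's braces. getattr stands in for Ruby's respond_to?/send, and the handler-name prefix mirrors dispatch_name.

class Dialect:
    braces = ('{', '}')

    def dispatch(self, symbols, *fns):
        handler = getattr(self, '_dynamic_' + symbols, None)
        if handler is not None:
            return handler(*fns)
        # Fallback for unknown rules: symbols, then each block in braces.
        start, stop = self.braces
        parts = [symbols]
        for fn in fns:
            parts.append(start + fn(self) + stop)
        return ''.join(parts)

d = Dialect()
print(d.dispatch('>', lambda dialect: 'foo'))   # >{foo}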
2bbdfe4bf465bb1138c6f02dd328722444d763f0
diff --git a/config_resolver.py b/config_resolver.py index <HASH>..<HASH> 100644 --- a/config_resolver.py +++ b/config_resolver.py @@ -272,7 +272,7 @@ class Config(ConfigResolverBase): # default search path path = ['/etc/%s/%s' % (self.group_name, self.app_name), expanduser('~/.%s/%s' % (self.group_name, self.app_name)), - getcwd()] + join(getcwd(), '.{}'.format(self.group_name), self.app_name)] # If a path was passed directly to this instance, override the path. if self.search_path: diff --git a/test.py b/test.py index <HASH>..<HASH> 100644 --- a/test.py +++ b/test.py @@ -79,7 +79,7 @@ class AdvancedInitTest(unittest.TestCase): cfg = Config('hello', 'world') expected = ['/etc/hello/world/test.ini', expanduser('~/.hello/world/test.ini'), - '{}/test.ini'.format(os.getcwd())] + '{}/.hello/world/test.ini'.format(os.getcwd())] self.assertEqual( cfg.active_path, expected) @@ -88,7 +88,8 @@ class AdvancedInitTest(unittest.TestCase): os.environ['HELLO_WORLD_PATH'] = 'testdata:testdata/a:testdata/b' cfg = Config('hello', 'world') expected = ['testdata/app.ini', - 'testdata/a/app.ini', 'testdata/b/app.ini'] + 'testdata/a/app.ini', + 'testdata/b/app.ini'] self.assertEqual( cfg.active_path, expected) @@ -98,7 +99,7 @@ class AdvancedInitTest(unittest.TestCase): cfg = Config('hello', 'world') expected = ['/etc/hello/world/app.ini', expanduser('~/.hello/world/app.ini'), - '{}/app.ini'.format(os.getcwd()), + '{}/.hello/world/app.ini'.format(os.getcwd()), 'testdata/app.ini', 'testdata/a/app.ini', 'testdata/b/app.ini'] self.assertEqual(
./app.ini changed to ./.group/app/app.ini
exhuma_config_resolver
train
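The config_resolver patch above narrows the third default search location from the bare working directory to ./.<group>/<app> beneath it. A sketch of the resulting search-path construction; the standalone function is invented for illustration (the real code builds this list inside Config):

import os
from os.path import expanduser, join

def default_search_path(group_name, app_name):
    return [
        '/etc/%s/%s' % (group_name, app_name),
        expanduser('~/.%s/%s' % (group_name, app_name)),
        join(os.getcwd(), '.{}'.format(group_name), app_name),
    ]

print(default_search_path('hello', 'world'))
# e.g. ['/etc/hello/world', '/home/me/.hello/world', '/cwd/.hello/world']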
e4cc869df4adec22e32e1924d4723ed72f6400d8
diff --git a/templates/js/snippetActions.js b/templates/js/snippetActions.js index <HASH>..<HASH> 100644 --- a/templates/js/snippetActions.js +++ b/templates/js/snippetActions.js @@ -1,6 +1,13 @@ var snippetActions = (function ($, snippetService, iframesService, editorService, viewService) { var module = {}; + var appendIframeContent = function ( frameId, template, content ) { + if ( template ) { + $(frameId).contents().find('html').html(template); + } + $(frameId).contents().find('#snippet').html(content); + }; + module.createSnippet = function ( e ) { var form = $(this), fields = form.find('.js-form-submit-field'), @@ -63,10 +70,10 @@ var snippetActions = (function ($, snippetService, iframesService, editorService currentSnippetElement.appendTo('.main'); - snippetContents = $('#' + snippetId).contents(); + snippetContents = $('#' + snippetId); - snippetContents.find('html').html(template); - snippetContents.find('#snippet').html(snippet.code); + appendIframeContent(snippetContents, template, snippet.code); + snippetContents.load($.proxy(appendIframeContent, null, snippetContents, template, snippet.code)); currentSnippetElement.find('.js-edit-snippet').submit(snippetActions.editSnippet); }); @@ -115,8 +122,10 @@ var snippetActions = (function ($, snippetService, iframesService, editorService snippetContainer.find('.js-snippet-description').html(snippet.description); snippetContainer.find('.js-snippet-code-preview').text(snippet.code); - snippetContents = snippetContainer.find('iframe').contents(); - snippetContents.find('#snippet').html(snippet.code); + snippetContents = snippetContainer.find('iframe'); + + appendIframeContent(snippetContents, null, snippet.code); + snippetContents.load($.proxy(appendIframeContent, null, snippetContents, null, snippet.code)); } else { console.log(snippet); } @@ -125,9 +134,10 @@ var snippetActions = (function ($, snippetService, iframesService, editorService module.drawSnippets = function ( frames, snippets ) { var snippetId, - snippetContents, snippetContainer, + snippetIframe, currentSnippetElement, + currentCode, currentId, index, len = frames.length, @@ -144,12 +154,13 @@ var snippetActions = (function ($, snippetService, iframesService, editorService formFields = currentSnippetElement.find('.js-edit-snippet').find('.js-form-submit-field'); currentSnippetElement.attr('id', currentId); snippetId = frames[index].attr('id'); + currentCode = snippets[index].code; currentSnippetElement.find('.js-snippet-name').html(snippets[index].name); currentSnippetElement.find('.js-snippet-description').html(snippets[index].description); - currentSnippetElement.find('.js-edit-code').text(snippets[index].code); + currentSnippetElement.find('.js-edit-code').text(currentCode); currentSnippetElement.find('.js-edit-css').text(snippets[index].inlineCss); - currentSnippetElement.find('.js-snippet-code-preview').text(snippets[index].code); + currentSnippetElement.find('.js-snippet-code-preview').text(currentCode); currentSnippetElement.find('.js-snippet-source').html(frames[index]); currentSnippetElement.addClass(snippetId); @@ -157,7 +168,7 @@ var snippetActions = (function ($, snippetService, iframesService, editorService var idToDelete = $(this).data('id'); snippetService.deleteById(idToDelete, function ( data ) { if ( typeof data === 'object' && data.isDeleted ) { - console.log($('#' + data.id).detach()); + $('#' + data.id).detach(); } console.log(data); }); @@ -174,10 +185,10 @@ var snippetActions = (function ($, snippetService, iframesService, editorService currentSnippetElement.appendTo('.main'); - snippetContents = $('#' + snippetId).contents(); + snippetIframe = $('#' + snippetId); - snippetContents.find('html').html(template); - snippetContents.find('#snippet').html(snippets[index].code); + appendIframeContent(snippetIframe, template, currentCode); + snippetIframe.load($.proxy(appendIframeContent, null, snippetIframe, template, currentCode)); currentSnippetElement.find('.js-edit-snippet').submit(snippetActions.editSnippet); }
Solved iframe issue with Firefox.
devbridge_Styleguide
train
6b1589d94b8946cde0efefecd8e8b5765ee41e74
diff --git a/tests/suites/casper/logging.js b/tests/suites/casper/logging.js index <HASH>..<HASH> 100644 --- a/tests/suites/casper/logging.js +++ b/tests/suites/casper/logging.js @@ -1,41 +1,48 @@ /*eslint strict:0*/ casper.test.begin('logging tests', 4, function(test) { + var oldLevel; casper.start('tests/site/index.html'); - var oldLevel = casper.options.logLevel; + casper.then(casper.createStep(function() { + oldLevel = casper.options.logLevel; - casper.options.logLevel = 'info'; - casper.options.verbose = false; + casper.options.logLevel = 'info'; + casper.options.verbose = false; + }, {skipLog: true})); - casper.log('foo', 'info'); - casper.test.assert(casper.result.log.some(function(e) { - return e.message === 'foo' && e.level === 'info'; - }), 'Casper.log() adds a log entry'); + casper.then(casper.createStep(function() { + casper.log('foo', 'info'); + }, {skipLog: true})); - casper.options.logLevel = oldLevel; - casper.options.verbose = true; + casper.then(casper.createStep(function() { + test.assert(casper.result.log.some(function(e) { + return e.message === 'foo' && e.level === 'info'; + }), 'Casper.log() adds a log entry'); + }, {skipLog: true})); - casper.then(function() { - var oldLevel = casper.options.logLevel; + casper.then(casper.createStep(function() { casper.options.logLevel = 'debug'; casper.options.verbose = false; casper.evaluate(function() { __utils__.log('debug message'); __utils__.log('info message', 'info'); }); + }, {skipLog: true})); + + casper.then(casper.createStep(function() { test.assert(casper.result.log.some(function(e) { return e.message === 'debug message' && e.level === 'debug' && e.space === 'remote'; }), 'ClientUtils.log() adds a log entry'); test.assert(casper.result.log.some(function(e) { return e.message === 'info message' && e.level === 'info' && e.space === 'remote'; }), 'ClientUtils.log() adds a log entry at a given level'); - casper.options.logLevel = oldLevel; - casper.options.verbose = true; - }); + test.assertEquals(this.result.log.length, 3, 'Casper.log() logged messages'); + }, {skipLog: true})); casper.run(function() { - test.assertEquals(this.result.log.length, 3, 'Casper.log() logged messages'); test.done(); + casper.options.logLevel = oldLevel; + casper.options.verbose = true; }); });
for slimerjs: prevent step logs from entering logging
casperjs_casperjs
train
4e32df8ba372cc82b64c618920f5a4d899f3c4e8
diff --git a/clean-scripts.config.js b/clean-scripts.config.js index <HASH>..<HASH> 100644 --- a/clean-scripts.config.js +++ b/clean-scripts.config.js @@ -55,7 +55,8 @@ module.exports = { `cpy ./packages/core/node_modules/monaco-editor/min/vs/editor/editor.main.css ./packages/core/demo/vs/editor/`, `cpy ./packages/core/node_modules/monaco-editor/min/vs/editor/editor.main.nls.js ./packages/core/demo/vs/editor/`, `cpy ./packages/core/node_modules/monaco-editor/min/vs/base/worker/workerMain.js ./packages/core/demo/vs/base/worker/`, - `cpy ./packages/core/node_modules/monaco-editor/min/vs/basic-languages/typescript/typescript.js ./packages/core/demo/vs/basic-languages/typescript/` + `cpy ./packages/core/node_modules/monaco-editor/min/vs/basic-languages/typescript/typescript.js ./packages/core/demo/vs/basic-languages/typescript/`, + `cpy ./packages/core/node_modules/monaco-editor/min/vs/basic-languages/javascript/javascript.js ./packages/core/demo/vs/basic-languages/javascript/` ], version: [ {
fix: add missing js file used by demo
plantain-00_schema-based-json-editor
train
e05ba9032193534e7c0f4729c2a3f79d89964ed9
diff --git a/metric_tank/mdata/cluster.go b/metric_tank/mdata/cluster.go index <HASH>..<HASH> 100644 --- a/metric_tank/mdata/cluster.go +++ b/metric_tank/mdata/cluster.go @@ -27,18 +27,20 @@ var ( //PersistMessage format version const PersistMessageBatchV1 = 1 +// ClusterStatus has Exported fields but don't touch them directly +// it's only for json marshaling. use the accessor methods. type ClusterStatus struct { sync.Mutex - instance string - primary bool - lastChange time.Time + Instance string `json:"instance"` + Primary bool `json:"primary"` + LastChange time.Time `json:"lastChange"` } func NewClusterStatus(instance string, initialState bool) *ClusterStatus { return &ClusterStatus{ - instance: instance, - primary: initialState, - lastChange: time.Now(), + Instance: instance, + Primary: initialState, + LastChange: time.Now(), } } @@ -50,15 +52,15 @@ func (c *ClusterStatus) Marshal() ([]byte, error) { func (c *ClusterStatus) Set(newState bool) { c.Lock() - c.primary = newState - c.lastChange = time.Now() + c.Primary = newState + c.LastChange = time.Now() c.Unlock() } func (c *ClusterStatus) IsPrimary() bool { c.Lock() defer c.Unlock() - return c.primary + return c.Primary } type PersistMessage struct {
fix: make cluster status json work again
grafana_metrictank
train
822df37f8cc4a4f13963ca7fb81e6dd8456b8b83
diff --git a/cypress/integration/rendering/flowchart.spec.js b/cypress/integration/rendering/flowchart.spec.js index <HASH>..<HASH> 100644 --- a/cypress/integration/rendering/flowchart.spec.js +++ b/cypress/integration/rendering/flowchart.spec.js @@ -575,4 +575,27 @@ it('24.2: Handle link click events (link, anchor, mailto, other protocol, script { flowchart: { htmlLabels: false } } ); }); + it('30: Possibility to style text color of nodes and subgraphs as well as apply classes to subgraphs', () => { + imgSnapshotTest( + `graph LR + subgraph id1 [title is set] + A-->B + end + subgraph id2 [title] + E + end + + B-->C + + subgraph id3 + C-->D + end + class id3,id2,A redBg; + class id3,A whiteTxt; + classDef redBg fill:#622; + classDef whiteTxt color: white; + `, + { flowchart: { htmlLabels: false } } + ); + }); }); diff --git a/src/diagrams/flowchart/flowDb.js b/src/diagrams/flowchart/flowDb.js index <HASH>..<HASH> 100644 --- a/src/diagrams/flowchart/flowDb.js +++ b/src/diagrams/flowchart/flowDb.js @@ -151,12 +151,18 @@ export const updateLink = function(positions, style) { export const addClass = function(id, style) { if (typeof classes[id] === 'undefined') { - classes[id] = { id: id, styles: [] }; + classes[id] = { id: id, styles: [], textStyles: [] }; } if (typeof style !== 'undefined') { if (style !== null) { style.forEach(function(s) { + console.log('style', s); + if (s.match('color')) { + const newStyle1 = s.replace('fill', 'bgFill'); + const newStyle2 = newStyle1.replace('color', 'fill'); + classes[id].textStyles.push(newStyle2); + } classes[id].styles.push(s); }); } @@ -196,6 +202,8 @@ export const setClass = function(ids, className) { vertices[id].classes.push(className); } + console.log('Setting class', className, id, subGraphLookup[id]); + if (typeof subGraphLookup[id] !== 'undefined') { subGraphLookup[id].classes.push(className); } @@ -373,7 +381,8 @@ export const defaultStyle = function() { * Clears the internal graph db so that a new graph can be parsed. */ export const addSubGraph = function(_id, list, _title) { - let id = _id; + console.log('Adding subgraph', _id); + let id = _id.trim(); let title = _title; if (_id === _title && _title.match(/\s/)) { id = undefined; @@ -410,6 +419,7 @@ export const addSubGraph = function(_id, list, _title) { const subGraph = { id: id, nodes: nodeList, title: title.trim(), classes: [] }; subGraphs.push(subGraph); subGraphLookup[id] = subGraph; + console.log('Adding subgraph', id, subGraphs, subGraphLookup); return id; }; diff --git a/src/diagrams/flowchart/flowRenderer.js b/src/diagrams/flowchart/flowRenderer.js index <HASH>..<HASH> 100644 --- a/src/diagrams/flowchart/flowRenderer.js +++ b/src/diagrams/flowchart/flowRenderer.js @@ -283,6 +283,9 @@ export const draw = function(text, id) { logger.debug('Parsing failed'); } + console.log('Classes:', flowDb.getClasses()); + console.log('Subgraphs:', flowDb.getSubGraphs()); + // Fetch the default direction, use TD if none was found let dir = flowDb.getDirection(); if (typeof dir === 'undefined') { @@ -430,6 +433,11 @@ export const draw = function(text, id) { const te = cluster.select('.label'); te.attr('transform', `translate(${xPos + width / 2}, ${yPos + 14})`); te.attr('id', id + 'Text'); + + console.log('Fixing subgraph', id, subG.id, subG.classes); // eslitn-disable-line + for (let j = 0; j < subG.classes.length; j++) { + clusterEl[0].classList.add(subG.classes[j]); + } } } diff --git a/src/mermaidAPI.js b/src/mermaidAPI.js index <HASH>..<HASH> 100644 --- a/src/mermaidAPI.js +++ b/src/mermaidAPI.js @@ -533,11 +533,18 @@ const render = function(id, _txt, cb, container) { // classDef if (graphType === 'flowchart') { const classes = flowRenderer.getClasses(txt); + console.log('classes in mermaidApi', classes); for (const className in classes) { style += `\n.${className} > * { ${classes[className].styles.join( ' !important; ' )} !important; }`; + if (classes[className].textStyles) { + style += `\n.${className} tspan { ${classes[className].textStyles.join( + ' !important; ' + )} !important; }`; + } } + console.log(style); } const style1 = document.createElement('style');
#<I> Possibility to style text color of nodes and subgraphs as well as apply classes to subgraphs
knsv_mermaid
train
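The heart of the mermaid change is the style split in addClass: any class style mentioning a colour gets a derived text style in which the background fill is renamed out of the way and CSS color becomes the SVG text fill. A small Python sketch of that string transformation (the function name is invented for illustration):

def derive_text_styles(styles):
    text_styles = []
    for s in styles:
        if 'color' in s:
            # 'fill' would clash with the SVG text fill, so rename it
            # first, then map 'color' onto the tspan 'fill' attribute.
            text_styles.append(s.replace('fill', 'bgFill')
                                .replace('color', 'fill'))
    return text_styles

print(derive_text_styles(['fill:#622', 'color: white']))   # ['fill: white']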
802f00332526a4a9418b181a6b83ae960f2e4b09
diff --git a/lib/rake-version/manager.rb b/lib/rake-version/manager.rb index <HASH>..<HASH> 100644 --- a/lib/rake-version/manager.rb +++ b/lib/rake-version/manager.rb @@ -1,8 +1,6 @@ module RakeVersion - attr_accessor :root - class Manager def initialize diff --git a/spec/manager_spec.rb b/spec/manager_spec.rb index <HASH>..<HASH> 100644 --- a/spec/manager_spec.rb +++ b/spec/manager_spec.rb @@ -18,6 +18,9 @@ describe RakeVersion::Manager do @version.stub(:to_s){ MANAGER_SAMPLE_VERSION } @version.stub(:bump){ @version } @version.stub(:kind_of?){ |type| type == RakeVersion::Version } + + @copier = double('copier', :copy => nil) + @config = double('config', :copiers => [ @copier ]) end def with_context &block @@ -89,4 +92,26 @@ describe RakeVersion::Manager do lambda{ @manager.with_context invalid }.should raise_error(RakeVersion::BadContext) end end + + describe 'Copying' do + + it "should ask the given config for its copiers" do + @config.should_receive :copiers + with_context{ |m| m.config = @config } + end + + it "should ask given copiers to copy the version to sources when setting the version" do + @manager.config = @config + @copier.should_receive(:copy).with(kind_of(RakeVersion::Version), @context) + with_context{ |m| m.set '1.2.3' } + end + + [ :major, :minor, :patch ].each do |type| + it "should ask given copiers to copy the version to sources when bumping the #{type} version" do + @manager.config = @config + @copier.should_receive(:copy).with(kind_of(RakeVersion::Version), @context) + with_context{ |m| m.bump type } + end + end + end end
Completed manager spec (copying features).
AlphaHydrae_rake-version
train
623124d6dc9086418c2b09355e0e0022eac3012b
diff --git a/src/main/java/org/cp/elements/lang/Builder.java b/src/main/java/org/cp/elements/lang/Builder.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/cp/elements/lang/Builder.java +++ b/src/main/java/org/cp/elements/lang/Builder.java @@ -13,13 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.cp.elements.lang; /** - * {@link Builder} is an interface defining a contract for objects implementing the Builder Software Design Pattern. + * The {@link Builder} interface defines a contract for {@link Object objects} who's {@link Class types} + * implement the {@literal Builder Software Design Pattern}. * * @author John Blum + * @param <T> {@link Class type} of {@link Object} to {@literal build}. * @see java.lang.FunctionalInterface * @see <a href="https://en.wikipedia.org/wiki/Builder_pattern">Builder Software Design Pattern</a> * @since 1.0.0 @@ -29,9 +30,9 @@ package org.cp.elements.lang; public interface Builder<T> { /** - * Builds an object of type {@code T}. + * Builds an {@link Object} of type {@link T}. * - * @return the built object. + * @return the built {@link Object}. */ T build();
Review and refactor the org.cp.elements.lang.Builder interface. Edit Javadoc. Format source code.
codeprimate-software_cp-elements
train
51a650979fe2a3ecb15fbd81141eff831cb39b7b
diff --git a/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketHalfClosedTest.java b/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketHalfClosedTest.java index <HASH>..<HASH> 100644 --- a/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketHalfClosedTest.java +++ b/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketHalfClosedTest.java @@ -34,6 +34,8 @@ import io.netty.channel.socket.ChannelInputShutdownReadComplete; import io.netty.channel.socket.ChannelOutputShutdownEvent; import io.netty.channel.socket.DuplexChannel; import io.netty.util.UncheckedBooleanSupplier; +import io.netty.util.internal.PlatformDependent; +import org.junit.Assume; import org.junit.Test; import java.util.concurrent.CountDownLatch; @@ -229,6 +231,8 @@ public class SocketHalfClosedTest extends AbstractSocketTest { @Test public void testAutoCloseFalseDoesShutdownOutput() throws Throwable { + // This test only works on Linux / BSD / MacOS as we assume some semantics that are not true for Windows. + Assume.assumeFalse(PlatformDependent.isWindows()); run(); }
Skip test on windows as the semantics we expect are only true on Linux / Unix / BSD / MacOS (#<I>) Motivation: In the test we assume some semantics on how RST is done that are not true for Windows so we should skip it. Modifications: Skip test when on windows. Result: Be able to run testsuite on windows. Fixes <URL>
netty_netty
train
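The same platform guard expressed in Python, for comparison: pytest's skipif marker plays the role of JUnit's Assume.assumeFalse, skipping rather than failing on platforms where the half-closure semantics don't hold. The test name and reason string are illustrative.

import sys
import pytest

@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason='RST/half-closure semantics differ on Windows')
def test_auto_close_false_does_shutdown_output():
    ...   # body elided; runs only on Linux / BSD / macOS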
6fee2dc0f08ee772dde9b486e9413e08620b7131
diff --git a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py +++ b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py @@ -73,9 +73,7 @@ class RequestRetryLocalExternalStepLauncher(LocalExternalStepLauncher): if step_context.previous_attempt_count == 0: raise RetryRequested() else: - return super(RequestRetryLocalExternalStepLauncher, self).launch_step( - step_context, step_context.previous_attempt_count - ) + return super(RequestRetryLocalExternalStepLauncher, self).launch_step(step_context) @resource(config_schema=local_external_step_launcher.config_schema)
step launcher fix up (#<I>)
dagster-io_dagster
train
777e4eed563e346fc687c9d383df3aed082fc39e
diff --git a/Concurrent_AP.py b/Concurrent_AP.py index <HASH>..<HASH> 100644 --- a/Concurrent_AP.py +++ b/Concurrent_AP.py @@ -179,7 +179,7 @@ def parse_options(): default = False, action = 'store_true', help = ("Specifies if a matrix of similarities " "has already been computed; only makes sense " - "with -h [default %default]")) + "with -f or --file in effect [default %default]")) parser.add_option('-i', '--iterations', dest = 'max_iter', default = 200, type = 'int', help = ("The maximum number of message passing "
Update Concurrent_AP.py: correct the --similarities help text to reference -f/--file instead of -h
GGiecold_Concurrent_AP
train
a62115ec4ccfb0f1116eed3f428e688fca58663e
diff --git a/src/system/modules/metamodelsattribute_select/MetaModelAttributeSelect.php b/src/system/modules/metamodelsattribute_select/MetaModelAttributeSelect.php index <HASH>..<HASH> 100644 --- a/src/system/modules/metamodelsattribute_select/MetaModelAttributeSelect.php +++ b/src/system/modules/metamodelsattribute_select/MetaModelAttributeSelect.php @@ -52,7 +52,7 @@ class MetaModelAttributeSelect extends MetaModelAttributeHybrid // TODO: add tree support here. $arrFieldDef=parent::getFieldDefinition(); $arrFieldDef['inputType'] = 'select'; - $arrFieldDef['options'] = $this->getOptions(); + $arrFieldDef['options'] = $this->getFilterOptions(); return $arrFieldDef; } @@ -76,6 +76,51 @@ class MetaModelAttributeSelect extends MetaModelAttributeHybrid return $objFilterRule; } + /** + * {@inheritdoc} + * + * Fetch filter options from foreign table. + * + */ + public function getFilterOptions($arrIds = array()) + { + $strTableName = $this->get('select_table'); + $strColNameId = $this->get('select_id'); + $arrReturn = array(); + + if ($strTableName && $strColNameId) + { + $strColNameValue = $this->get('select_column'); + $strColNameAlias = $this->get('select_alias'); + if (!$strColNameAlias) + { + $strColNameAlias = $strColNameId; + } + $objDB = Database::getInstance(); + if ($arrIds) + { + $objValue = $objDB->prepare(sprintf(' + SELECT %1$s.* + FROM %1$s + WHERE %1$s.%2$s IN (%3$s) GROUP BY %1$s.%2$s', + $strTableName, // 1 + $strColNameId, // 2 + implode(',', $arrIds) // 3 + )) + ->execute($this->get('id')); + } else { + $objValue = $objDB->prepare(sprintf('SELECT %1$s.* FROM %1$s', $strTableName)) + ->execute(); + } + + while ($objValue->next()) + { + $arrReturn[$objValue->$strColNameAlias] = $objValue->$strColNameValue; + } + } + return $arrReturn; + } + ///////////////////////////////////////////////////////////////// // interface IMetaModelAttributeSimple /////////////////////////////////////////////////////////////////
added base implementation of getFilterOptions().
MetaModels_attribute_select
train
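The PHP above interpolates the id list straight into the IN clause (and passes a bind value the statement never uses). A generic Python DB-API sketch of the same lookup with the ids bound as qmark placeholders instead; table and column names come from caller settings, as in the attribute:

def fetch_filter_options(cur, table, id_col, value_col, alias_col, ids=()):
    if ids:
        marks = ', '.join('?' * len(ids))
        cur.execute('SELECT %s, %s FROM %s WHERE %s IN (%s) GROUP BY %s'
                    % (alias_col, value_col, table, id_col, marks, id_col),
                    tuple(ids))
    else:
        cur.execute('SELECT %s, %s FROM %s' % (alias_col, value_col, table))
    return dict(cur.fetchall())   # alias -> display value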
f1faf7b0e241e7d2779f51f82b7adbe7bf4250f2
diff --git a/src/main/java/org/openqa/selenium/htmlunit/HtmlUnitDriver.java b/src/main/java/org/openqa/selenium/htmlunit/HtmlUnitDriver.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/openqa/selenium/htmlunit/HtmlUnitDriver.java +++ b/src/main/java/org/openqa/selenium/htmlunit/HtmlUnitDriver.java @@ -40,6 +40,7 @@ import java.util.Set; import java.util.WeakHashMap; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; @@ -843,6 +844,9 @@ public class HtmlUnitDriver implements WebDriver, JavascriptExecutor, webClient.close(); webClient = null; } + if (executor instanceof ExecutorService) { + ((ExecutorService)executor).shutdown(); + } currentWindow = null; }
fix HtmlUnit Issue #<I>
SeleniumHQ_htmlunit-driver
train
b6b373f67d97036162bfed46497ac4d97d61b5c9
diff --git a/lib/cequel/metal/new_relic_instrumentation.rb b/lib/cequel/metal/new_relic_instrumentation.rb index <HASH>..<HASH> 100644 --- a/lib/cequel/metal/new_relic_instrumentation.rb +++ b/lib/cequel/metal/new_relic_instrumentation.rb @@ -1,7 +1,7 @@ # -*- encoding : utf-8 -*- begin require 'new_relic/agent/datastores' -rescue LoadError => e +rescue LoadError fail LoadError, "Can't use NewRelic instrumentation without NewRelic gem" end @@ -13,7 +13,7 @@ module Cequel module NewRelicInstrumentation extend ActiveSupport::Concern - define_method :execute_with_consistency_with_newrelic do |statement, bind_vars, consistency| + define_method :execute_with_options_with_newrelic do |statement, bind_vars, options| callback = Proc.new do |result, scoped_metric, elapsed| NewRelic::Agent::Datastores.notice_statement(statement, elapsed) end @@ -33,12 +33,13 @@ module Cequel end NewRelic::Agent::Datastores.wrap("Cassandra", operation, table, callback) do - execute_with_consistency_without_newrelic(statement, bind_vars, consistency) + execute_with_options_without_newrelic(statement, bind_vars, options) end end + included do - alias_method_chain :execute_with_consistency, :newrelic + alias_method_chain :execute_with_options, :newrelic end end end
switch instrumentation from execute_with_consistency to execute_with_options
cequel_cequel
train
32764a1e7fb5cef15f6d4c4a62007c40f496bc93
diff --git a/build/wr.js b/build/wr.js index <HASH>..<HASH> 100644 --- a/build/wr.js +++ b/build/wr.js @@ -1 +1 @@ -!function(e){function t(){var e="div",t="post-body",a="parentNode",l="querySelectorAll",c="getElementsByClassName",d="getElementsByTagName",v,m,g,y;if(l in s)m=s[l]([e,t].join("."));else if(c in s)m=s[c](t);else for(v=s[d]("div"),m=[],g=0,y=v.length;g<y;g++)~v[g].className.indexOf(t)&&m.push(v[g]);for(g=0,y=m.length;g<y;g++)p.test((n(m[g][a])||{}).innerHTML)&&o<new r(+i.$3,h[i.$1],+i.$2)?m[g].innerHTML=tinydown(u.call(m[g].textContent||m[g].innerText)).replace(f,'<pre class="code">$1</pre>'):m[g].style.whiteSpace="normal"}function n(e){while(e&&e.nodeName!=="H2")e=e.previousSibling;return e}var r=e.Date,i=e.RegExp,s=e.document,o=(new r(2013,3,11)).getTime(),u="".trim||function(){return this.replace(a)},a=/^\s+|\s+$/g,f=/<pre><code(?: class="[^"]+?")>([^\x00]+?)<\/code><\/pre>/g,l=/<br\/>/g,c=function(e,t){return'<pre class="code">'+t.replace(l,"\n")+"</pre>"},h={January:0,February:1,March:2,April:3,May:4,June:5,July:6,August:7,September:8,October:9,November:10,December:11},p="",d=!1,v=e.addEventListener||e.attachEvent,m=function(){d||(d=!0,t())},g;for(g in h)p+="|"+g;p=new i("("+p.slice(1)+")\\s+(\\d+),\\s+(\\d+)"),v("DOMContentLoaded",m),v("onload",m),v("load",m)}(this); \ No newline at end of file +!function(e){function t(){var t="div",a="post-body",l="parentNode",c="querySelectorAll",d="getElementsByClassName",v="getElementsByTagName",m,g,y,b;if(c in s)g=s[c]([t,a].join("."));else if(d in s)g=s[d](a);else for(m=s[v]("div"),g=[],y=0,b=m.length;y<b;y++)~m[y].className.indexOf(a)&&g.push(m[y]);for(y=0,b=g.length;y<b;y++)p.test((n(g[y][l])||{}).innerHTML)&&o<new r(+i.$3,h[i.$1],+i.$2)?g[y].innerHTML=tinydown(u.call(g[y].textContent||g[y].innerText)).replace(f,'<pre class="code">$1</pre>').replace(e.attachEvent?/<\/code><\/pre>\s*<pre><code>/g:/^\x00/,""):g[y].style.whiteSpace="normal"}function n(e){while(e&&e.nodeName!=="H2")e=e.previousSibling;return e}var r=e.Date,i=e.RegExp,s=e.document,o=(new r(2013,3,11)).getTime(),u="".trim||function(){return this.replace(a)},a=/^\s+|\s+$/g,f=/<pre><code(?: class="[^"]+?")>([^\x00]+?)<\/code><\/pre>/g,l=/<br\/>/g,c=function(e,t){return'<pre class="code">'+u.call(t.replace(l,"\n"))+"</pre>"},h={January:0,February:1,March:2,April:3,May:4,June:5,July:6,August:7,September:8,October:9,November:10,December:11},p="",d=!1,v=e.addEventListener||e.attachEvent,m=function(){d||(d=!0,t())},g;for(g in h)p+="|"+g;p=new i("("+p.slice(1)+")\\s+(\\d+),\\s+(\\d+)"),v("DOMContentLoaded",m),v("onload",m),v("load",m)}(this); \ No newline at end of file diff --git a/src/webreflection.js b/src/webreflection.js index <HASH>..<HASH> 100644 --- a/src/webreflection.js +++ b/src/webreflection.js @@ -35,6 +35,9 @@ ) { list[i].innerHTML = tinydown(trim.call(list[i].textContent || list[i].innerText)).replace( pre, '<pre class="code">$1</pre>' + ).replace( + window.attachEvent ? + /<\/code><\/pre>\s*<pre><code>/g : /^\x00/, '' ); } else { list[i].style.whiteSpace = 'normal'; @@ -58,7 +61,7 @@ pre = /<pre><code(?: class="[^"]+?")>([^\x00]+?)<\/code><\/pre>/g, br = /<br\/>/g, preplace = function (m, $1) { - return '<pre class="code">' + $1.replace(br, '\n') + '</pre>'; + return '<pre class="code">' + trim.call($1.replace(br, '\n')) + '</pre>'; }, months = { 'January':0,
trying to understand IE8 ...
WebReflection_tinydown
train
d68d41ec0a7fecdff51fc52cd6965b122ace536e
diff --git a/sources/scalac/atree/ACode.java b/sources/scalac/atree/ACode.java index <HASH>..<HASH> 100644 --- a/sources/scalac/atree/ACode.java +++ b/sources/scalac/atree/ACode.java @@ -15,6 +15,11 @@ import scalac.symtab.Type; public class ACode { //######################################################################## + // Public Constants + + public static final ACode[] EMPTY_ARRAY = new ACode[0]; + + //######################################################################## // Public Cases // jvm : -
- Added constant EMPTY_ARRAY
scala_scala
train
f5997aeae5c757886ddc1076cd634c78210a485f
diff --git a/lib/quickbooks/service/reports.rb b/lib/quickbooks/service/reports.rb index <HASH>..<HASH> 100644 --- a/lib/quickbooks/service/reports.rb +++ b/lib/quickbooks/service/reports.rb @@ -6,6 +6,7 @@ module Quickbooks if(options == {}) return "#{url_for_base}/reports/#{which_report}?date_macro=#{URI.encode_www_form_component(date_macro)}" else + options_string = "" options.each do |key, value| options_string += "#{key}=#{value}&" end @@ -16,7 +17,7 @@ module Quickbooks end end - def fetch_collection(model, date_macro, object_query, options) + def fetch_collection(model, date_macro, object_query, options = {}) response = do_http_get(url_for_query(object_query, date_macro, options)) parse_collection(response, model)
default for 'options' in fetch_collection, and set options_string to "" initially
ruckus_quickbooks-ruby
train
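The Ruby fix above initialises options_string before the loop appends to it; without that, the first += hits an undefined local (inside a Python function the same mistake raises UnboundLocalError). A compact Python rendering of the query-string assembly that sidesteps the accumulator entirely:

def options_query_string(options):
    # Joining the pairs avoids ever touching an uninitialised accumulator.
    return '&'.join('%s=%s' % (key, value) for key, value in options.items())

print(options_query_string({'start_date': '2014-01-01'}))   # start_date=2014-01-01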
011cf2be5076dc4a335e0733401273bf141e121e
diff --git a/src/TemplateData.js b/src/TemplateData.js index <HASH>..<HASH> 100644 --- a/src/TemplateData.js +++ b/src/TemplateData.js @@ -206,9 +206,8 @@ TemplateData.prototype.getLocalDataPaths = function(templatePath) { if (!inputDir) { paths.push(dirPath); } else { - if ( - (dir + (inputDirHasTrailingSlash ? "/" : "")).indexOf(inputDir) === 0 - ) { + let dirStr = dir + (inputDirHasTrailingSlash ? "/" : ""); + if (dirStr.indexOf(inputDir) === 0 && dirStr !== inputDir) { paths.push(dirPath); } } diff --git a/test/TemplateDataTest.js b/test/TemplateDataTest.js index <HASH>..<HASH> 100644 --- a/test/TemplateDataTest.js +++ b/test/TemplateDataTest.js @@ -147,10 +147,7 @@ test("getLocalDataPaths", async t => { "./test/stubs/component/component.liquid" ); - t.deepEqual(paths, [ - "./test/stubs/component/component.json", - "./test/stubs/stubs.json" - ]); + t.deepEqual(paths, ["./test/stubs/component/component.json"]); }); test("getLocalDataPaths with inputDir passed in (trailing slash)", async t => { @@ -159,10 +156,7 @@ test("getLocalDataPaths with inputDir passed in (trailing slash)", async t => { "./test/stubs/component/component.liquid" ); - t.deepEqual(paths, [ - "./test/stubs/component/component.json", - "./test/stubs/stubs.json" - ]); + t.deepEqual(paths, ["./test/stubs/component/component.json"]); }); test("getLocalDataPaths with inputDir passed in (no trailing slash)", async t => { @@ -171,8 +165,5 @@ test("getLocalDataPaths with inputDir passed in (no trailing slash)", async t => "./test/stubs/component/component.liquid" ); - t.deepEqual(paths, [ - "./test/stubs/component/component.json", - "./test/stubs/stubs.json" - ]); + t.deepEqual(paths, ["./test/stubs/component/component.json"]); }); diff --git a/test/TemplateTest.js b/test/TemplateTest.js index <HASH>..<HASH> 100644 --- a/test/TemplateTest.js +++ b/test/TemplateTest.js @@ -395,8 +395,7 @@ test("Local template data file import (without a global data json)", async t => let data = await tmpl.getData(); t.deepEqual(dataObj.getLocalDataPaths(tmpl.getInputPath()), [ - "./test/stubs/component/component.json", - "./test/stubs/stubs.json" + "./test/stubs/component/component.json" ]); t.is(data.localdatakey1, "localdatavalue1"); t.is(await tmpl.render(), "localdatavalue1"); @@ -416,8 +415,7 @@ test("Local template data file import (two subdirectories deep)", async t => { t.deepEqual(dataObj.getLocalDataPaths(tmpl.getInputPath()), [ "./test/stubs/firstdir/seconddir/component.json", "./test/stubs/firstdir/seconddir/seconddir.json", - "./test/stubs/firstdir/firstdir.json", - "./test/stubs/stubs.json" + "./test/stubs/firstdir/firstdir.json" ]); }); @@ -435,8 +433,7 @@ test("Posts inherits local JSON, layouts", async t => { let localDataPaths = dataObj.getLocalDataPaths(tmpl.getInputPath()); t.deepEqual(localDataPaths, [ "./test/stubs/posts/post1.json", - "./test/stubs/posts/posts.json", - "./test/stubs/stubs.json" + "./test/stubs/posts/posts.json" ]); let localData = await dataObj.getLocalData(tmpl.getInputPath()); @@ -465,10 +462,7 @@ test("Template and folder name are the same, make sure data imports work ok", as ); let localDataPaths = dataObj.getLocalDataPaths(tmpl.getInputPath()); - t.deepEqual(localDataPaths, [ - "./test/stubs/posts/posts.json", - "./test/stubs/stubs.json" - ]); + t.deepEqual(localDataPaths, ["./test/stubs/posts/posts.json"]); let localData = await dataObj.getLocalData(tmpl.getInputPath()); t.is(localData.layout, "mylocallayout.njk");
Don’t include a top level root directory json file. #<I>
11ty_eleventy
train
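The eleventy fix tightens the containment test so the input directory itself no longer counts as one of a template's local-data directories; only directories strictly inside it do. A simplified Python sketch of the corrected check (the real code instead tracks whether inputDir already carries a trailing slash):

def is_inside_input_dir(dir_path, input_dir):
    dir_str = dir_path.rstrip('/') + '/'
    root = input_dir.rstrip('/') + '/'
    return dir_str.startswith(root) and dir_str != root

print(is_inside_input_dir('./test/stubs/posts', './test/stubs'))   # True
print(is_inside_input_dir('./test/stubs', './test/stubs'))         # False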
6b0b5d801634b227afad685db779348b828b6f6d
diff --git a/django_core/forms/mixins/users.py b/django_core/forms/mixins/users.py index <HASH>..<HASH> 100644 --- a/django_core/forms/mixins/users.py +++ b/django_core/forms/mixins/users.py @@ -1,3 +1,7 @@ +from __future__ import unicode_literals + +from django import forms + class UserFormMixin(object): """Form mixin that puts the user on the form object.""" @@ -10,3 +14,28 @@ class UserFormMixin(object): self.user = user super(UserFormMixin, self).__init__(*args, **kwargs) + + +class UserAuthorizationRequiredForm(UserFormMixin, forms.Form): + """Form for requiring a user to enter their password to successfully pass + form validation. This is useful for flows like: + + * change_email + * change_password + """ + error_messages = { + 'password_incorrect': _("Your old password was entered incorrectly. " + "Please enter it again."), + } + user_password = forms.CharField(max_length=50, widget=forms.PasswordInput) + + def clean_user_password(self): + password = self.cleaned_data['user_password'] + + if not self.user.check_password(password): + raise forms.ValidationError( + self.error_messages['password_incorrect'], + code='password_incorrect', + ) + + return password
added user authorization required form mixin.
InfoAgeTech_django-core
train
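The form added above references the translation helper _ without importing it, so the module as diffed would raise a NameError when the class body runs. A self-contained sketch of the same form, assuming Django's ugettext_lazy (the helper current at the time) is what was intended, with the mixin's constructor folded in for completeness:

from django import forms
from django.utils.translation import ugettext_lazy as _

class UserAuthorizationRequiredForm(forms.Form):
    """Require the user's current password before the form validates."""

    error_messages = {
        'password_incorrect': _("Your old password was entered incorrectly. "
                                "Please enter it again."),
    }
    user_password = forms.CharField(max_length=50,
                                    widget=forms.PasswordInput)

    def __init__(self, user=None, *args, **kwargs):
        self.user = user
        super(UserAuthorizationRequiredForm, self).__init__(*args, **kwargs)

    def clean_user_password(self):
        password = self.cleaned_data['user_password']
        if not self.user.check_password(password):
            raise forms.ValidationError(
                self.error_messages['password_incorrect'],
                code='password_incorrect',
            )
        return password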
eeefd94da98371b9ae17cb2f0b755bb901fb4953
diff --git a/lib/ruby_odata/service.rb b/lib/ruby_odata/service.rb index <HASH>..<HASH> 100644 --- a/lib/ruby_odata/service.rb +++ b/lib/ruby_odata/service.rb @@ -705,12 +705,12 @@ class Service end def parse_value(content, property_type = nil, property_null = nil) - # Handle a nil property type, this is a string - return content if property_type.nil? - # Handle anything marked as null return nil if !property_null.nil? && property_null == "true" + # Handle a nil property type, this is a string + return content if property_type.nil? + # Handle integers return content.to_i if property_type.match(/^Edm.Int/) diff --git a/spec/service_spec.rb b/spec/service_spec.rb index <HASH>..<HASH> 100644 --- a/spec/service_spec.rb +++ b/spec/service_spec.rb @@ -776,6 +776,14 @@ module OData to_return(:status => 200, :body => File.new(File.expand_path("../fixtures/ms_system_center/vm_templates.xml", __FILE__)), :headers => {}) end + + it "should successfully parse null valued string properties" do + svc = OData::Service.new "http://test.com/test.svc/", { :username => "blabla", :password=> "", :verify_ssl => false, :namespace => "VMM" } + svc.VirtualMachines + results = svc.execute + results.first.should be_a_kind_of(VMM::VirtualMachine) + results.first.CostCenter.should be_nil + end it "should successfully return a virtual machine" do svc = OData::Service.new "http://test.com/test.svc/", { :username => "blabla", :password=> "", :verify_ssl => false, :namespace => "VMM" }
Fix parsing of null strings. These got parsed as empty strings
visoft_ruby_odata
train
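The ruby_odata fix is purely an ordering change: the null marker has to be checked before the "no declared type means string" shortcut, or a null-valued string property falls through and comes back as its raw (empty) content. The same guards in Python, simplified (the real method handles many more Edm types):

def parse_value(content, property_type=None, property_null=None):
    if property_null == 'true':    # explicit nulls win over everything
        return None
    if property_type is None:      # untyped properties are plain strings
        return content
    if property_type.startswith('Edm.Int'):
        return int(content)
    return content

print(parse_value('', None, 'true'))          # None, not ''
print(parse_value('42', 'Edm.Int32', None))   # 42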
2a35543b4a8348cd115f4b2f138731e5f3c4a080
diff --git a/lib/active_merchant/billing/gateway.rb b/lib/active_merchant/billing/gateway.rb index <HASH>..<HASH> 100644 --- a/lib/active_merchant/billing/gateway.rb +++ b/lib/active_merchant/billing/gateway.rb @@ -58,6 +58,7 @@ module ActiveMerchant #:nodoc: DEBIT_CARDS = [ :switch, :solo ] CURRENCIES_WITHOUT_FRACTIONS = [ 'BIF', 'BYR', 'CLP', 'CVE', 'DJF', 'GNF', 'HUF', 'ISK', 'JPY', 'KMF', 'KRW', 'PYG', 'RWF', 'TWD', 'UGX', 'VND', 'VUV', 'XAF', 'XOF', 'XPF' ] + CREDIT_DEPRECATION_MESSAGE = "Support for using credit to refund existing transactions is deprecated and will be removed from a future release of ActiveMerchant. Please use the refund method instead." RECURRING_DEPRECATION_MESSAGE = "Recurring functionality in ActiveMerchant is deprecated and will be removed in a future version. Please contact the ActiveMerchant maintainers if you have an interest in taking ownership of a separate gem that continues support for it." @@ -112,6 +113,10 @@ module ActiveMerchant #:nodoc: result.to_s.downcase end + def self.non_fractional_currency?(currency) + CURRENCIES_WITHOUT_FRACTIONS.include?(currency.to_s) + end + def self.supported_countries=(country_codes) country_codes.each do |country_code| unless ActiveMerchant::Country.find(country_code) @@ -197,7 +202,7 @@ module ActiveMerchant #:nodoc: def localized_amount(money, currency) amount = amount(money) - return amount unless non_fractional_currency?(currency) + return amount unless Gateway.non_fractional_currency?(currency) if self.money_format == :cents sprintf("%.0f", amount.to_f / 100) @@ -206,9 +211,6 @@ module ActiveMerchant #:nodoc: end end - def non_fractional_currency?(currency) - CURRENCIES_WITHOUT_FRACTIONS.include?(currency.to_s) - end def currency(money) money.respond_to?(:currency) ? money.currency : self.default_currency diff --git a/lib/active_merchant/billing/gateways/paypal/paypal_common_api.rb b/lib/active_merchant/billing/gateways/paypal/paypal_common_api.rb index <HASH>..<HASH> 100644 --- a/lib/active_merchant/billing/gateways/paypal/paypal_common_api.rb +++ b/lib/active_merchant/billing/gateways/paypal/paypal_common_api.rb @@ -659,7 +659,7 @@ module ActiveMerchant #:nodoc: end def item_amount(amount, currency_code) - if amount.to_i < 0 && non_fractional_currency?(currency_code) + if amount.to_i < 0 && Gateway.non_fractional_currency?(currency_code) amount(amount).to_f.floor else localized_amount(amount, currency_code)
Make a non_fractional_currency? accessor public
activemerchant_active_merchant
train
cdaf6e8b042da9ac8756b017460cfe6af61c0eaa
diff --git a/src/fi/tkk/ics/hadoop/bam/cli/Utils.java b/src/fi/tkk/ics/hadoop/bam/cli/Utils.java index <HASH>..<HASH> 100644 --- a/src/fi/tkk/ics/hadoop/bam/cli/Utils.java +++ b/src/fi/tkk/ics/hadoop/bam/cli/Utils.java @@ -95,8 +95,8 @@ public final class Utils { out.print('\n'); } - private static String argv0 = null; - private static Class argv0Class = null; + private static String argv0 = null; + private static Class<?> argv0Class = null; // For printing something intelligent in "Usage: argv0 <args>" messages. public static String getArgv0() { @@ -122,7 +122,7 @@ public final class Utils { return argv0; } - public static void setArgv0Class(Class cl) { + public static void setArgv0Class(Class<?> cl) { argv0Class = cl; argv0 = null; }
Silence [rawtypes] warnings in non-custom code
HadoopGenomics_Hadoop-BAM
train
3f71497d53d6149158a3199a02ac23f337c35367
diff --git a/lib/codesake/dawn/kb/basic_check.rb b/lib/codesake/dawn/kb/basic_check.rb index <HASH>..<HASH> 100644 --- a/lib/codesake/dawn/kb/basic_check.rb +++ b/lib/codesake/dawn/kb/basic_check.rb @@ -196,6 +196,9 @@ module Codesake def rubysec_advisories_link "http://www.rubysec.com/advisories/#{@name}/" end + def osvdb_link + "http://osvdb.org/show/osvdb/#{@osvdb}" + end def cvss_score return Cvss::Engine.new.score(self.cvss) unless self.cvss.nil?
Added a helper to build a link to the OSVDB web site
thesp0nge_dawnscanner
train
bba23795c960385f6ea0412c2e229ca5f5e17bf1
diff --git a/src/cobra/test/test_util/test_solver.py b/src/cobra/test/test_util/test_solver.py index <HASH>..<HASH> 100644 --- a/src/cobra/test/test_util/test_solver.py +++ b/src/cobra/test/test_util/test_solver.py @@ -18,25 +18,25 @@ optlang_solvers = [f"optlang-{s}" for s in stable_optlang if s in su.solvers] def test_solver_list() -> None: - """Test if solvers are found.""" + """Expect that at least the GLPK solver is found.""" assert len(su.solvers) >= 1 assert "glpk" in su.solvers def test_interface_str() -> None: - """Test string representation of solver interfaces.""" + """Test the string representation of solver interfaces.""" assert su.interface_to_str("nonsense") == "nonsense" assert su.interface_to_str("optlang.glpk_interface") == "glpk" assert su.interface_to_str("optlang-cplex") == "cplex" def test_solver_name() -> None: - """Test default solver name.""" + """Test that the default LP solver name is GLPK.""" assert su.get_solver_name() == "glpk" def test_choose_solver(model: "Model") -> Optional[su.SolverNotFound]: - """Test if solver switching is working.""" + """Test that solver switching is working.""" so = su.choose_solver(model, "glpk") assert su.interface_to_str(so) == "glpk" @@ -49,7 +49,7 @@ def test_choose_solver(model: "Model") -> Optional[su.SolverNotFound]: def test_linear_reaction_coefficients(model: "Model") -> None: - """Test if linear coefficients are identifiable in objective.""" + """Test that linear coefficients are identifiable in objective.""" coefficients = su.linear_reaction_coefficients(model) assert coefficients == {model.reactions.Biomass_Ecoli_core: 1} diff --git a/src/cobra/util/solver.py b/src/cobra/util/solver.py index <HASH>..<HASH> 100644 --- a/src/cobra/util/solver.py +++ b/src/cobra/util/solver.py @@ -124,7 +124,7 @@ def set_objective( Parameters ---------- - model : cobra model + model : cobra.Model The model to set the objective for. value : optlang.interface.Objective, optlang.symbolics.Basic, dict If the model objective is linear, then the value can be a new
refactor: update docstrings
opencobra_cobrapy
train
a14da799cc232a9477315f7284e826b80eebe5e1
diff --git a/src/js/cms/TabsScroller.js b/src/js/cms/TabsScroller.js index <HASH>..<HASH> 100644 --- a/src/js/cms/TabsScroller.js +++ b/src/js/cms/TabsScroller.js @@ -33,7 +33,7 @@ define(['jquery', 'amplify'], function($, amplify) { } else { offset = offset * -1; - that.$scroller.animate({scrollLeft: position+offset}, animationTime); + that.$scroller.animate({scrollLeft: Math.max(0, position+offset)}, animationTime); } }); @@ -44,7 +44,20 @@ define(['jquery', 'amplify'], function($, amplify) { var $tab = that.$tabsContainer.find('[role="tabs-nav"] > li.active'); if ($tab.length) { - that.$scroller.animate({scrollLeft: $tab.position().left-that.$scroller.innerWidth()/2}, animationTime); + var tabsWidth = 0; + that.$scroller.find('ul > li').each(function() { + tabsWidth += $(this).outerWidth(); + }); + + // the viewport width + var scrollerWidth = that.$scroller.innerWidth(); + + var position = Math.min( + $tab.position().left-scrollerWidth/2, // scroll to the tab beeing in the middle + tabsWidth-scrollerWidth // scroll to the end + ); + + that.$scroller.animate({scrollLeft: position}, animationTime); } }; };
improve scrolling when the active tab is the last one
webforge-labs_cms
train
f9ecdff04493383ede86550624d30d5a21fcb04a
diff --git a/public/js/editors/panel.js b/public/js/editors/panel.js index <HASH>..<HASH> 100644 --- a/public/js/editors/panel.js +++ b/public/js/editors/panel.js @@ -1,7 +1,8 @@ /*globals $, CodeMirror, jsbin, jshintEnabled, RSVP */ var $document = $(document), - $source = $('#source'); + $source = $('#source'), + userResizeable = !$('html').hasClass('layout'); var editorModes = { html: 'htmlmixed', @@ -266,7 +267,7 @@ Panel.prototype = { // update the splitter - but do it on the next tick // required to allow the splitter to see it's visible first setTimeout(function () { - if (panel.splitter.length) { + if (userResizeable) { if (x !== undefined) { panel.splitter.trigger('init', x); } else { @@ -486,8 +487,8 @@ Panel.prototype = { $source[0].style.paddingLeft = '1px'; setTimeout(function () { $source[0].style.paddingLeft = '0'; - }, 0) - }, 0) + }, 0); + }, 0); } });
Fixed switching back to resizable. The issue was that there's no splitter on the HTML panel, so it was skipping some key code.
jsbin_jsbin
train
7227386b8b11aa025e00d62d6b124cbc16d6519d
diff --git a/core/commands/dns.go b/core/commands/dns.go index <HASH>..<HASH> 100644 --- a/core/commands/dns.go +++ b/core/commands/dns.go @@ -26,7 +26,7 @@ This command resolves those links to the referenced object. For example, with this DNS TXT record: - ipfs.io. TXT "dnslink=/ipfs/QmRzTuh2Lpuz7Gr39stNr6mTFdqAghsZec1JoUnfySUzcy ..." + ipfs.io TXT "dnslink=/ipfs/QmRzTuh2Lpuz7Gr39stNr6mTFdqAghsZec1JoUnfySUzcy ..." The resolver will give: @@ -35,7 +35,7 @@ The resolver will give: And with this DNS TXT record: - ipfs.ipfs.io. TXT "dnslink=/dns/ipfs.io ..." + ipfs.ipfs.io TXT "dnslink=/dns/ipfs.io ..." The resolver will give:
Remove period from dns text examples. It is not useful. See <URL>
ipfs_go-ipfs
train
310eec062a00c2d9be5f813ab34a2aa0dde0eff0
diff --git a/openquake/output/risk.py b/openquake/output/risk.py index <HASH>..<HASH> 100644 --- a/openquake/output/risk.py +++ b/openquake/output/risk.py @@ -257,8 +257,6 @@ class BCRMapXMLWriter(BaseMapXMLWriter): The following metadata is added to the map container: Interest rate, Asset life expectancy. """ - # TODO: unittest - METADATA = BaseMapXMLWriter.METADATA + [ 'interestRate', 'assetLifeExpectancy' ] diff --git a/openquake/risk/job/general.py b/openquake/risk/job/general.py index <HASH>..<HASH> 100644 --- a/openquake/risk/job/general.py +++ b/openquake/risk/job/general.py @@ -90,7 +90,6 @@ def write_output_bcr(mixin): """ Write BCR map in NRML format. """ - # TODO: unittest path = os.path.join(mixin.base_path, mixin.params['OUTPUT_DIR'], "bcr-map.xml")
risk: removed "TODO: unittest" comments from the code that is tested. Former-commit-id: 6fa<I>f1d5a<I>a<I>e<I>cc<I>c<I>af4dea
gem_oq-engine
train