field         dtype          values / length range
content_type  stringclasses  8 values
main_lang     stringclasses  7 values
message       stringlengths  1-50
sha           stringlengths  40-40
patch         stringlengths  52-962k
file_count    int64          1-300
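For orientation, here is a minimal sketch of how rows with the schema above might be consumed. It is written under stated assumptions: the records are presumed to be available as JSON Lines in a file named commits.jsonl (the filename and storage format are assumptions, not part of the schema), and the per-file split relies on the <ide><path> marker that opens every file section in the patch values of the rows that follow.

import json

# Minimal sketch, assuming one JSON object per line with the six fields
# described above (content_type, main_lang, message, sha, patch, file_count).
# The filename "commits.jsonl" is hypothetical.
with open("commits.jsonl") as f:
    for line in f:
        row = json.loads(line)
        # In the rows below, each per-file section of `patch` begins with an
        # "<ide><path>" marker, so splitting on it recovers the changed files.
        files = [part for part in row["patch"].split("<ide><path>") if part.strip()]
        assert len(files) == row["file_count"]
        print(row["sha"][:8], row["main_lang"], row["file_count"], row["message"])

Splitting on the marker rather than parsing unified-diff headers keeps the sketch independent of how the <ide>/<add>/<del> prefixes encode context, added, and deleted lines.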
Ruby
Ruby
fix timezone test
34929fed2254570941309a731a386de573e64b5a
<ide><path>activesupport/test/time_zone_test.rb <ide> def test_utc_offset_lazy_loaded_from_tzinfo_when_not_passed_in_to_initialize <ide> end <ide> <ide> def test_seconds_to_utc_offset_with_colon <del> assert_equal "-06:00", TimeZone.seconds_to_utc_offset(-21_600) <del> assert_equal "+00:00", TimeZone.seconds_to_utc_offset(0) <del> assert_equal "+05:00", TimeZone.seconds_to_utc_offset(18_000) <add> assert_equal "-06:00", ActiveSupport::TimeZone.seconds_to_utc_offset(-21_600) <add> assert_equal "+00:00", ActiveSupport::TimeZone.seconds_to_utc_offset(0) <add> assert_equal "+05:00", ActiveSupport::TimeZone.seconds_to_utc_offset(18_000) <ide> end <ide> <ide> def test_seconds_to_utc_offset_without_colon <del> assert_equal "-0600", TimeZone.seconds_to_utc_offset(-21_600) <del> assert_equal "+0000", TimeZone.seconds_to_utc_offset(0) <del> assert_equal "+0500", TimeZone.seconds_to_utc_offset(18_000) <add> assert_equal "-0600", ActiveSupport::TimeZone.seconds_to_utc_offset(-21_600, false) <add> assert_equal "+0000", ActiveSupport::TimeZone.seconds_to_utc_offset(0, false) <add> assert_equal "+0500", ActiveSupport::TimeZone.seconds_to_utc_offset(18_000, false) <ide> end <ide> <ide> def test_formatted_offset_positive
1
Ruby
Ruby
assign developer_dir before it is needed
ca14962ab9a16ccd50a50fbbefe4c1486662bf9c
<ide><path>Library/Homebrew/superenv.rb <ide> def setup_build_environment <ide> check <ide> ENV['CC'] = 'cc' <ide> ENV['CXX'] = 'c++' <add> ENV['DEVELOPER_DIR'] = determine_developer_dir # effects later settings <ide> ENV['MAKEFLAGS'] ||= "-j#{determine_make_jobs}" <ide> ENV['PATH'] = determine_path <ide> ENV['PKG_CONFIG_PATH'] = determine_pkg_config_path <ide> def setup_build_environment <ide> ENV['CMAKE_LIBRARY_PATH'] = determine_cmake_library_path <ide> ENV['ACLOCAL_PATH'] = determine_aclocal_path <ide> ENV['VERBOSE'] = '1' if ARGV.verbose? <del> ENV['DEVELOPER_DIR'] = determine_developer_dir <ide> end <ide> <ide> def check
1
Javascript
Javascript
update safari preload test
6f4d46aa2addae90c2ba8c72afd4096ec08a5dd6
<ide><path>test/integration/production/test/index.test.js <ide> describe('Production Usage', () => { <ide> <ide> if (browserName === 'safari') { <ide> const elements = await browser.elementsByCss('link[rel=preload]') <del> // 4 page preloads and 5 existing preloads for _app, commons, main, etc <del> expect(elements.length).toBe(5) <add> // optimized preloading uses defer instead of preloading and prefetches <add> // aren't generated client-side since safari does not support prefetch <add> expect(elements.length).toBe(0) <ide> } else { <ide> const elements = await browser.elementsByCss('link[rel=prefetch]') <ide> expect(elements.length).toBe(4)
1
Javascript
Javascript
add more http token/value checking tests
c8fa79f35128388496b42e41adcec486797f6122
<ide><path>test/parallel/test-http-invalidheaderfield2.js <add>'use strict'; <add>require('../common'); <add>const assert = require('assert'); <add>const inspect = require('util').inspect; <add>const checkIsHttpToken = require('_http_common')._checkIsHttpToken; <add>const checkInvalidHeaderChar = require('_http_common')._checkInvalidHeaderChar; <add> <add>// Good header field names <add>[ <add> 'TCN', <add> 'ETag', <add> 'date', <add> 'alt-svc', <add> 'Content-Type', <add> '0', <add> 'Set-Cookie2', <add> 'Set_Cookie', <add> 'foo`bar^', <add> 'foo|bar', <add> '~foobar', <add> 'FooBar!', <add> '#Foo', <add> '$et-Cookie', <add> '%%Test%%', <add> 'Test&123', <add> 'It\'s_fun', <add> '2*3', <add> '4+2', <add> '3.14159265359' <add>].forEach(function(str) { <add> assert.strictEqual(checkIsHttpToken(str), <add> true, <add> 'checkIsHttpToken(' + <add> inspect(str) + <add> ') unexpectedly failed'); <add>}); <add>// Bad header field names <add>[ <add> ':', <add> '@@', <add> '中文呢', // unicode <add> '((((())))', <add> ':alternate-protocol', <add> 'alternate-protocol:', <add> 'foo\nbar', <add> 'foo\rbar', <add> 'foo\r\nbar', <add> 'foo\x00bar', <add> '\x7FMe!', <add> '{Start', <add> '(Start', <add> '[Start', <add> 'End}', <add> 'End)', <add> 'End]', <add> '"Quote"', <add> 'This,That' <add>].forEach(function(str) { <add> assert.strictEqual(checkIsHttpToken(str), <add> false, <add> 'checkIsHttpToken(' + <add> inspect(str) + <add> ') unexpectedly succeeded'); <add>}); <add> <add> <add>// Good header field values <add>[ <add> 'foo bar', <add> 'foo\tbar', <add> '0123456789ABCdef', <add> '!@#$%^&*()-_=+\\;\':"[]{}<>,./?|~`' <add>].forEach(function(str) { <add> assert.strictEqual(checkInvalidHeaderChar(str), <add> false, <add> 'checkInvalidHeaderChar(' + <add> inspect(str) + <add> ') unexpectedly failed'); <add>}); <add> <add>// Bad header field values <add>[ <add> 'foo\rbar', <add> 'foo\nbar', <add> 'foo\r\nbar', <add> '中文呢', // unicode <add> '\x7FMe!', <add> 'Testing 123\x00', <add> 'foo\vbar', <add> 'Ding!\x07' <add>].forEach(function(str) { <add> assert.strictEqual(checkInvalidHeaderChar(str), <add> true, <add> 'checkInvalidHeaderChar(' + <add> inspect(str) + <add> ') unexpectedly succeeded'); <add>});
1
Javascript
Javascript
drop unused argument
51a560a3cf5254eecc3f3b6466a1ac3205d17264
<ide><path>packages/ember-htmlbars/lib/system/component-node.js <ide> function ComponentNode(component, scope, renderNode, block, expectElement) { <ide> <ide> export default ComponentNode; <ide> <del>ComponentNode.create = function(renderNode, env, attrs, found, parentView, path, contentScope, contentTemplate, visitor) { <add>ComponentNode.create = function(renderNode, env, attrs, found, parentView, path, contentScope, contentTemplate) { <ide> found = found || lookupComponent(env, path); <ide> Ember.assert('HTMLBars error: Could not find component named "' + path + '" (no component or template with that name was found)', function() { <ide> if (path) { <ide> export function createOrUpdateComponent(component, options, renderNode) { <ide> renderNode.emberView = component; <ide> return component; <ide> } <del>
1
Ruby
Ruby
memoize_all and unmemoize_all
3fc9a67c04bade858e7ac7eb8cd94eec6a63ec27
<ide><path>activesupport/lib/active_support/memoizable.rb <ide> def self.included(base) <ide> end <ide> <ide> def freeze_with_memoizable <del> unless frozen? <del> methods.each do |method| <del> if method.to_s =~ /^_unmemoized_(.*)/ <del> begin <del> __send__($1).freeze <del> rescue ArgumentError <del> end <add> memoize_all unless frozen? <add> freeze_without_memoizable <add> end <add> <add> def memoize_all <add> methods.each do |m| <add> if m.to_s =~ /^_unmemoized_(.*)/ <add> if method(m).arity == 0 <add> __send__($1) <add> else <add> ivar = :"@_memoized_#{$1}" <add> instance_variable_set(ivar, {}) <ide> end <ide> end <ide> end <add> end <ide> <del> freeze_without_memoizable <add> def unmemoize_all <add> methods.each do |m| <add> if m.to_s =~ /^_unmemoized_(.*)/ <add> ivar = :"@_memoized_#{$1}" <add> instance_variable_get(ivar).clear if instance_variable_defined?(ivar) <add> end <add> end <ide> end <ide> end <ide> <ide> def memoize(*symbols) <ide> symbols.each do |symbol| <del> original_method = "_unmemoized_#{symbol}" <del> memoized_ivar = "@_memoized_#{symbol.to_s.sub(/\?\Z/, '_query').sub(/!\Z/, '_bang')}" <add> original_method = :"_unmemoized_#{symbol}" <add> memoized_ivar = :"@_memoized_#{symbol.to_s.sub(/\?\Z/, '_query').sub(/!\Z/, '_bang')}" <ide> <ide> class_eval <<-EOS, __FILE__, __LINE__ <ide> include Freezable <ide> def memoize(*symbols) <ide> <ide> if instance_method(:#{symbol}).arity == 0 <ide> def #{symbol}(reload = false) <del> if !reload && defined? #{memoized_ivar} <del> #{memoized_ivar} <del> else <del> #{memoized_ivar} = #{original_method} <add> if reload || !defined?(#{memoized_ivar}) || #{memoized_ivar}.empty? <add> #{memoized_ivar} = [#{original_method}] <ide> end <add> #{memoized_ivar}[0] <ide> end <ide> else <ide> def #{symbol}(*args) <del> #{memoized_ivar} ||= {} <add> #{memoized_ivar} ||= {} unless frozen? <ide> reload = args.pop if args.last == true || args.last == :reload <ide> <del> if !reload && #{memoized_ivar} && #{memoized_ivar}.has_key?(args) <del> #{memoized_ivar}[args] <add> if #{memoized_ivar} <add> if !reload && #{memoized_ivar}.has_key?(args) <add> #{memoized_ivar}[args] <add> elsif #{memoized_ivar} <add> #{memoized_ivar}[args] = #{original_method}(*args) <add> end <ide> else <del> #{memoized_ivar}[args] = #{original_method}(*args) <add> #{original_method}(*args) <ide> end <ide> end <ide> end <ide><path>activesupport/test/memoizable_test.rb <ide> def test_reloadable <ide> assert_equal 3, @calculator.counter <ide> end <ide> <add> def test_unmemoize_all <add> assert_equal 1, @calculator.counter <add> <add> assert @calculator.instance_variable_get(:@_memoized_counter).any? <add> @calculator.unmemoize_all <add> assert @calculator.instance_variable_get(:@_memoized_counter).empty? <add> <add> assert_equal 2, @calculator.counter <add> end <add> <add> def test_memoize_all <add> @calculator.memoize_all <add> assert @calculator.instance_variable_defined?(:@_memoized_counter) <add> end <add> <ide> def test_memoization_cache_is_different_for_each_instance <ide> assert_equal 1, @calculator.counter <ide> assert_equal 2, @calculator.counter(:reload)
2
Javascript
Javascript
replace .bind with .on when building event aliases
5ba98fe32445dd58707949f64a599f6d4b0ee49b
<ide><path>src/event.js <ide> jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblcl <ide> } <ide> <ide> return arguments.length > 0 ? <del> this.bind( name, data, fn ) : <add> this.on( name, null, data, fn ) : <ide> this.trigger( name ); <ide> }; <ide>
1
Python
Python
use backbone factory
f7783e7a32d2624d2876681d4a2f7589fedb9a01
<ide><path>official/projects/detr/configs/detr.py <ide> class Losses(hyperparams.Config): <ide> lambda_box: float = 5.0 <ide> lambda_giou: float = 2.0 <ide> background_cls_weight: float = 0.1 <add> l2_weight_decay: float = 1e-4 <ide> <ide> @dataclasses.dataclass <ide> class Detr(hyperparams.Config): <ide> class Detr(hyperparams.Config): <ide> input_size: List[int] = dataclasses.field(default_factory=list) <ide> backbone: backbones.Backbone = backbones.Backbone( <ide> type='resnet', resnet=backbones.ResNet( <del> model_id=101, <add> model_id=50, <ide> bn_trainable=False)) <ide> norm_activation: common.NormActivation = common.NormActivation() <ide> <ide> def detr_coco() -> cfg.ExperimentConfig: <ide> decay_at = train_steps - 100 * steps_per_epoch # 400 epochs <ide> config = cfg.ExperimentConfig( <ide> task=DetrTask( <del> init_checkpoint='gs://ghpark-imagenet-tfrecord/ckpt/resnet101_imagenet', <add> init_checkpoint='gs://ghpark-imagenet-tfrecord/ckpt/resnet50_imagenet', <ide> init_checkpoint_modules='backbone', <ide> annotation_file=os.path.join(COCO_INPUT_PATH_BASE, <ide> 'instances_val2017.json'), <ide><path>official/projects/detr/modeling/detr.py <ide> <ide> from official.modeling import tf_utils <ide> from official.projects.detr.modeling import transformer <del>#from official.vision.modeling.backbones import resnet <add>from official.vision.modeling.backbones import resnet <ide> <ide> <ide> def position_embedding_sine(attention_mask, <ide> def __init__(self, backbone, num_queries, hidden_size, num_classes, <ide> raise ValueError("hidden_size must be a multiple of 2.") <ide> # TODO(frederickliu): Consider using the backbone factory. <ide> # TODO(frederickliu): Add to factory once we get skeleton code in. <del> #self._backbone = resnet.ResNet(50, bn_trainable=False) <add> #self._backbone = resnet.ResNet(101, bn_trainable=False) <ide> # (gunho) use backbone factory <ide> self._backbone = backbone <ide> <ide><path>official/projects/detr/tasks/detection.py <ide> def build_model(self): <ide> input_specs = tf.keras.layers.InputSpec( <ide> shape=[None] + self._task_config.model.input_size) <ide> <add> l2_weight_decay = self.task_config.losses.l2_weight_decay <add> # Divide weight decay by 2.0 to match the implementation of tf.nn.l2_loss. <add> # (https://www.tensorflow.org/api_docs/python/tf/keras/regularizers/l2) <add> # (https://www.tensorflow.org/api_docs/python/tf/nn/l2_loss) <add> l2_regularizer = (tf.keras.regularizers.l2( <add> l2_weight_decay / 2.0) if l2_weight_decay else None) <ide> <ide> backbone = backbones.factory.build_backbone( <ide> input_specs=input_specs, <ide> backbone_config=self._task_config.model.backbone, <ide> norm_activation_config=self._task_config.model.norm_activation) <del> <ide> model = detr.DETR( <ide> backbone, <ide> self._task_config.model.num_queries,
3
PHP
PHP
add test for empty meridian select
3176422baf9dd4560940a6cbc1861da9a5d9327d
<ide><path>tests/TestCase/View/Input/DateTimeTest.php <ide> public function testRenderEmptyValues() { <ide> 'hour' => ['empty' => 'HOUR'], <ide> 'minute' => ['empty' => 'MINUTE'], <ide> 'second' => ['empty' => 'SECOND'], <add> 'meridian' => ['empty' => 'MERIDIAN'], <ide> ]); <ide> $this->assertContains('<option value="" selected="selected">YEAR</option>', $result); <ide> $this->assertContains('<option value="" selected="selected">MONTH</option>', $result); <ide> $this->assertContains('<option value="" selected="selected">DAY</option>', $result); <ide> $this->assertContains('<option value="" selected="selected">HOUR</option>', $result); <ide> $this->assertContains('<option value="" selected="selected">MINUTE</option>', $result); <ide> $this->assertContains('<option value="" selected="selected">SECOND</option>', $result); <add> $this->assertContains('<option value="" selected="selected">MERIDIAN</option>', $result); <ide> } <ide> <ide> /**
1
Python
Python
send nodelocation object in create_volume
52bfed1676b4b92a1de8b03e7371ab0cefdd2b89
<ide><path>libcloud/compute/drivers/digitalocean.py <ide> def create_volume(self, size, name, filesystem_type=None, location=None, <ide> :return: The newly created volume. <ide> :rtype: :class:`StorageVolume` <ide> """ <del> attr = {'name': name, 'size_gigabytes': size, 'region': location, <add> attr = {'name': name, 'size_gigabytes': size, 'region': location.id, <ide> 'filesystem_type': filesystem_type} <ide> <ide> res = self.connection.request('/v2/volumes', data=json.dumps(attr),
1
PHP
PHP
add a function for clearing the detector cache
bc8d224cbed652b2e769f8aa8fab85dc4d82fd99
<ide><path>src/Network/Request.php <ide> class Request implements \ArrayAccess { <ide> * <ide> * @var array <ide> */ <del> protected $_isResults = []; <add> protected $_detectorCache = []; <ide> <ide> /** <ide> * Copy of php://input. Since this stream can only be read once in most SAPI's <ide> public function is($type) { <ide> return false; <ide> } <ide> <del> if (!isset($this->_isResults[$type])) { <del> $this->_isResults[$type] = $this->_is($type); <add> if (!isset($this->_detectorCache[$type])) { <add> $this->_detectorCache[$type] = $this->_is($type); <ide> } <ide> <del> return $this->_isResults[$type]; <add> return $this->_detectorCache[$type]; <add> } <add> <add>/** <add> * Clears the instance detector cache, used by the is() function <add> * <add> * @return void <add> */ <add> public function clearDetectorCache() { <add> $this->_detectorCache = []; <ide> } <ide> <ide> /** <ide> public function cookie($key) { <ide> public function env($key, $value = null) { <ide> if ($value !== null) { <ide> $this->_environment[$key] = $value; <del> $this->_isResults = []; <add> $this->clearDetectorCache(); <ide> return $this; <ide> } <ide> <ide><path>tests/TestCase/Network/RequestTest.php <ide> public function testAddDetector() { <ide> <ide> Request::addDetector('index', array('param' => 'action', 'value' => 'index')); <ide> $request->params['action'] = 'index'; <add> $request->clearDetectorCache(); <ide> $this->assertTrue($request->isIndex()); <ide> <ide> $request->params['action'] = 'add'; <add> $request->clearDetectorCache(); <ide> $this->assertFalse($request->isIndex()); <ide> <ide> $request->return = true; <add> $request->clearDetectorCache(); <ide> $this->assertTrue($request->isCallMe()); <ide> <ide> $request->return = false; <add> $request->clearDetectorCache(); <ide> $this->assertFalse($request->isCallMe()); <ide> <ide> Request::addDetector('callme', array($this, 'detectCallback')); <ide> $request->return = true; <add> $request->clearDetectorCache(); <ide> $this->assertTrue($request->isCallMe()); <ide> <ide> Request::addDetector('extension', array('param' => 'ext', 'options' => array('pdf', 'png', 'txt'))); <ide> $request->params['ext'] = 'pdf'; <add> $request->clearDetectorCache(); <ide> $this->assertTrue($request->is('extension')); <ide> <ide> $request->params['ext'] = 'exe'; <add> $request->clearDetectorCache(); <ide> $this->assertFalse($request->isExtension()); <ide> } <ide>
2
Javascript
Javascript
fix execfile timeouts, improve tests
6570cd99e51adb082d0e0076a1205cd867f4ce41
<ide><path>lib/child_process.js <ide> exports.execFile = function (file /* args, options, callback */) { <ide> var stdout = ""; <ide> var stderr = ""; <ide> var killed = false; <add> var exited = false; <add> <add> function exithandler (code, signal) { <add> if (timeoutId) clearTimeout(timeoutId); <add> if (exited) return; <add> exited = true; <add> if (!callback) return; <add> <add> if (code === 0 && signal === null) { <add> callback(null, stdout, stderr); <add> } else { <add> var e = new Error("Command failed: " + stderr); <add> e.killed = killed; <add> e.code = code; <add> e.signal = signal; <add> callback(e, stdout, stderr); <add> } <add> } <ide> <ide> function kill () { <del> child.kill(options.killSignal); <add> var c = child.kill(options.killSignal); <ide> killed = true; <add> process.nextTick(function () { <add> exithandler(null, options.killSignal) <add> }); <ide> } <ide> <ide> var timeoutId; <ide> exports.execFile = function (file /* args, options, callback */) { <ide> } <ide> }); <ide> <del> child.addListener("exit", function (code, signal) { <del> if (timeoutId) clearTimeout(timeoutId); <del> if (!callback) return; <del> if (code === 0 && signal === null) { <del> callback(null, stdout, stderr); <del> } else { <del> var e = new Error("Command failed: " + stderr); <del> e.killed = killed; <del> e.code = code; <del> e.signal = signal; <del> callback(e, stdout, stderr); <del> } <del> }); <add> var pid = child.pid; <add> <add> child.addListener("exit", exithandler); <ide> <ide> return child; <ide> }; <ide><path>test/simple/test-exec.js <ide> exec("ls /DOES_NOT_EXIST", function (err, stdout, stderr) { <ide> } <ide> }); <ide> <add> <add> <add>var sleeperStart = new Date(); <ide> exec("sleep 3", { timeout: 50 }, function (err, stdout, stderr) { <add> var diff = (new Date()) - sleeperStart; <add> console.log("sleep 3 with timeout 50 took %d ms", diff); <add> assert.ok(diff < 500); <ide> assert.ok(err); <ide> assert.ok(err.killed); <ide> assert.equal(err.signal, 'SIGKILL'); <ide> }); <ide> <ide> <del>var killMeTwice = exec("sleep 3", { timeout: 1000 }, function killMeTwiceCallback(err, stdout, stderr) { <del> assert.ok(err); <del> assert.ok(err.killed); <del> assert.equal(err.signal, 'SIGTERM'); <del>}); <add> <add> <add>var startSleep3 = new Date(); <add>var killMeTwice = exec("sleep 3", { timeout: 1000 }, killMeTwiceCallback); <add> <ide> process.nextTick(function(){ <add> console.log("kill pid %d", killMeTwice.pid); <add> // make sure there is no race condition in starting the process <add> // the PID SHOULD exist directly following the exec() call. <add> assert.equal('number', typeof killMeTwice._internal.pid); <add> // Kill the process <ide> killMeTwice.kill(); <ide> }); <ide> <add>function killMeTwiceCallback(err, stdout, stderr) { <add> var diff = (new Date()) - startSleep3; <add> // We should have already killed this process. Assert that <add> // the timeout still works and that we are getting the proper callback <add> // parameters. <add> assert.ok(err); <add> assert.ok(err.killed); <add> assert.equal(err.signal, 'SIGKILL'); <add> <add> // the timeout should still be in effect <add> console.log("'sleep 3' was already killed. Took %d ms", diff); <add> assert.ok(diff < 1500); <add>} <add> <add> <ide> <ide> exec('python -c "print 200000*\'C\'"', { maxBuffer: 1000 }, function (err, stdout, stderr) { <ide> assert.ok(err);
2
Javascript
Javascript
add hascrypto to worker-cleanexit-with-moduleload
d6a32cfe7ca32b8382d195a1dee460d78f749562
<ide><path>test/parallel/test-worker-cleanexit-with-moduleload.js <ide> 'use strict'; <del>require('../common'); <add>const common = require('../common'); <ide> <ide> // Harden the thread interactions on the exit path. <ide> // Ensure workers are able to bail out safe at <ide> require('../common'); <ide> // preferrably in the C++ land. <ide> <ide> const { Worker } = require('worker_threads'); <add>const modules = [ 'fs', 'assert', 'async_hooks', 'buffer', 'child_process', <add> 'net', 'http', 'os', 'path', 'v8', 'vm' <add>]; <add>if (common.hasCrypto) { <add> modules.push('https'); <add>} <add> <ide> for (let i = 0; i < 10; i++) { <del> new Worker("const modules = ['fs', 'assert', 'async_hooks'," + <del> "'buffer', 'child_process', 'net', 'http', 'https', 'os'," + <del> "'path', 'v8', 'vm'];" + <add> new Worker(`const modules = [${modules.map((m) => `'${m}'`)}];` + <ide> 'modules.forEach((module) => {' + <ide> 'const m = require(module);' + <ide> '});', { eval: true });
1
Javascript
Javascript
fix typo in {{partial}} docs
4cffb68da64abf56bbf34f6e3d5144a13b321287
<ide><path>packages/ember-handlebars/lib/helpers/partial.js <ide> require('ember-handlebars/ext'); <ide> {{partial user_info}} <ide> {{/with}} <ide> </script> <add> ``` <ide> <ide> The `data-template-name` attribute of a partial template <ide> is prefixed with an underscore.
1
Javascript
Javascript
replace var for (let|const) in utilities module
ca79fc5373f2fde4a3a3cfd86c34c3848df37a1a
<ide><path>lib/internal/util.js <ide> function deprecate(fn, msg, code) { <ide> if (code !== undefined && typeof code !== 'string') <ide> throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'code', 'string'); <ide> <del> var warned = false; <add> let warned = false; <ide> function deprecated(...args) { <ide> if (!warned) { <ide> warned = true; <ide> function assertCrypto() { <ide> // Return undefined if there is no match. <ide> function normalizeEncoding(enc) { <ide> if (!enc) return 'utf8'; <del> var retried; <add> let retried; <ide> while (true) { <ide> switch (enc) { <ide> case 'utf8': <ide> function filterDuplicateStrings(items, low) { <ide> } <ide> <ide> function cachedResult(fn) { <del> var result; <add> let result; <ide> return () => { <ide> if (result === undefined) <ide> result = fn(); <ide> function convertToValidSignal(signal) { <ide> <ide> function getConstructorOf(obj) { <ide> while (obj) { <del> var descriptor = Object.getOwnPropertyDescriptor(obj, 'constructor'); <add> const descriptor = Object.getOwnPropertyDescriptor(obj, 'constructor'); <ide> if (descriptor !== undefined && <ide> typeof descriptor.value === 'function' && <ide> descriptor.value.name !== '') { <ide> promisify.custom = kCustomPromisifiedSymbol; <ide> <ide> // The build-in Array#join is slower in v8 6.0 <ide> function join(output, separator) { <del> var str = ''; <add> let str = ''; <ide> if (output.length !== 0) { <ide> for (var i = 0; i < output.length - 1; i++) { <ide> // It is faster not to use a template string here <ide><path>lib/util.js <ide> const regExpToString = RegExp.prototype.toString; <ide> const dateToISOString = Date.prototype.toISOString; <ide> const errorToString = Error.prototype.toString; <ide> <del>var CIRCULAR_ERROR_MESSAGE; <add>let CIRCULAR_ERROR_MESSAGE; <ide> <ide> /* eslint-disable */ <ide> const strEscapeSequencesRegExp = /[\x00-\x1f\x27\x5c]/; <ide> function strEscape(str) { <ide> return `'${str}'`; <ide> if (str.length > 100) <ide> return `'${str.replace(strEscapeSequencesReplacer, escapeFn)}'`; <del> var result = ''; <del> var last = 0; <add> let result = ''; <add> let last = 0; <ide> for (var i = 0; i < str.length; i++) { <ide> const point = str.charCodeAt(i); <ide> if (point === 39 || point === 92 || point < 32) { <ide> function tryStringify(arg) { <ide> } <ide> <ide> function format(f) { <del> var i, tempStr; <add> let i, tempStr; <ide> if (typeof f !== 'string') { <ide> if (arguments.length === 0) return ''; <del> var res = ''; <add> let res = ''; <ide> for (i = 0; i < arguments.length - 1; i++) { <ide> res += inspect(arguments[i]); <ide> res += ' '; <ide> function format(f) { <ide> <ide> if (arguments.length === 1) return f; <ide> <del> var str = ''; <del> var a = 1; <del> var lastPos = 0; <add> let str = ''; <add> let a = 1; <add> let lastPos = 0; <ide> for (i = 0; i < f.length - 1; i++) { <ide> if (f.charCodeAt(i) === 37) { // '%' <ide> const nextChar = f.charCodeAt(++i); <ide> function debuglog(set) { <ide> set = set.toUpperCase(); <ide> if (!debugs[set]) { <ide> if (debugEnvRegex.test(set)) { <del> var pid = process.pid; <add> const pid = process.pid; <ide> debugs[set] = function() { <del> var msg = exports.format.apply(exports, arguments); <add> const msg = exports.format.apply(exports, arguments); <ide> console.error('%s %d: %s', set, pid, msg); <ide> }; <ide> } else { <ide> function formatValue(ctx, value, recurseTimes, ln) { <ide> } <ide> } <ide> <del> var keys; <del> var symbols = Object.getOwnPropertySymbols(value); <add> let keys; <add> let symbols = Object.getOwnPropertySymbols(value); <ide> <ide> // Look up the keys of the object. <ide> if (ctx.showHidden) { <ide> function formatValue(ctx, value, recurseTimes, ln) { <ide> const keyLength = keys.length + symbols.length; <ide> <ide> const { constructor, tag } = getIdentificationOf(value); <del> var prefix = ''; <add> let prefix = ''; <ide> if (constructor && tag && constructor !== tag) <ide> prefix = `${constructor} [${tag}] `; <ide> else if (constructor) <ide> prefix = `${constructor} `; <ide> else if (tag) <ide> prefix = `[${tag}] `; <ide> <del> var base = ''; <del> var formatter = formatObject; <del> var braces; <del> var noIterator = true; <del> var raw; <add> let base = ''; <add> let formatter = formatObject; <add> let braces; <add> let noIterator = true; <add> let raw; <ide> <ide> // Iterators and the rest are split to reduce checks <ide> if (value[Symbol.iterator]) { <ide> function formatPrimitive(fn, value, ctx) { <ide> // eslint-disable-next-line max-len <ide> const averageLineLength = Math.ceil(value.length / Math.ceil(value.length / minLineLength)); <ide> const divisor = Math.max(averageLineLength, MIN_LINE_LENGTH); <del> var res = ''; <add> let res = ''; <ide> if (readableRegExps[divisor] === undefined) { <ide> // Build a new RegExp that naturally breaks text into multiple lines. <ide> // <ide> function formatObject(ctx, value, recurseTimes, keys) { <ide> function formatSpecialArray(ctx, value, recurseTimes, keys, maxLength, valLen) { <ide> const output = []; <ide> const keyLen = keys.length; <del> var visibleLength = 0; <del> var i = 0; <add> let visibleLength = 0; <add> let i = 0; <ide> if (keyLen !== 0 && numberRegExp.test(keys[0])) { <ide> for (const key of keys) { <ide> if (visibleLength === maxLength) <ide> function formatSpecialArray(ctx, value, recurseTimes, keys, maxLength, valLen) { <ide> } else if (keys[keyLen - 1] !== `${valLen - 1}`) { <ide> const extra = []; <ide> // Only handle special keys <del> var key; <add> let key; <ide> for (i = keys.length - 1; i >= 0; i--) { <ide> key = keys[i]; <ide> if (numberRegExp.test(key) && +key < 2 ** 32 - 1) <ide> function formatTypedArray(ctx, value, recurseTimes, keys) { <ide> <ide> function formatSet(ctx, value, recurseTimes, keys) { <ide> const output = new Array(value.size + keys.length + (ctx.showHidden ? 1 : 0)); <del> var i = 0; <add> let i = 0; <ide> for (const v of value) <ide> output[i++] = formatValue(ctx, v, recurseTimes); <ide> // With `showHidden`, `length` will display as a hidden property for <ide> function formatSet(ctx, value, recurseTimes, keys) { <ide> <ide> function formatMap(ctx, value, recurseTimes, keys) { <ide> const output = new Array(value.size + keys.length + (ctx.showHidden ? 1 : 0)); <del> var i = 0; <add> let i = 0; <ide> for (const [k, v] of value) <ide> output[i++] = `${formatValue(ctx, k, recurseTimes)} => ` + <ide> formatValue(ctx, v, recurseTimes); <ide> function formatMap(ctx, value, recurseTimes, keys) { <ide> <ide> function formatCollectionIterator(preview, ctx, value, recurseTimes, <ide> visibleKeys, keys) { <del> var nextRecurseTimes = recurseTimes === null ? null : recurseTimes - 1; <del> var vals = preview(value, 100); <del> var output = []; <add> const nextRecurseTimes = recurseTimes === null ? null : recurseTimes - 1; <add> const vals = preview(value, 100); <add> const output = []; <ide> for (const o of vals) { <ide> output.push(formatValue(ctx, o, nextRecurseTimes)); <ide> } <ide> function formatSetIterator(ctx, value, recurseTimes, visibleKeys, keys) { <ide> } <ide> <ide> function formatPromise(ctx, value, recurseTimes, keys) { <del> var output; <add> let output; <ide> const [state, result] = getPromiseDetails(value); <ide> if (state === kPending) { <ide> output = ['<pending>']; <ide> function formatPromise(ctx, value, recurseTimes, keys) { <ide> } <ide> <ide> function formatProperty(ctx, value, recurseTimes, key, array) { <del> var name, str; <add> let name, str; <ide> const desc = Object.getOwnPropertyDescriptor(value, key) || <ide> { value: value[key], enumerable: true }; <ide> if (desc.value !== undefined) { <ide> function formatProperty(ctx, value, recurseTimes, key, array) { <ide> <ide> function reduceToSingleString(ctx, output, base, braces, addLn) { <ide> const breakLength = ctx.breakLength; <del> var i = 0; <add> let i = 0; <ide> if (ctx.compact === false) { <ide> const indentation = ' '.repeat(ctx.indentationLvl); <del> var res = `${base ? `${base} ` : ''}${braces[0]}\n${indentation} `; <add> let res = `${base ? `${base} ` : ''}${braces[0]}\n${indentation} `; <ide> for (; i < output.length - 1; i++) { <ide> res += `${output[i]},\n${indentation} `; <ide> } <ide> res += `${output[i]}\n${indentation}${braces[1]}`; <ide> return res; <ide> } <ide> if (output.length * 2 <= breakLength) { <del> var length = 0; <add> let length = 0; <ide> for (; i < output.length && length <= breakLength; i++) { <ide> if (ctx.colors) { <ide> length += removeColors(output[i]).length + 1; <ide> const months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', <ide> <ide> // 26 Feb 16:19:34 <ide> function timestamp() { <del> var d = new Date(); <del> var time = [pad(d.getHours()), <del> pad(d.getMinutes()), <del> pad(d.getSeconds())].join(':'); <add> const d = new Date(); <add> const time = [pad(d.getHours()), <add> pad(d.getMinutes()), <add> pad(d.getSeconds())].join(':'); <ide> return [d.getDate(), months[d.getMonth()], time].join(' '); <ide> } <ide> <ide> function _extend(target, source) { <ide> // Don't do anything if source isn't an object <ide> if (source === null || typeof source !== 'object') return target; <ide> <del> var keys = Object.keys(source); <del> var i = keys.length; <add> const keys = Object.keys(source); <add> let i = keys.length; <ide> while (i--) { <ide> target[keys[i]] = source[keys[i]]; <ide> }
2
Python
Python
adapt safe_eval for py3 ast module
ff26cec7eba491cf4cf48542b21f44932baf9572
<ide><path>numpy/lib/utils.py <ide> class SafeEval(object): <ide> <ide> """ <ide> <del> def visit(self, node, **kw): <del> cls = node.__class__ <del> meth = getattr(self,'visit'+cls.__name__,self.default) <del> return meth(node, **kw) <add> if sys.version_info[0] < 3: <add> def visit(self, node, **kw): <add> cls = node.__class__ <add> meth = getattr(self,'visit'+cls.__name__,self.default) <add> return meth(node, **kw) <ide> <del> def default(self, node, **kw): <del> raise SyntaxError("Unsupported source construct: %s" % node.__class__) <add> def default(self, node, **kw): <add> raise SyntaxError("Unsupported source construct: %s" <add> % node.__class__) <ide> <del> def visitExpression(self, node, **kw): <del> for child in node.getChildNodes(): <del> return self.visit(child, **kw) <add> def visitExpression(self, node, **kw): <add> for child in node.getChildNodes(): <add> return self.visit(child, **kw) <ide> <del> def visitConst(self, node, **kw): <del> return node.value <add> def visitConst(self, node, **kw): <add> return node.value <ide> <del> def visitDict(self, node,**kw): <del> return dict([(self.visit(k),self.visit(v)) for k,v in node.items]) <add> def visitDict(self, node,**kw): <add> return dict([(self.visit(k),self.visit(v)) for k,v in node.items]) <ide> <del> def visitTuple(self, node, **kw): <del> return tuple([self.visit(i) for i in node.nodes]) <add> def visitTuple(self, node, **kw): <add> return tuple([self.visit(i) for i in node.nodes]) <ide> <del> def visitList(self, node, **kw): <del> return [self.visit(i) for i in node.nodes] <add> def visitList(self, node, **kw): <add> return [self.visit(i) for i in node.nodes] <ide> <del> def visitUnaryAdd(self, node, **kw): <del> return +self.visit(node.getChildNodes()[0]) <add> def visitUnaryAdd(self, node, **kw): <add> return +self.visit(node.getChildNodes()[0]) <ide> <del> def visitUnarySub(self, node, **kw): <del> return -self.visit(node.getChildNodes()[0]) <add> def visitUnarySub(self, node, **kw): <add> return -self.visit(node.getChildNodes()[0]) <ide> <del> def visitName(self, node, **kw): <del> if node.name == 'False': <del> return False <del> elif node.name == 'True': <del> return True <del> elif node.name == 'None': <del> return None <del> else: <del> raise SyntaxError("Unknown name: %s" % node.name) <add> def visitName(self, node, **kw): <add> if node.name == 'False': <add> return False <add> elif node.name == 'True': <add> return True <add> elif node.name == 'None': <add> return None <add> else: <add> raise SyntaxError("Unknown name: %s" % node.name) <add> else: <add> <add> def visit(self, node): <add> cls = node.__class__ <add> meth = getattr(self, 'visit' + cls.__name__, self.default) <add> return meth(node) <add> <add> def default(self, node): <add> raise SyntaxError("Unsupported source construct: %s" <add> % node.__class__) <add> <add> def visitExpression(self, node): <add> return self.visit(node.body) <add> <add> def visitNum(self, node): <add> return node.n <add> <add> def visitStr(self, node): <add> return node.s <add> <add> def visitBytes(self, node): <add> return node.s <add> <add> def visitDict(self, node,**kw): <add> return dict([(self.visit(k), self.visit(v)) <add> for k, v in zip(node.keys, node.values)]) <add> <add> def visitTuple(self, node): <add> return tuple([self.visit(i) for i in node.elts]) <add> <add> def visitList(self, node): <add> return [self.visit(i) for i in node.elts] <add> <add> def visitUnaryOp(self, node): <add> import ast <add> if isinstance(node.op, ast.UAdd): <add> return +self.visit(node.operand) <add> elif isinstance(node.op, ast.USub): <add> return -self.visit(node.operand) <add> else: <add> raise SyntaxError("Unknown unary op: %r" % node.op) <add> <add> def visitName(self, node): <add> if node.id == 'False': <add> return False <add> elif node.id == 'True': <add> return True <add> elif node.id == 'None': <add> return None <add> else: <add> raise SyntaxError("Unknown name: %s" % node.id) <ide> <ide> def safe_eval(source): <ide> """ <ide> def safe_eval(source): <ide> <ide> """ <ide> # Local import to speed up numpy's import time. <del> import compiler <add> try: <add> import compiler <add> except ImportError: <add> import ast as compiler <ide> walker = SafeEval() <ide> try: <del> ast = compiler.parse(source, "eval") <add> ast = compiler.parse(source, mode="eval") <ide> except SyntaxError, err: <ide> raise <ide> try:
1
PHP
PHP
move viewvars assignment after callbacks
457dd73ed069125f4f8ff69fbb1c781f00592045
<ide><path>lib/Cake/View/Helper/CacheHelper.php <ide> protected function _writeFile($content, $timestamp, $useCallbacks = false) { <ide> $controller->helpers = $this->helpers = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->helpers)) . '\')); <ide> $controller->layout = $this->layout = \'' . $this->_View->layout. '\'; <ide> $controller->theme = $this->theme = \'' . $this->_View->theme . '\'; <del> $controller->viewVars = $this->viewVars = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->viewVars)) . '\')); <add> $controller->viewVars = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->viewVars)) . '\')); <ide> Router::setRequestInfo($controller->request); <ide> $this->request = $request;'; <ide> <ide> protected function _writeFile($content, $timestamp, $useCallbacks = false) { <ide> } <ide> <ide> $file .= ' <add> $this->viewVars = $controller->viewVars; <ide> $this->loadHelpers(); <ide> extract($this->viewVars, EXTR_SKIP); <ide> ?>';
1
Text
Text
fix typo in assert.md
7d9976237ae0567e5f19941a6fd590c3bc88d009
<ide><path>doc/api/assert.md <ide> assert.fail(1, 2, new TypeError('need array')); <ide> // TypeError: need array <ide> ``` <ide> <del>*Note*: Is the last two cases `actual`, `expected`, and `operator` have no <add>*Note*: In the last two cases `actual`, `expected`, and `operator` have no <ide> influence on the error message. <ide> <ide> ```js
1
Javascript
Javascript
use correct abbreviations for months and weekdays
0d5da35702747199ef0e32d4f671e836392466f1
<ide><path>src/locale/nb.js <ide> //! locale : Norwegian Bokmål [nb] <ide> //! authors : Espen Hovlandsdal : https://github.com/rexxars <ide> //! Sigurd Gartmann : https://github.com/sigurdga <add>//! Stephen Ramthun : https://github.com/stephenramthun <ide> <ide> import moment from '../moment'; <ide> <ide> export default moment.defineLocale('nb', { <ide> months : 'januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember'.split('_'), <del> monthsShort : 'jan._feb._mars_april_mai_juni_juli_aug._sep._okt._nov._des.'.split('_'), <add> monthsShort : 'jan._feb._mars_apr._mai_juni_juli_aug._sep._okt._nov._des.'.split('_'), <ide> monthsParseExact : true, <ide> weekdays : 'søndag_mandag_tirsdag_onsdag_torsdag_fredag_lørdag'.split('_'), <ide> weekdaysShort : 'sø._ma._ti._on._to._fr._lø.'.split('_'), <ide><path>src/locale/nn.js <ide> //! moment.js locale configuration <ide> //! locale : Nynorsk [nn] <del>//! author : https://github.com/mechuwind <add>//! authors : https://github.com/mechuwind <add>//! Stephen Ramthun : https://github.com/stephenramthun <ide> <ide> import moment from '../moment'; <ide> <ide> export default moment.defineLocale('nn', { <ide> months : 'januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember'.split('_'), <del> monthsShort : 'jan_feb_mar_apr_mai_jun_jul_aug_sep_okt_nov_des'.split('_'), <add> monthsShort : 'jan._feb._mars_apr._mai_juni_juli_aug._sep._okt._nov._des.'.split('_'), <add> monthsParseExact : true, <ide> weekdays : 'sundag_måndag_tysdag_onsdag_torsdag_fredag_laurdag'.split('_'), <del> weekdaysShort : 'sun_mån_tys_ons_tor_fre_lau'.split('_'), <del> weekdaysMin : 'su_må_ty_on_to_fr_lø'.split('_'), <add> weekdaysShort : 'su._må._ty._on._to._fr._lau.'.split('_'), <add> weekdaysMin : 'su_må_ty_on_to_fr_la'.split('_'), <add> weekdaysParseExact : true, <ide> longDateFormat : { <ide> LT : 'HH:mm', <ide> LTS : 'HH:mm:ss', <ide><path>src/test/locale/nb.js <ide> import moment from '../../moment'; <ide> localeModule('nb'); <ide> <ide> test('parse', function (assert) { <del> var tests = 'januar jan._februar feb._mars mars_april april_mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), <add> var tests = 'januar jan._februar feb._mars mars_april apr._mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), <ide> i; <ide> function equalTest(input, mmm, i) { <ide> assert.equal(moment(input, mmm).month(), i, input + ' should be month ' + (i + 1)); <ide> test('format ordinal', function (assert) { <ide> }); <ide> <ide> test('format month', function (assert) { <del> var expected = 'januar jan._februar feb._mars mars_april april_mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), i; <add> var expected = 'januar jan._februar feb._mars mars_april apr._mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), i; <ide> for (i = 0; i < expected.length; i++) { <ide> assert.equal(moment([2011, i, 1]).format('MMMM MMM'), expected[i], expected[i]); <ide> } <ide><path>src/test/locale/nn.js <ide> import moment from '../../moment'; <ide> localeModule('nn'); <ide> <ide> test('parse', function (assert) { <del> var tests = 'januar jan_februar feb_mars mar_april apr_mai mai_juni jun_juli jul_august aug_september sep_oktober okt_november nov_desember des'.split('_'), i; <add> var tests = 'januar jan._februar feb._mars mars_april apr._mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), i; <ide> function equalTest(input, mmm, i) { <ide> assert.equal(moment(input, mmm).month(), i, input + ' should be month ' + (i + 1)); <ide> } <ide> test('parse', function (assert) { <ide> test('format', function (assert) { <ide> var a = [ <ide> ['dddd, MMMM Do YYYY, h:mm:ss a', 'sundag, februar 14. 2010, 3:25:50 pm'], <del> ['ddd, hA', 'sun, 3PM'], <del> ['M Mo MM MMMM MMM', '2 2. 02 februar feb'], <add> ['ddd, hA', 'su., 3PM'], <add> ['M Mo MM MMMM MMM', '2 2. 02 februar feb.'], <ide> ['YYYY YY', '2010 10'], <ide> ['D Do DD', '14 14. 14'], <del> ['d do dddd ddd dd', '0 0. sundag sun su'], <add> ['d do dddd ddd dd', '0 0. sundag su. su'], <ide> ['DDD DDDo DDDD', '45 45. 045'], <ide> ['w wo ww', '6 6. 06'], <ide> ['h hh', '3 03'], <ide> test('format', function (assert) { <ide> ['LLL', '14. februar 2010 kl. 15:25'], <ide> ['LLLL', 'sundag 14. februar 2010 kl. 15:25'], <ide> ['l', '14.2.2010'], <del> ['ll', '14. feb 2010'], <del> ['lll', '14. feb 2010 kl. 15:25'], <del> ['llll', 'sun 14. feb 2010 kl. 15:25'] <add> ['ll', '14. feb. 2010'], <add> ['lll', '14. feb. 2010 kl. 15:25'], <add> ['llll', 'su. 14. feb. 2010 kl. 15:25'] <ide> ], <ide> b = moment(new Date(2010, 1, 14, 15, 25, 50, 125)), <ide> i; <ide> test('format ordinal', function (assert) { <ide> }); <ide> <ide> test('format month', function (assert) { <del> var expected = 'januar jan_februar feb_mars mar_april apr_mai mai_juni jun_juli jul_august aug_september sep_oktober okt_november nov_desember des'.split('_'), i; <add> var expected = 'januar jan._februar feb._mars mars_april apr._mai mai_juni juni_juli juli_august aug._september sep._oktober okt._november nov._desember des.'.split('_'), i; <ide> for (i = 0; i < expected.length; i++) { <ide> assert.equal(moment([2011, i, 1]).format('MMMM MMM'), expected[i], expected[i]); <ide> } <ide> }); <ide> <ide> test('format week', function (assert) { <del> var expected = 'sundag sun su_måndag mån må_tysdag tys ty_onsdag ons on_torsdag tor to_fredag fre fr_laurdag lau lø'.split('_'), i; <add> var expected = 'sundag su. su_måndag må. må_tysdag ty. ty_onsdag on. on_torsdag to. to_fredag fr. fr_laurdag lau. la'.split('_'), i; <ide> for (i = 0; i < expected.length; i++) { <ide> assert.equal(moment([2011, 0, 2 + i]).format('dddd ddd dd'), expected[i], expected[i]); <ide> }
4
Mixed
Text
add hooks to activejob around retries and discards
26dc9bc8ee8639b1ad393f4f3f4fa3e1a397b70e
<ide><path>activejob/CHANGELOG.md <add>* Added `enqueue_retry.active_job`, `retry_stopped.active_job`, and `discard.active_job` hooks. <add> <add> *steves* <add> <ide> * Allow `assert_performed_with` to be called without a block. <ide> <ide> *bogdanvlviv* <ide><path>activejob/lib/active_job/exceptions.rb <ide> module ClassMethods <ide> # end <ide> def retry_on(*exceptions, wait: 3.seconds, attempts: 5, queue: nil, priority: nil) <ide> rescue_from(*exceptions) do |error| <add> payload = { <add> job: self, <add> adapter: self.class.queue_adapter, <add> error: error, <add> } <add> <ide> if executions < attempts <del> logger.error "Retrying #{self.class} in #{wait} seconds, due to a #{error.class}. The original exception was #{error.cause.inspect}." <del> retry_job wait: determine_delay(wait), queue: queue, priority: priority <add> ActiveSupport::Notifications.instrument("enqueue_retry.active_job", payload) do <add> logger.error "Retrying #{self.class} in #{wait} seconds, due to a #{error.class}. The original exception was #{error.cause.inspect}." <add> retry_job wait: determine_delay(wait), queue: queue, priority: priority <add> end <ide> else <ide> if block_given? <del> yield self, error <add> ActiveSupport::Notifications.instrument("retry_stopped.active_job", payload) do <add> yield self, error <add> end <ide> else <ide> logger.error "Stopped retrying #{self.class} due to a #{error.class}, which reoccurred on #{executions} attempts. The original exception was #{error.cause.inspect}." <ide> raise error <ide> def retry_on(*exceptions, wait: 3.seconds, attempts: 5, queue: nil, priority: ni <ide> # end <ide> def discard_on(*exceptions) <ide> rescue_from(*exceptions) do |error| <del> if block_given? <del> yield self, error <del> else <del> logger.error "Discarded #{self.class} due to a #{error.class}. The original exception was #{error.cause.inspect}." <add> payload = { <add> job: self, <add> adapter: self.class.queue_adapter, <add> error: error, <add> } <add> <add> ActiveSupport::Notifications.instrument("discard.active_job", payload) do <add> if block_given? <add> yield self, error <add> else <add> logger.error "Discarded #{self.class} due to a #{error.class}. The original exception was #{error.cause.inspect}." <add> end <ide> end <ide> end <ide> end <ide><path>guides/source/active_support_instrumentation.md <ide> Active Job <ide> | `:adapter` | QueueAdapter object processing the job | <ide> | `:job` | Job object | <ide> <add>### enqueue_retry.active_job <add> <add>| Key | Value | <add>| ------------ | -------------------------------------- | <add>| `:job` | Job object | <add>| `:adapter` | QueueAdapter object processing the job | <add>| `:error` | The error that caused the retry | <add> <ide> ### perform_start.active_job <ide> <ide> | Key | Value | <ide> Active Job <ide> | `:adapter` | QueueAdapter object processing the job | <ide> | `:job` | Job object | <ide> <add>### retry_stopped.active_job <add> <add>| Key | Value | <add>| ------------ | -------------------------------------- | <add>| `:adapter` | QueueAdapter object processing the job | <add>| `:job` | Job object | <add>| `:error` | The error that caused the retry | <add> <add>### discard.active_job <add> <add>| Key | Value | <add>| ------------ | -------------------------------------- | <add>| `:adapter` | QueueAdapter object processing the job | <add>| `:job` | Job object | <add>| `:error` | The error that caused the discard | <add> <ide> Action Cable <ide> ------------ <ide>
3
Text
Text
improve ios' running on device instructions
cdcd6204802f1be73288069af64d4c4801c3af37
<ide><path>docs/RunningOnDevice.md <ide> Installing an app on an iOS device requires a Mac, an Apple ID, and a USB cable. <ide> <ide> <block class="mac ios" /> <ide> <del>Connect your device to your Mac via USB, then open Xcode. In the project navigator, choose your device from the Product > Destination toolbar menu. Xcode will then register your device for development. <add>Connect your device to your Mac via USB, then open Xcode. Navigate into `ios` folder of your project and open the file ending .xcodeproj inside it. In the project navigator, choose your device from the Product > Destination toolbar menu. Xcode will then register your device for development. <ide> <ide> > If you run into any issues, please take a look at Apple's [Launching Your App on a Device docs](https://developer.apple.com/library/content/documentation/IDEs/Conceptual/AppDistributionGuide/LaunchingYourApponDevices/LaunchingYourApponDevices.html#//apple_ref/doc/uid/TP40012582-CH27-SW4). <ide>
1
Javascript
Javascript
add unit tests
3f0b6bbca7406738e304cc404278f98f2d236b8c
<ide><path>test/moment/is_valid.js <ide> exports.is_valid = { <ide> test.equal(moment([2000, 1, 29, 23, 59, 59, 1000]).invalidAt(), 6, '1000 is invalid millisecond'); <ide> test.equal(moment([2000, 1, 29, 23, 59, 59, 999]).invalidAt(), -1, '-1 if everything is fine'); <ide> test.done(); <del> } <add> }, <add> <add> "valid Unix timestamp" : function (test) { <add> test.expect(22); <add> test.equal(moment(1371065286, "X").isValid(), true, 'number integer'); <add> test.equal(moment(1379066897., "X").isValid(), true, 'number trailing .'); <add> test.equal(moment(1379066897.0, "X").isValid(), true, 'number whole 1dp'); <add> test.equal(moment(1379066897.7, "X").isValid(), true, 'number 1dp'); <add> test.equal(moment(1379066897.00, "X").isValid(), true, 'number whole 2dp'); <add> test.equal(moment(1379066897.07, "X").isValid(), true, 'number 2dp'); <add> test.equal(moment(1379066897.17, "X").isValid(), true, 'number 2dp'); <add> test.equal(moment(1379066897.000, "X").isValid(), true, 'number whole 3dp'); <add> test.equal(moment(1379066897.007, "X").isValid(), true, 'number 3dp'); <add> test.equal(moment(1379066897.017, "X").isValid(), true, 'number 3dp'); <add> test.equal(moment(1379066897.157, "X").isValid(), true, 'number 3dp'); <add> test.equal(moment("1371065286", "X").isValid(), true, 'string integer'); <add> test.equal(moment("1379066897.", "X").isValid(), true, 'string trailing .'); <add> test.equal(moment("1379066897.0", "X").isValid(), true, 'string whole 1dp'); <add> test.equal(moment("1379066897.7", "X").isValid(), true, 'string 1dp'); <add> test.equal(moment("1379066897.00", "X").isValid(), true, 'string whole 2dp'); <add> test.equal(moment("1379066897.07", "X").isValid(), true, 'string 2dp'); <add> test.equal(moment("1379066897.17", "X").isValid(), true, 'string 2dp'); <add> test.equal(moment("1379066897.000", "X").isValid(), true, 'string whole 3dp'); <add> test.equal(moment("1379066897.007", "X").isValid(), true, 'string 3dp'); <add> test.equal(moment("1379066897.017", "X").isValid(), true, 'string 3dp'); <add> test.equal(moment("1379066897.157", "X").isValid(), true, 'string 3dp'); <add> test.done(); <add> }, <ide> <add> "invalid Unix timestamp" : function (test) { <add> test.expect(8); <add> test.equal(moment(undefined, "X").isValid(), false, 'undefined'); <add> test.equal(moment("undefined", "X").isValid(), false, 'string undefined'); <add> try { <add> test.equal(moment(null, "X").isValid(), false, 'null'); <add> } catch(e) { <add> test.ok(true, 'null'); <add> } <add> <add> test.equal(moment("null", "X").isValid(), false, 'string null'); <add> test.equal(moment([], "X").isValid(), false, 'array'); <add> test.equal(moment("{}", "X").isValid(), false, 'object'); <add> test.equal(moment("", "X").isValid(), false, 'string empty'); <add> test.equal(moment(" ", "X").isValid(), false, 'string space'); <add> test.done(); <add> } <ide> };
1
Text
Text
fix changelog indent [ci skip]
caf2390aee4ea0f097a219ee414d705dda63cc33
<ide><path>activerecord/CHANGELOG.md <ide> <ide> * Added ActiveRecord::QueryMethods#rewhere which will overwrite an existing, named where condition. <ide> <del> Examples: <del> <del> Post.where(trashed: true).where(trashed: false) #=> WHERE `trashed` = 1 AND `trashed` = 0 <del> Post.where(trashed: true).rewhere(trashed: false) #=> WHERE `trashed` = 0 <del> Post.where(active: true).where(trashed: true).rewhere(trashed: false) #=> WHERE `active` = 1 AND `trashed` = 0 <add> Examples: <ide> <del> *DHH* <add> Post.where(trashed: true).where(trashed: false) #=> WHERE `trashed` = 1 AND `trashed` = 0 <add> Post.where(trashed: true).rewhere(trashed: false) #=> WHERE `trashed` = 0 <add> Post.where(active: true).where(trashed: true).rewhere(trashed: false) #=> WHERE `active` = 1 AND `trashed` = 0 <add> <add> *DHH* <ide> <ide> * Extend ActiveRecord::Base#cache_key to take an optional list of timestamp attributes of which the highest will be used. <ide>
1
Text
Text
use systemd drop-ins instead of copying
2d51d71561565987fc6a600234f2e2d15e0ecf31
<ide><path>docs/sources/articles/systemd.md <ide> This example overrides the default `docker.service` file. <ide> If you are behind a HTTP proxy server, for example in corporate settings, <ide> you will need to add this configuration in the Docker systemd service file. <ide> <del>Copy file `/usr/lib/systemd/system/docker.service` to `/etc/systemd/system/docker/service`. <add>First, create a systemd drop-in directory for the docker service: <ide> <del>Add the following to the `[Service]` section in the new file: <add> mkdir /etc/systemd/system/docker.service.d <ide> <add>Now create a file called `/etc/systemd/system/docker.service.d/http-proxy.conf` <add>that adds the `HTTP_PROXY` environment variable: <add> <add> [Service] <ide> Environment="HTTP_PROXY=http://proxy.example.com:80/" <ide> <ide> If you have internal Docker registries that you need to contact without
1
Javascript
Javascript
improve test case
23728e10bd23fed8bcecaf56ff5a5c030d86dfc0
<ide><path>test/WatchSuspend.test.js <ide> describe("WatchSuspend", () => { <ide> watching.resume(); <ide> }); <ide> <del> it("should not ignore changes during resumed compilation", async () => { <del> // aggregateTimeout must be long enough for this test <del> // So set-up new watcher and wait when initial compilation is done <del> await new Promise(resolve => { <del> watching.close(); <del> watching = compiler.watch({ aggregateTimeout: 1000 }, resolve); <del> }); <del> return new Promise(resolve => { <del> watching.suspend(); <del> fs.writeFileSync(filePath, "'baz'", "utf-8"); <del> <del> // Run resume between "changed" and "aggregated" events <del> setTimeout(() => { <del> watching.resume(); <del> <del> setTimeout(() => { <del> expect(fs.readFileSync(outputPath, "utf-8")).toContain("'baz'"); <del> resolve(); <del> }, 2000); <del> }, 200); <del> }); <del> }); <add> for (const changeBefore of [false, true]) <add> for (const delay of [200, 1500]) { <add> // eslint-disable-next-line no-loop-func <add> it(`should not ignore changes during resumed compilation (changeBefore: ${changeBefore}, delay: ${delay}ms)`, async () => { <add> // aggregateTimeout must be long enough for this test <add> // So set-up new watcher and wait when initial compilation is done <add> await new Promise(resolve => { <add> watching.close(() => { <add> watching = compiler.watch({ aggregateTimeout: 1000 }, () => { <add> resolve(); <add> }); <add> }); <add> }); <add> return new Promise(resolve => { <add> if (changeBefore) fs.writeFileSync(filePath, "'bar'", "utf-8"); <add> setTimeout(() => { <add> watching.suspend(); <add> fs.writeFileSync(filePath, "'baz'", "utf-8"); <add> <add> onChange = "throw"; <add> setTimeout(() => { <add> onChange = () => { <add> expect(fs.readFileSync(outputPath, "utf-8")).toContain( <add> "'baz'" <add> ); <add> expect( <add> compiler.modifiedFiles && <add> Array.from(compiler.modifiedFiles).sort() <add> ).toEqual([filePath]); <add> expect( <add> compiler.removedFiles && Array.from(compiler.removedFiles) <add> ).toEqual([]); <add> onChange = null; <add> resolve(); <add> }; <add> watching.resume(); <add> }, delay); <add> }, 200); <add> }); <add> }); <add> } <ide> <ide> it("should not drop changes when suspended", done => { <ide> const aggregateTimeout = 50;
1
Javascript
Javascript
make compilation.chunks a set
7a43a27eda7ac7c3f6f64abe7b477db7dd8c8e09
<ide><path>lib/Compilation.js <ide> class Compilation { <ide> <ide> /** @type {SyncHook} */ <ide> beforeChunks: new SyncHook([]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> afterChunks: new SyncHook(["chunks"]), <ide> <ide> /** @type {SyncBailHook<Iterable<Module>>} */ <ide> class Compilation { <ide> /** @type {SyncHook<Iterable<Module>>} */ <ide> afterOptimizeModules: new SyncHook(["modules"]), <ide> <del> /** @type {SyncBailHook<Chunk[], ChunkGroup[]>} */ <add> /** @type {SyncBailHook<Iterable<Chunk>, ChunkGroup[]>} */ <ide> optimizeChunks: new SyncBailHook(["chunks", "chunkGroups"]), <del> /** @type {SyncHook<Chunk[], ChunkGroup[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>, ChunkGroup[]>} */ <ide> afterOptimizeChunks: new SyncHook(["chunks", "chunkGroups"]), <ide> <del> /** @type {AsyncSeriesHook<Chunk[], Iterable<Module>>} */ <add> /** @type {AsyncSeriesHook<Iterable<Chunk>, Iterable<Module>>} */ <ide> optimizeTree: new AsyncSeriesHook(["chunks", "modules"]), <del> /** @type {SyncHook<Chunk[], Iterable<Module>>} */ <add> /** @type {SyncHook<Iterable<Chunk>, Iterable<Module>>} */ <ide> afterOptimizeTree: new SyncHook(["chunks", "modules"]), <ide> <del> /** @type {SyncBailHook<Chunk[], Iterable<Module>>} */ <add> /** @type {SyncBailHook<Iterable<Chunk>, Iterable<Module>>} */ <ide> optimizeChunkModules: new SyncBailHook(["chunks", "modules"]), <del> /** @type {SyncHook<Chunk[], Iterable<Module>>} */ <add> /** @type {SyncHook<Iterable<Chunk>, Iterable<Module>>} */ <ide> afterOptimizeChunkModules: new SyncHook(["chunks", "modules"]), <ide> /** @type {SyncBailHook} */ <ide> shouldRecord: new SyncBailHook([]), <ide> class Compilation { <ide> /** @type {SyncHook<Iterable<Module>>} */ <ide> afterOptimizeModuleIds: new SyncHook(["modules"]), <ide> <del> /** @type {SyncHook<Chunk[], any>} */ <add> /** @type {SyncHook<Iterable<Chunk>, any>} */ <ide> reviveChunks: new SyncHook(["chunks", "records"]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> beforeChunkIds: new SyncHook(["chunks"]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> chunkIds: new SyncHook(["chunks"]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> optimizeChunkIds: new SyncHook(["chunks"]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> afterOptimizeChunkIds: new SyncHook(["chunks"]), <ide> <ide> /** @type {SyncHook<Iterable<Module>, any>} */ <ide> recordModules: new SyncHook(["modules", "records"]), <del> /** @type {SyncHook<Chunk[], any>} */ <add> /** @type {SyncHook<Iterable<Chunk>, any>} */ <ide> recordChunks: new SyncHook(["chunks", "records"]), <ide> <ide> /** @type {SyncHook} */ <ide> class Compilation { <ide> shouldGenerateChunkAssets: new SyncBailHook([]), <ide> /** @type {SyncHook} */ <ide> beforeChunkAssets: new SyncHook([]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> additionalChunkAssets: new SyncHook(["chunks"]), <ide> <ide> /** @type {AsyncSeriesHook} */ <ide> additionalAssets: new AsyncSeriesHook([]), <del> /** @type {AsyncSeriesHook<Chunk[]>} */ <add> /** @type {AsyncSeriesHook<Iterable<Chunk>>} */ <ide> optimizeChunkAssets: new AsyncSeriesHook(["chunks"]), <del> /** @type {SyncHook<Chunk[]>} */ <add> /** @type {SyncHook<Iterable<Chunk>>} */ <ide> afterOptimizeChunkAssets: new SyncHook(["chunks"]), <ide> /** @type {AsyncSeriesHook<CompilationAssets>} */ <ide> optimizeAssets: new AsyncSeriesHook(["assets"]), <ide> class Compilation { <ide> this.entryDependencies = new Map(); <ide> /** @type {Map<string, Entrypoint>} */ <ide> this.entrypoints = new Map(); <del> /** @type {Chunk[]} */ <del> this.chunks = []; <add> /** @type {Set<Chunk>} */ <add> this.chunks = new Set(); <add> arrayToSetDeprecation(this.chunks, "Compilation.chunks"); <ide> /** @type {ChunkGroup[]} */ <ide> this.chunkGroups = []; <ide> /** @type {Map<string, ChunkGroup>} */ <ide> class Compilation { <ide> <ide> unseal() { <ide> this.hooks.unseal.call(); <del> this.chunks.length = 0; <add> this.chunks.clear(); <ide> this.chunkGroups.length = 0; <ide> this.namedChunks.clear(); <ide> this.namedChunkGroups.clear(); <ide> class Compilation { <ide> } <ide> } <ide> const chunk = new Chunk(name); <del> this.chunks.push(chunk); <add> this.chunks.add(chunk); <ide> ChunkGraph.setChunkGraphForChunk(chunk, this.chunkGraph); <ide> if (name) { <ide> this.namedChunks.set(name, chunk); <ide> class Compilation { <ide> for (const chunkGroup of allCreatedChunkGroups) { <ide> if (chunkGroup.getNumberOfParents() === 0) { <ide> for (const chunk of chunkGroup.chunks) { <del> const idx = this.chunks.indexOf(chunk); <del> if (idx >= 0) this.chunks.splice(idx, 1); <add> this.chunks.delete(chunk); <ide> chunkGraph.disconnectChunk(chunk); <ide> } <ide> chunkGraph.disconnectChunkGroup(chunkGroup); <ide> class Compilation { <ide> chunkGroup.sortItems(); <ide> } <ide> <del> this.chunks.sort(byId); <del> <ide> /** <ide> * Used to sort errors and warnings in compilation. this.warnings, and <ide> * this.errors contribute to the compilation hash and therefore should be <ide> class Compilation { <ide> ); <ide> } <ide> // clone needed as sort below is inplace mutation <del> const chunks = this.chunks.slice(); <add> const chunks = Array.from(this.chunks); <ide> /** <ide> * sort here will bring all "falsy" values to the beginning <ide> * this is needed as the "hasRuntime()" chunks are dependent on the <ide> class Compilation { <ide> const cachedSourceMap = new Map(); <ide> /** @type {Map<string, {hash: string, source: Source, chunk: Chunk}>} */ <ide> const alreadyWrittenFiles = new Map(); <del> for (let i = 0; i < this.chunks.length; i++) { <del> const chunk = this.chunks[i]; <add> for (const chunk of this.chunks) { <ide> chunk.files = []; <ide> let source; <ide> let file; <ide> class Compilation { <ide> usedIds.add(moduleId); <ide> } <ide> <del> const chunks = this.chunks; <del> for (let indexChunk = 0; indexChunk < chunks.length; indexChunk++) { <del> const chunk = chunks[indexChunk]; <del> if (chunks.indexOf(chunk) !== indexChunk) { <del> throw new Error( <del> `checkConstraints: duplicate chunk in compilation ${chunk.debugId}` <del> ); <del> } <add> for (const chunk of this.chunks) { <ide> for (const module of chunkGraph.getChunkModulesIterable(chunk)) { <ide> if (!this.modules.has(module)) { <ide> throw new Error( <ide><path>lib/HotModuleReplacementPlugin.js <ide> const Template = require("./Template"); <ide> const ConstDependency = require("./dependencies/ConstDependency"); <ide> const ModuleHotAcceptDependency = require("./dependencies/ModuleHotAcceptDependency"); <ide> const ModuleHotDeclineDependency = require("./dependencies/ModuleHotDeclineDependency"); <add>const { find } = require("./util/SetHelpers"); <ide> const { compareModulesById } = require("./util/comparators"); <ide> <ide> /** @typedef {import("./Compiler")} Compiler */ <ide> module.exports = class HotModuleReplacementPlugin { <ide> }; <ide> for (const key of Object.keys(records.chunkHashs)) { <ide> const chunkId = isNaN(+key) ? key : +key; <del> const currentChunk = compilation.chunks.find( <add> const currentChunk = find( <add> compilation.chunks, <ide> chunk => chunk.id === chunkId <ide> ); <ide> if (currentChunk) { <ide><path>lib/LibManifestPlugin.js <ide> class LibManifestPlugin { <ide> (compilation, callback) => { <ide> const moduleGraph = compilation.moduleGraph; <ide> asyncLib.forEach( <del> compilation.chunks, <add> Array.from(compilation.chunks), <ide> (chunk, callback) => { <ide> if (!chunk.isOnlyInitial()) { <ide> callback(); <ide><path>lib/Stats.js <ide> class Stats { <ide> return obj; <ide> }; <ide> if (showChunks) { <del> obj.chunks = compilation.chunks.map(chunk => { <add> obj.chunks = Array.from(compilation.chunks).map(chunk => { <ide> const parents = new Set(); <ide> const children = new Set(); <ide> const siblings = new Set(); <ide><path>lib/ids/ChunkModuleIdRangePlugin.js <ide> <ide> "use strict"; <ide> <add>const { find } = require("../util/SetHelpers"); <ide> const { <ide> compareModulesByPreOrderIndexOrIdentifier, <ide> compareModulesByPostOrderIndexOrIdentifier <ide> class ChunkModuleIdRangePlugin { <ide> const moduleGraph = compilation.moduleGraph; <ide> compilation.hooks.moduleIds.tap("ChunkModuleIdRangePlugin", modules => { <ide> const chunkGraph = compilation.chunkGraph; <del> const chunk = compilation.chunks.find( <add> const chunk = find( <add> compilation.chunks, <ide> chunk => chunk.name === options.name <ide> ); <ide> if (!chunk) { <ide><path>lib/optimize/AggressiveMergingPlugin.js <ide> class AggressiveMergingPlugin { <ide> const chunkGraph = compilation.chunkGraph; <ide> /** @type {{a: Chunk, b: Chunk, improvement: number}[]} */ <ide> let combinations = []; <del> chunks.forEach((a, idx) => { <del> if (a.canBeInitial()) return; <del> for (let i = 0; i < idx; i++) { <del> const b = chunks[i]; <add> for (const a of chunks) { <add> if (a.canBeInitial()) continue; <add> for (const b of chunks) { <ide> if (b.canBeInitial()) continue; <add> if (b === a) break; <ide> if (!chunkGraph.canChunksBeIntegrated(a, b)) { <ide> continue; <ide> } <ide> class AggressiveMergingPlugin { <ide> improvement <ide> }); <ide> } <del> }); <add> } <ide> <ide> combinations.sort((a, b) => { <ide> return b.improvement - a.improvement; <ide> class AggressiveMergingPlugin { <ide> if (pair.improvement < minSizeReduce) return; <ide> <ide> chunkGraph.integrateChunks(pair.b, pair.a); <del> chunks.splice(chunks.indexOf(pair.a), 1); <add> compilation.chunks.delete(pair.a); <ide> return true; <ide> } <ide> ); <ide><path>lib/optimize/AggressiveSplittingPlugin.js <ide> class AggressiveSplittingPlugin { <ide> // for any chunk which isn't splitted yet, split it and create a new entry <ide> // start with the biggest chunk <ide> const cmpFn = compareModulesById(chunkGraph); <del> const sortedChunks = chunks.slice().sort((a, b) => { <add> const sortedChunks = Array.from(chunks).sort((a, b) => { <ide> const diff1 = <ide> chunkGraph.getChunkModulesSize(b) - <ide> chunkGraph.getChunkModulesSize(a); <ide><path>lib/optimize/LimitChunkCountPlugin.js <ide> class LimitChunkCountPlugin { <ide> const maxChunks = options.maxChunks; <ide> if (!maxChunks) return; <ide> if (maxChunks < 1) return; <del> if (chunks.length <= maxChunks) return; <add> if (compilation.chunks.size <= maxChunks) return; <ide> <del> const sortedExtendedPairCombinations = chunks <add> const sortedExtendedPairCombinations = 
Array.from(chunks) <ide> .reduce((/** @type {[Chunk, Chunk][]} */ combinations, a, idx) => { <ide> // create combination pairs <del> for (let i = 0; i < idx; i++) { <del> const b = chunks[i]; <add> for (const b of chunks) { <add> if (b === a) break; <ide> // filter pairs that can NOT be integrated! <ide> if (chunkGraph.canChunksBeIntegrated(b, a)) { <ide> combinations.push([b, a]); <ide> class LimitChunkCountPlugin { <ide> <ide> if (pair) { <ide> chunkGraph.integrateChunks(pair[2], pair[3]); <del> chunks.splice(chunks.indexOf(pair[3]), 1); <add> compilation.chunks.delete(pair[3]); <ide> return true; <ide> } <ide> } <ide><path>lib/optimize/MergeDuplicateChunksPlugin.js <ide> class MergeDuplicateChunksPlugin { <ide> // merge them <ide> if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) { <ide> chunkGraph.integrateChunks(chunk, otherChunk); <del> chunks.splice(chunks.indexOf(otherChunk), 1); <add> compilation.chunks.delete(otherChunk); <ide> } <ide> } <ide> } <ide><path>lib/optimize/MinChunkSizePlugin.js <ide> class MinChunkSizePlugin { <ide> entryChunkMultiplicator: 1 <ide> }; <ide> <del> const sortedSizeFilteredExtendedPairCombinations = chunks <add> const sortedSizeFilteredExtendedPairCombinations = Array.from(chunks) <ide> .reduce((/** @type {[Chunk, Chunk][]} */ combinations, a, idx) => { <ide> // create combination pairs <del> for (let i = 0; i < idx; i++) { <del> const b = chunks[i]; <add> for (const b of chunks) { <add> if (b === a) break; <ide> combinations.push([b, a]); <ide> } <ide> return combinations; <ide> class MinChunkSizePlugin { <ide> const pair = sortedSizeFilteredExtendedPairCombinations[0]; <ide> <ide> chunkGraph.integrateChunks(pair[2], pair[3]); <del> chunks.splice(chunks.indexOf(pair[3]), 1); <add> compilation.chunks.delete(pair[3]); <ide> return true; <ide> } <ide> ); <ide><path>lib/optimize/RemoveEmptyChunksPlugin.js <ide> class RemoveEmptyChunksPlugin { <ide> apply(compiler) { <ide> compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => { <ide> /** <del> * @param {Chunk[]} chunks the chunks array <add> * @param {Iterable<Chunk>} chunks the chunks array <ide> * @returns {void} <ide> */ <ide> const handler = chunks => { <ide> const chunkGraph = compilation.chunkGraph; <del> for (let i = chunks.length - 1; i >= 0; i--) { <del> const chunk = chunks[i]; <add> for (const chunk of chunks) { <ide> if ( <ide> chunkGraph.getNumberOfChunkModules(chunk) === 0 && <ide> !chunk.hasRuntime() && <ide> chunkGraph.getNumberOfEntryModules(chunk) === 0 <ide> ) { <ide> compilation.chunkGraph.disconnectChunk(chunk); <del> chunks.splice(i, 1); <add> compilation.chunks.delete(chunk); <ide> } <ide> } <ide> }; <ide><path>lib/optimize/SplitChunksPlugin.js <ide> module.exports = class SplitChunksPlugin { <ide> } <ide> <ide> // Make sure that maxSize is fulfilled <del> for (const chunk of compilation.chunks.slice()) { <add> for (const chunk of Array.from(compilation.chunks)) { <ide> const { minSize, maxSize, automaticNameDelimiter } = <ide> maxSizeQueueMap.get(chunk) || this.options.fallbackCacheGroup; <ide> if (!maxSize) continue; <ide><path>lib/performance/SizeLimitsPlugin.js <ide> <ide> "use strict"; <ide> <add>const { find } = require("../util/SetHelpers"); <ide> const AssetsOverSizeLimitWarning = require("./AssetsOverSizeLimitWarning"); <ide> const EntrypointsOverSizeLimitWarning = require("./EntrypointsOverSizeLimitWarning"); <ide> const NoAsyncChunksWarning = require("./NoAsyncChunksWarning"); <ide> module.exports = class SizeLimitsPlugin { <ide> } <ide> <ide> if 
(warnings.length > 0) { <del> const hasAsyncChunks = <del> compilation.chunks.filter(chunk => !chunk.canBeInitial()).length > <del> 0; <add> const someAsyncChunk = find( <add> compilation.chunks, <add> chunk => !chunk.canBeInitial() <add> ); <ide> <del> if (!hasAsyncChunks) { <add> if (!someAsyncChunk) { <ide> warnings.push(new NoAsyncChunksWarning()); <ide> } <ide> <ide><path>lib/util/SetHelpers.js <ide> const isSubset = (bigSet, smallSet) => { <ide> return true; <ide> }; <ide> <add>/** <add> * @template T <add> * @param {Set<T>} set a set <add> * @param {function(T): boolean} fn selector function <add> * @returns {T | undefined} found item <add> */ <add>const find = (set, fn) => { <add> for (const item of set) { <add> if (fn(item)) return item; <add> } <add>}; <add> <ide> exports.intersect = intersect; <ide> exports.isSubset = isSubset; <add>exports.find = find;
14
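A brief illustrative note on the commit above (not part of the patch itself): converting `compilation.chunks` from an array to a Set replaces `indexOf`/`splice` bookkeeping with `delete`, and adds a small `find` helper because Sets have no `Array.prototype.find`. A minimal Python sketch of the same two moves, with a hypothetical `Chunk` class standing in for webpack's:

```python
def find(items, predicate):
    """Return the first item matching predicate, or None (a find-over-iterable helper)."""
    for item in items:
        if predicate(item):
            return item
    return None

class Chunk:
    def __init__(self, name):
        self.name = name

chunks = {Chunk("main"), Chunk("vendor"), Chunk("runtime")}

# Removal becomes a set operation instead of index bookkeeping.
doomed = find(chunks, lambda c: c.name == "vendor")
if doomed is not None:
    chunks.discard(doomed)

print(sorted(c.name for c in chunks))  # ['main', 'runtime']
```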
Go
Go
initialize the daemon pidfile early
848e837698922fed0fa67930aa0b56a96b1b832d
<ide><path>builtins/builtins.go <ide> func remote(eng *engine.Engine) error { <ide> // These components should be broken off into plugins of their own. <ide> // <ide> func daemon(eng *engine.Engine) error { <add> if err := eng.Register("initserverpidfile", server.InitPidfile); err != nil { <add> return err <add> } <ide> if err := eng.Register("initserver", server.InitServer); err != nil { <ide> return err <ide> } <ide><path>docker/docker.go <ide> func main() { <ide> if err := builtins.Register(eng); err != nil { <ide> log.Fatal(err) <ide> } <add> <add> // handle the pidfile early. https://github.com/dotcloud/docker/issues/6973 <add> if len(*pidfile) > 0 { <add> job := eng.Job("initserverpidfile", *pidfile) <add> if err := job.Run(); err != nil { <add> log.Fatal(err) <add> } <add> } <add> <ide> // load the daemon in the background so we can immediately start <ide> // the http api so that connections don't fail while the daemon <ide> // is booting <ide> go func() { <ide> // Load plugin: httpapi <ide> job := eng.Job("initserver") <add> // include the variable here too, for the server config <ide> job.Setenv("Pidfile", *pidfile) <ide> job.Setenv("Root", realRoot) <ide> job.SetenvBool("AutoRestart", *flAutoRestart) <ide><path>server/server.go <ide> func (srv *Server) handlerWrap(h engine.Handler) engine.Handler { <ide> } <ide> } <ide> <add>func InitPidfile(job *engine.Job) engine.Status { <add> if len(job.Args) == 0 { <add> return job.Error(fmt.Errorf("no pidfile provided to initialize")) <add> } <add> job.Logf("Creating pidfile") <add> if err := utils.CreatePidFile(job.Args[0]); err != nil { <add> return job.Error(err) <add> } <add> return engine.StatusOK <add>} <add> <ide> // jobInitApi runs the remote api server `srv` as a daemon, <ide> // Only one api server can run at the same time - this is enforced by a pidfile. <ide> // The signals SIGINT, SIGQUIT and SIGTERM are intercepted for cleanup. <ide> func InitServer(job *engine.Job) engine.Status { <ide> if err != nil { <ide> return job.Error(err) <ide> } <del> if srv.daemon.Config().Pidfile != "" { <del> job.Logf("Creating pidfile") <del> if err := utils.CreatePidFile(srv.daemon.Config().Pidfile); err != nil { <del> // FIXME: do we need fatal here instead of returning a job error? <del> log.Fatal(err) <del> } <del> } <ide> job.Logf("Setting up signal traps") <ide> c := make(chan os.Signal, 1) <ide> gosignal.Notify(c, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT)
3
PHP
PHP
fix validation with multiple passes() calls
0e52e470933b02163519ce96fce15711ba164ae1
<ide><path>src/Illuminate/Validation/Validator.php <ide> public function passes() <ide> <ide> $this->distinctValues = []; <ide> <add> $this->failedRules = []; <add> <ide> // We'll spin through each rule, validating the attributes attached to that <ide> // rule. Any error messages will be added to the containers with each of <ide> // the other error messages, returning true if we don't have messages. <ide><path>tests/Validation/ValidationValidatorTest.php <ide> public function testValidatedNotValidateTwiceData() <ide> $this->assertEquals(1, $validateCount); <ide> } <ide> <add> public function testMultiplePassesCalls() <add> { <add> $trans = $this->getIlluminateArrayTranslator(); <add> $v = new Validator($trans, [], ['foo' => 'string|required']); <add> $this->assertFalse($v->passes()); <add> $this->assertFalse($v->passes()); <add> } <add> <ide> /** <ide> * @dataProvider validUuidList <ide> */
2
Ruby
Ruby
fix word case
dbccaff23e605a3ab0bacb2433cc6f7567405fda
<ide><path>activesupport/lib/active_support/testing/isolation.rb <ide> def run_in_isolation(&blk) <ide> module Subprocess <ide> ORIG_ARGV = ARGV.dup unless defined?(ORIG_ARGV) <ide> <del> # Complicated H4X to get this working in windows / jruby with <add> # Complicated H4X to get this working in Windows / JRuby with <ide> # no forking. <ide> def run_in_isolation(&blk) <ide> require "tempfile"
1
Python
Python
add shuffle to dataset records
733a752dc9b0a1203b40c451c8f42136efe5b2d8
<ide><path>official/transformer/v2/data_pipeline.py <ide> def _read_and_batch_from_files( <ide> <ide> # Remove examples where the input or target length exceeds the maximum length, <ide> dataset = dataset.filter(lambda x, y: _filter_max_length((x, y), max_length)) <del> <add> <add> if shuffle: <add> dataset = dataset.shuffle(20000) <add> <ide> if static_batch: <ide> dataset = dataset.padded_batch( <ide> batch_size // max_length, ([max_length], [max_length]),
1
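A side note on the change above: `dataset.shuffle(20000)` in `tf.data` keeps a 20 000-element buffer and samples from it, so records are shuffled before batching. As a rough, framework-free illustration of buffered shuffling (not the TensorFlow implementation), in Python:

```python
import random

def buffered_shuffle(iterable, buffer_size, seed=None):
    """Yield items in randomized order using a fixed-size reservoir buffer."""
    rng = random.Random(seed)
    buffer = []
    for item in iterable:
        if len(buffer) < buffer_size:
            buffer.append(item)
            continue
        idx = rng.randrange(buffer_size)
        yield buffer[idx]          # emit a random buffered element
        buffer[idx] = item         # and replace it with the incoming one
    rng.shuffle(buffer)
    yield from buffer              # drain whatever is left

print(list(buffered_shuffle(range(10), buffer_size=4, seed=0)))
```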
Javascript
Javascript
increase time for db connect in production
877c69255be4264d127cd5d51707126db4532c02
<ide><path>server/production-start.js <ide> var timeoutHandler = setTimeout(function() { <ide> // purposely shutdown server <ide> // pm2 should restart this in production <ide> throw new Error(message); <del>}, 5000); <add>}, 15000); <ide> <ide> app.dataSources.db.on('connected', onConnect);
1
Text
Text
add changelog entry for &
71a83a9cfde71293abcebd98e874f317aa83160e
<ide><path>railties/CHANGELOG.md <ide> ## Rails 4.0.0 (unreleased) ## <ide> <add>* Improved `rake routes` output for redirects *Łukasz Strzałkowski & Andrew White* <add> <ide> * Load all environments available in `config.paths["config/environments"]`. *Piotr Sarnacki* <ide> <ide> * The application generator generates `public/humans.txt` with some basic data. *Paul Campbell*
1
Javascript
Javascript
add missing closing parentheses in code example
b1366c32d4742acd66ebc6ed1e3cb3954c50b558
<ide><path>src/ng/q.js <ide> * // Propagate promise resolution to 'then' functions using $apply(). <ide> * $rootScope.$apply(); <ide> * expect(resolvedValue).toEqual(123); <del> * }); <add> * })); <ide> * </pre> <ide> */ <ide> function $QProvider() {
1
Text
Text
add changelog entry for
c8e92f174d9190fe4cb38199bc2e59d62ace0003
<ide><path>activerecord/CHANGELOG.md <add>* Raise `ActiveRecord::InvalidForeignKey` when a foreign key constraint fails on Sqlite3. <add> <add> *Ryuta Kamizono* <add> <ide> * Add the touch option to ActiveRecord#increment! and decrement! <ide> <ide> *Hiroaki Izu*
1
Text
Text
prettify docs for next script
650ebed77e9f08f1a459bbfe30593bd4f1c87865
<ide><path>docs/basic-features/script.md <ide> Previously, you needed to define `script` tags inside the `Head` of your Next.js <ide> // pages/index.js <ide> import Head from 'next/head' <ide> <del>function Home() { <add>export default function Home() { <ide> return ( <ide> <> <ide> <Head> <ide> With `next/script`, you no longer need to wrap scripts in `next/head`. Further, <ide> // pages/index.js <ide> import Script from 'next/script' <ide> <del>function Home() { <add>export default function Home() { <ide> return ( <ide> <> <ide> <Script src="https://www.google-analytics.com/analytics.js" /> <ide> function Home() { <ide> <ide> ```js <ide> import Script from 'next/script' <del>;<Script <del> src="https://polyfill.io/v3/polyfill.min.js?features=IntersectionObserverEntry%2CIntersectionObserver" <del> strategy="beforeInteractive" <del>/> <add> <add>export default function Home() { <add> return ( <add> <> <add> <Script <add> src="https://polyfill.io/v3/polyfill.min.js?features=IntersectionObserverEntry%2CIntersectionObserver" <add> strategy="beforeInteractive" <add> /> <add> </> <add> ) <add>} <ide> ``` <ide> <ide> ### Lazy-Loading <ide> <ide> ```js <ide> import Script from 'next/script' <del>;<Script <del> src="https://connect.facebook.net/en_US/sdk.js" <del> strategy="lazyOnload" <del>/> <add> <add>export default function Home() { <add> return ( <add> <> <add> <Script <add> src="https://connect.facebook.net/en_US/sdk.js" <add> strategy="lazyOnload" <add> /> <add> </> <add> ) <add>} <ide> ``` <ide> <ide> ### Executing Code After Loading (`onLoad`) <ide> <ide> ```js <ide> import Script from 'next/script' <del>;<Script <del> id="stripe-js" <del> src="https://js.stripe.com/v3/" <del> onLoad={() => { <del> this.setState({ stripe: window.Stripe('pk_test_12345') }) <del> }} <del>/> <add> <add>export default function Home() { <add> return ( <add> <> <add> <Script <add> id="stripe-js" <add> src="https://js.stripe.com/v3/" <add> onLoad={() => { <add> this.setState({ stripe: window.Stripe('pk_test_12345') }) <add> }} <add> /> <add> </> <add> ) <add>} <ide> ``` <ide> <ide> ### Inline Scripts <ide> import Script from 'next/script' <ide> <ide> ```js <ide> import Script from 'next/script' <del>;<Script <del> src="https://www.google-analytics.com/analytics.js" <del> id="analytics" <del> nonce="XUENAJFW" <del> data-test="analytics" <del>/> <add> <add>export default function Home() { <add> return ( <add> <> <add> <Script <add> src="https://www.google-analytics.com/analytics.js" <add> id="analytics" <add> nonce="XUENAJFW" <add> data-test="analytics" <add> /> <add> </> <add> ) <add>} <ide> ```
1
Text
Text
fix image urls
21cfe11edbe9ae70d8b5a7b9268e3bd48941a17e
<ide><path>docs/russian/how-to-catch-outgoing-emails-locally.md <ide> mailhog <ide> <ide> Теперь вы должны увидеть экран, как показано ниже: <ide> <del>![MailHog Скриншот 1](../images/mailhog/1.jpg) <add>![MailHog Скриншот 1](/docs/images/mailhog/1.jpg) <ide> <ide> Когда ваша freeCodeCamp сборка отправит электронное письмо, вы увидите его на экране, как показано ниже: <ide> <del>![MailHog Скриншот 2](../images/mailhog/2.jpg) <add>![MailHog Скриншот 2](/docs/images/mailhog/2.jpg) <ide> <ide> Откройте почту, и вы увидите две вкладки, где вы можете просмотреть содержимое: обычный текст и источник. Убедитесь, что вы находитесь на вкладке обычного текста. <ide> <del>![MailHog Скриншот 3](../images/mailhog/3.jpg) <add>![MailHog Скриншот 3](/docs/images/mailhog/3.jpg) <ide> <ide> Любые ссылки в письме так же должны быть доступны для просмотра. <ide>
1
Text
Text
unify pg spelling in changelogs. [ci skip]
fda7bc36e2df0d2ff20d770150cfe775b2ac8c22
<ide><path>activerecord/CHANGELOG.md <ide> <ide> *Kenn Ejima* <ide> <del>* Add support for Postgresql JSONB. <add>* Add support for PostgreSQL JSONB. <ide> <ide> Example: <ide>
1
Text
Text
use less specific language in ultrajson notes
128bda5712ef041514c5e2feadef0ad248f33f54
<ide><path>docs/api-guide/renderers.md <ide> Comma-separated values are a plain-text tabular data format, that can be easily <ide> <ide> ## UltraJSON <ide> <del>[UltraJSON][ultrajson] is a blazing-fast C JSON encoder which can give 2-10x performance increases on typical workloads. [Jacob Haslehurst][hzy] maintains the [drf-ujson-renderer][drf-ujson-renderer] package which implements JSON rendering using the UJSON package. <add>[UltraJSON][ultrajson] is an optimized C JSON encoder which can give significantly faster JSON rendering. [Jacob Haslehurst][hzy] maintains the [drf-ujson-renderer][drf-ujson-renderer] package which implements JSON rendering using the UJSON package. <ide> <ide> [cite]: https://docs.djangoproject.com/en/dev/ref/template-response/#the-rendering-process <ide> [conneg]: content-negotiation.md
1
Ruby
Ruby
reuse path formatter from the non-optimized path
4d1b3a13129eb7d5fdb1564706acbcd25a5ffa9b
<ide><path>actionpack/lib/action_dispatch/journey/route.rb <ide> def format(path_options) <ide> @path_formatter.evaluate path_options <ide> end <ide> <del> def optimized_path <del> Visitors::OptimizedPath.new.accept(path.spec) <del> end <del> <ide> def optional_parts <ide> path.optional_names.map { |n| n.to_sym } <ide> end <ide><path>actionpack/lib/action_dispatch/journey/visitors.rb <ide> def evaluate(hash) <ide> <ide> @parameters.each do |index| <ide> param = parts[index] <del> value = hash.fetch(param.name) { return ''.freeze } <add> value = hash[param.name] <add> return ''.freeze unless value <ide> parts[index] = param.escape value <ide> end <ide> <ide> def visit_GROUP(node) <ide> end <ide> end <ide> <del> class OptimizedPath < Visitor # :nodoc: <del> def accept(node) <del> Array(visit(node)) <del> end <del> <del> private <del> <del> def visit_CAT(node) <del> [visit(node.left), visit(node.right)].flatten <del> end <del> <del> def visit_SYMBOL(node) <del> node.left[1..-1].to_sym <del> end <del> <del> def visit_STAR(node) <del> visit(node.left) <del> end <del> <del> def visit_GROUP(node) <del> [] <del> end <del> <del> %w{ LITERAL SLASH DOT }.each do |t| <del> class_eval %{ def visit_#{t}(n); n.left; end }, __FILE__, __LINE__ <del> end <del> end <del> <ide> class Dot < Visitor # :nodoc: <ide> def initialize <ide> @nodes = [] <ide><path>actionpack/lib/action_dispatch/routing/route_set.rb <ide> def initialize(route, options) <ide> @klass = Journey::Router::Utils <ide> @required_parts = @route.required_parts <ide> @arg_size = @required_parts.size <del> @optimized_path = @route.optimized_path <ide> end <ide> <ide> def call(t, args) <ide> def optimized_helper(args) <ide> raise_generation_error(params, missing_keys) <ide> end <ide> <del> @optimized_path.map{ |segment| replace_segment(params, segment) }.join <del> end <del> <del> def replace_segment(params, segment) <del> Symbol === segment ? @klass.escape_segment(params[segment]) : segment <add> @route.format params <ide> end <ide> <ide> def optimize_routes_generation?(t)
3
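An aside on the patch above: the formatter now uses a plain hash lookup with an early `return ''` when a required parameter is nil, and the separate optimized-path visitor is dropped so both code paths share one formatter. A hedged, generic sketch of that "missing parameter yields an empty path" step in Python, with invented segment markers:

```python
from urllib.parse import quote

def format_path(segments, params):
    """Join literal pieces and parameter values; bail out with '' if a parameter is missing."""
    parts = []
    for segment in segments:
        if isinstance(segment, str):           # literal piece such as '/users/'
            parts.append(segment)
        else:                                   # a parameter placeholder, e.g. ('id',)
            value = params.get(segment[0])
            if value is None:                   # missing or nil parameter -> no path
                return ""
            parts.append(quote(str(value)))
    return "".join(parts)

print(format_path(["/users/", ("id",)], {"id": 42}))   # /users/42
print(format_path(["/users/", ("id",)], {}))           # ''
```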
Python
Python
add portuguese stopwords
63024466a95293ad96b7c981a7a6a9fc3ade73c4
<ide><path>spacy/pt/language_data.py <ide> def strings_to_exc(orths): <ide> } <ide> <ide> STOP_WORDS = set(""" <add>à às acerca adeus agora ainda algmas algo algumas alguns ali além ambos ano <add>anos antes ao aos apenas apoio apontar após aquela aquelas aquele aqueles aqui <add>aquilo area área as assim através atrás até aí <ide> <add>baixo bastante bem bom breve <add> <add>cada caminho catorze cedo cento certamente certeza cima cinco coisa com como <add>comprido conhecido conselho contra corrente custa cá <add> <add>da daquela daquele dar das de debaixo demais dentro depois desde desligado <add>dessa desse desta deste deve devem deverá dez dezanove dezasseis dezassete <add>dezoito dia diante direita diz dizem dizer do dois dos doze duas dá dão dúvida <add> <add>é ela elas ele eles em embora enquanto entre então era és essa essas esse esses <add>esta estado estar estará estas estava este estes esteve estive estivemos <add>estiveram estiveste estivestes estou está estás estão eu exemplo <add> <add>falta fará favor faz fazeis fazem fazemos fazer fazes fazia faço fez fim final <add>foi fomos for fora foram forma foste fostes fui <add> <add>geral grande grandes grupo <add> <add>hoje horas há <add> <add>iniciar inicio ir irá isso ista iste isto já <add> <add>lado ligado local logo longe lugar lá <add> <add>maior maioria maiorias mais mal mas me meio menor menos meses mesmo meu meus <add>mil minha minhas momento muito muitos máximo mês <add> <add>na nada naquela naquele nas nem nenhuma nessa nesse nesta neste no noite nome <add>nos nossa nossas nosso nossos nova nove novo novos num numa nunca não nível nós <add>número <add> <add>obra obrigada obrigado oitava oitavo oito onde ontem onze os ou outra outras <add>outro outros <add> <add>para parece parte partir pegar pela pelas pelo pelos perto pessoas pode podem <add>poder poderá podia ponto pontos por porque porquê posição possivelmente posso <add>possível pouca pouco povo primeira primeiro promeiro próprio próximo puderam <add>pôde põe põem <add> <add>qual qualquer quando quanto quarta quarto quatro que quem quer quero questão <add>quieto quinta quinto quinze quê relação <add> <add>sabe saber se segunda segundo sei seis sem sempre ser seria sete seu seus sexta <add>sexto sim sistema sob sobre sois somente somos sou sua suas são sétima sétimo <add> <add>tal talvez também tanto tarde te tem temos tempo tendes tenho tens tentar <add>tentaram tente tentei ter terceira terceiro teu teus teve tipo tive tivemos <add>tiveram tiveste tivestes toda todas todo todos trabalhar trabalho treze três tu <add>tua tuas tudo tão têm <add> <add>último um uma umas uns usa usar <add> <add>vai vais valor veja vem vens ver verdade verdadeiro vez vezes viagem vindo <add>vinte você vocês vos vossa vossas vosso vossos vários vão vêm vós <add> <add>zero <ide> """.split()) <ide> <ide>
1
Go
Go
move daemon check back centrally
62a75fca689d235276a6aeaa91a5230de522a3f1
<ide><path>daemon/daemon.go <ide> var ( <ide> validContainerNameChars = `[a-zA-Z0-9][a-zA-Z0-9_.-]` <ide> validContainerNamePattern = regexp.MustCompile(`^/?` + validContainerNameChars + `+$`) <ide> <del> // TODO Windows. Change this once daemon is up and running. <del> ErrSystemNotSupported = errors.New("The Docker daemon is only supported on linux") <add> ErrSystemNotSupported = errors.New("The Docker daemon is not supported on this platform.") <ide> ) <ide> <ide> type contStore struct { <ide> func NewDaemon(config *Config, registryService *registry.Service) (daemon *Daemo <ide> // Do we have a disabled network? <ide> config.DisableBridge = isBridgeNetworkDisabled(config) <ide> <del> // Check that the system is supported and we have sufficient privileges <add> // Verify the platform is supported as a daemon <add> if runtime.GOOS != "linux" && runtime.GOOS != "windows" { <add> return nil, ErrSystemNotSupported <add> } <add> <add> // Validate platform-specific requirements <ide> if err := checkSystem(); err != nil { <ide> return nil, err <ide> } <ide><path>daemon/daemon_unix.go <ide> import ( <ide> "net/http" <ide> "os" <ide> "path/filepath" <del> "runtime" <ide> "strings" <ide> "syscall" <ide> <ide> func checkConfigOptions(config *Config) error { <ide> return nil <ide> } <ide> <del>// checkSystem validates the system is supported and we have sufficient privileges <add>// checkSystem validates platform-specific requirements <ide> func checkSystem() error { <del> // TODO Windows. Once daemon is running on Windows, move this code back to <del> // NewDaemon() in daemon.go, and extend the check to support Windows. <del> if runtime.GOOS != "linux" { <del> return ErrSystemNotSupported <del> } <ide> if os.Geteuid() != 0 { <ide> return fmt.Errorf("The Docker daemon needs to be run as root") <ide> } <ide><path>daemon/daemon_windows.go <ide> package daemon <ide> import ( <ide> "fmt" <ide> "os" <del> "runtime" <ide> "syscall" <ide> <ide> "github.com/Sirupsen/logrus" <ide> func checkConfigOptions(config *Config) error { <ide> return nil <ide> } <ide> <del>// checkSystem validates the system is supported and we have sufficient privileges <add>// checkSystem validates platform-specific requirements <ide> func checkSystem() error { <ide> var dwVersion uint32 <ide> <del> // TODO Windows. Once daemon is running on Windows, move this code back to <del> // NewDaemon() in daemon.go, and extend the check to support Windows. <del> if runtime.GOOS != "linux" && runtime.GOOS != "windows" { <del> return ErrSystemNotSupported <del> } <del> <ide> // TODO Windows. May need at some point to ensure have elevation and <ide> // possibly LocalSystem. <ide>
3
PHP
PHP
fix bug with has events
c5a62901589e313474376e241549ac31d9d3c695
<ide><path>src/Illuminate/Database/Eloquent/Concerns/HasEvents.php <ide> protected function fireModelEvent($event, $halt = true) <ide> <ide> $result = $this->fireCustomModelEvent($event, $method); <ide> <del> return ! is_null($result) ? $result : static::$dispatcher->{$method}( <add> if ($result === false) { <add> return false; <add> } <add> <add> return ! empty($result) ? $result : static::$dispatcher->{$method}( <ide> "eloquent.{$event}: ".static::class, $this <ide> ); <ide> }
1
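An aside on the pitfall this fix addresses: `! is_null($result)` short-circuits on any non-null value, so a listener returning `false` never halted the operation; the fix checks `=== false` first and only then treats a non-empty result as decisive. The same three-way distinction in a small, hypothetical Python dispatcher (not Eloquent's actual API):

```python
def fire_event(listeners, default_handler):
    for listener in listeners:
        result = listener()
        if result is False:      # explicit veto from a listener: halt
            return False
        if result:               # any truthy result short-circuits the default
            return result
    return default_handler()     # otherwise defer to the normal dispatcher

print(fire_event([lambda: None, lambda: False], lambda: "saved"))  # False
print(fire_event([lambda: None, lambda: None], lambda: "saved"))   # saved
```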
Text
Text
add section about updating specs
6520587ba82163c15cf06784452ce474de81ad4d
<ide><path>docs/upgrading/upgrading-your-package.md <ide> class CommandPaletteView extends SelectListView <ide> * And check out the [conversion of CommandPaletteView][selectlistview-example] as a real-world example. <ide> * See the [SelectListView docs][SelectListView] for all options. <ide> <del>## Specs <add>## Updating Specs <ide> <del>TODO: come up with patterns for converting away from using `workspaceView` and `editorView`s everywhere. <add>`WorkspaceView` and `EditorView` have been deprecated. These two objects are used heavily throughout specs, mostly to dispatch events and commands. This section will explain how to remove them while still retaining the ability to dispatch events and commands. <add> <add>### Removing WorkspaceView references <add> <add>`WorkspaceView` has been deprecated. Everything you could do on the view, you can now do on the `Workspace` model. <add> <add>Requiring `WorkspaceView` from `atom` and accessing any methods on it will throw a deprecation warning. Many specs lean heavily on `WorkspaceView` to trigger commands and fetch `EditorView` objects. <add> <add>Your specs might contain something like this: <add> <add>```coffee <add># Old! <add>{WorkspaceView} = require 'atom' <add>describe 'FindView', -> <add> beforeEach -> <add> atom.workspaceView = new WorkspaceView() <add>``` <add> <add>Instead, we will use the `atom.views.getView()` method. This will return a plain `HTMLElement`, not a `WorkspaceView` or jQuery object. <add> <add>```coffee <add># New! <add>describe 'FindView', -> <add> workspaceElement = null <add> beforeEach -> <add> workspaceElement = atom.views.getView(atom.workspace) <add>``` <add> <add>### Attaching the workspace to the DOM <add> <add>The workspace needs to be attached to the DOM in some cases. For example, view hooks only work (`attached()` on `View`, `attachedCallback()` on custom elements) when there is a descendant attached to the DOM. <add> <add>You might see this in your specs: <add> <add>```coffee <add># Old! <add>atom.workspaceView.attachToDom() <add>``` <add> <add>Change it to: <add> <add>```coffee <add># New! <add>jasmine.attachToDOM(workspaceElement) <add>``` <add> <add>### Removing EditorView references <add> <add>Like `WorkspaceView`, `EditorView` has been deprecated. Everything you needed to do on the view you are now able to do on the `Editor` model. <add> <add>In many cases, you will not even need to get the editor's view anymore. Any of those instances should be updated to use the `Editor` instance. You should really only need the editor's view when you plan on triggering a command on the view in a spec. <add> <add>Your specs might contain something like this: <add> <add>```coffee <add># Old! <add>describe 'Something', -> <add> [editorView] = [] <add> beforeEach -> <add> editorView = atom.workspaceView.getActiveView() <add>``` <add> <add>We're going to use `atom.views.getView()` again to get the editor element. As in the case of the `workspaceElement`, `getView` will return a plain `HTMLElement` rather than an `EditorView` or jQuery object. <add> <add>```coffee <add># New! <add>describe 'Something', -> <add> [editor, editorElement] = [] <add> beforeEach -> <add> editor = atom.workspace.getActiveTextEditor() <add> editorElement = atom.views.getView(editor) <add>``` <add> <add>### Dispatching commands <add> <add>Since the `editorView` objects are no longer `jQuery` objects, they no longer support `trigger()`. 
Additionally, Atom has a new command dispatcher, `atom.commands`, that we use rather than commandeering jQuery's `trigger` method. <add> <add>From this: <add> <add>```coffee <add># Old! <add>workspaceView.trigger 'a-package:toggle' <add>editorView.trigger 'find-and-replace:show' <add>``` <add> <add>To this: <add> <add>```coffee <add># New! <add>atom.commands.dispatch workspaceElement, 'a-package:toggle' <add>atom.commands.dispatch editorElement, 'find-and-replace:show' <add>``` <ide> <ide> <ide> [texteditorview]:https://github.com/atom/atom-space-pen-views#texteditorview
1
Python
Python
convert linode create_node to options
b8cb63093b22e6bd2863ca0b8032f5aa83a7d3a5
<ide><path>libcloud/drivers/linode.py <ide> def destroy_node(self, node): <ide> self.connection.request(LINODE_ROOT, params=params) <ide> return True <ide> <del> def create_node(self, name, image, size, **kwargs): <add> def create_node(self, options, **kwargs): <ide> # Create <ide> # <ide> # Creates a Linode instance. <ide> def create_node(self, name, image, size, **kwargs): <ide> # <ide> # Please note that for safety, only 5 Linodes can be created per hour. <ide> <del> # Step -1: Do the datacenter logic <del> fail = LinodeException(0xFC, <del> "Can't pick DC; choose a datacenter with linode_set_datacenter()") <del> if not self.datacenter: <del> # Okay, one has not been chosen. We need to determine. <del> nodes = self.list_nodes() <del> num = len(nodes) <del> if num == 0: <del> # Won't assume where to deploy the first one. <del> # FIXME: Maybe we should? <del> raise fail <del> else: <del> # One or more nodes, so create the next one there. <del> chosen = nodes[0].extra["DATACENTERID"] <del> for node in nodes[1:]: <del> # Check to make sure they're all the same <del> if chosen != node.extra["DATACENTERID"]: <del> raise fail <del> else: <del> # linode_set_datacenter() was used, cool. <del> chosen = self.datacenter <add> chosen = options.location.id <add> image = options.image <add> size = options.size <ide> <ide> # Step 0: Parameter validation before we purchase <ide> # We're especially careful here so we don't fail after purchase, rather
1
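A note on the refactor shape above: loose keyword arguments (`name, image, size, **kwargs`) are replaced by a single options value that carries `location`, `image`, and `size`. A hedged Python sketch of that move using a dataclass; the names are illustrative, not libcloud's real API:

```python
from dataclasses import dataclass

@dataclass
class NodeOptions:
    location: str   # e.g. a datacenter id
    image: str
    size: str

def create_node(options: NodeOptions) -> dict:
    # The driver now reads everything it needs from one options value.
    return {"datacenter": options.location, "image": options.image, "size": options.size}

print(create_node(NodeOptions(location="dc-3", image="debian-12", size="1gb")))
```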
Text
Text
add notes for react native monthly #5
3c5a55ddc21197cfa013dc6222e398138759b5d3
<ide><path>blog/2017-11-06-react-native-monthly-5.md <add>--- <add>title: React Native Monthly #5 <add>author: Tomislav Tenodi <add>authorTitle: Founder at Speck <add>authorURL: https://github.com/tenodi <add>authorImage: https://pbs.twimg.com/profile_images/877237660225609729/bKFDwfAq.jpg <add>authorTwitter: TomislavTenodi <add>category: engineering <add>--- <add> <add>The React Native monthly meeting continues! Let's see what our teams are up to. <add> <add>### Callstack <add> <add>- We’ve been working on React Native CI. Most importantly, we have migrated from Travis to Circle, leaving React Native with a single, unified CI pipeline. <add>- We’ve organised [Hacktoberfest - React Native edition](https://blog.callstack.io/announcing-hacktoberfest-7313ea5ccf4f) where, together with attendees, we tried to submit many pull requests to open source projects. <add>- We keep working on [Haul](https://github.com/callstack/haul). Last month, we have submitted two new releases, including Webpack 3 support. We plan to add [CRNA](https://github.com/react-community/create-react-native-app) and [Expo](https://github.com/expo/expo) support as well as work on better HMR. Our roadmap is public on the issue tracker. If you would like to suggest improvements or give feedback, let us know! <add> <add>### Expo <add> <add>- Released [Expo SDK 22](https://blog.expo.io/expo-sdk-v22-0-0-is-now-available-7745bfe97fc6) (using React Native 0.49) and updated [CRNA](https://github.com/react-community/create-react-native-app) for it. <add> - Includes improved splash screen API, basic ARKit support, “DeviceMotion” API, SFAuthenticationSession support on iOS11, and [more](https://blog.expo.io/expo-sdk-v22-0-0-is-now-available-7745bfe97fc6). <add>- Your [snacks](https://snack.expo.io) can now have multiple JavaScript files and you can upload images and other assets by just dragging them into the editor. <add>- Contribute to [react-navigation](https://github.com/react-community/react-navigation) to add support for iPhone X. <add>- Focus our attention on rough edges when building large applications with Expo. For example: <add> - First-class support for deploying to multiple environments: staging, production, and arbitrary channels. Channels will support rolling back and setting the active release for a given channel. Let us know if you want to be an early tester, [@expo_io](https://twitter.com/expo_io). <add> - We are also working on improving our standalone app building infrastructure and adding support for bundling images and other non-code assets in standalone app builds while keeping the ability to update assets over the air. <add> <add>### Facebook <add> <add>- Better RTL support: <add> - We’re introducing a number of direction-aware styles. <add> - Position: <add> - (left|right) → (start|end) <add> - Margin: <add> - margin(Left|Right) → margin(Start|End) <add> - Padding: <add> - padding(Left|Right) → padding(Start|End) <add> - Border: <add> - borderTop(Left|Right)Radius → borderTop(Start|End)Radius <add> - borderBottom(Left|Right)Radius → borderBottom(Start|End)Radius <add> - border(Left|Right)Width → border(Start|End)Width <add> - border(Left|Right)Color → border(Start|End)Color <add> - The meaning of “left” and “right” were swapped in RTL for position, margin, padding, and border styles. Within a few months, we’re going to remove this behaviour and make “left” always mean “left,” and “right” always mean “right”. The breaking changes are hidden under a flag. 
Use `I18nManager.swapLeftAndRightInRTL(false)` in your React Native components to opt into them. <add>- Working on [Flow](https://github.com/facebook/flow) typing our internal native modules and using those to generate interfaces in Java and protocols in ObjC that the native implementations must implement. We hope this codegen becomes open source next year, at the earliest. <add> <add> <add>### Infinite Red <add> <add>- New OSS tool for helping React Native and other projects. More [here](https://shift.infinite.red/solidarity-the-cli-for-developer-sanity-672fa81b98e9). <add>- Revamping [Ignite](https://github.com/infinitered/ignite) for a new boilerplate release (Code name: Bowser) <add> <add>### Shoutem <add> <add>- Improving the development flow on Shoutem. We want to streamline the process from creating an app to first custom screen and make it really easy, thus lowering the barrier for new React Native developers. Prepared a few workshops to test out new features. We also improved [Shoutem CLI](https://github.com/shoutem/cli) to support new flows. <add>- [Shoutem UI](https://github.com/shoutem/ui) received a few component improvements and bugfixes. We also checked compatibility with latest React Native versions. <add>- Shoutem platform received a few notable updates, new integrations are available as part of the [open-source extensions project](https://github.com/shoutem/extensions). We are really excited to see active development on Shoutem extensions from other developers. We actively contact and offer advice and guidance about their extensions. <add> <add>## Next session <add> <add>The next session is scheduled for Wednesday 6, December 2017. Feel free to ping me [on Twitter](https://twitter.com/TomislavTenodi) if you have any suggestion on how we should improve the output of the meeting.
1
Text
Text
add v3.3.2 to changelog
26bf0d2b1a163ed7afe995dd6d318b75e8c96397
<ide><path>CHANGELOG.md <ide> - [#16794](https://github.com/emberjs/ember.js/pull/16794) [BUGFIX] Fix instance-initializer-test blueprint for new QUnit testing API ([emberjs/rfcs#232](https://github.com/emberjs/rfcs/pull/232)) <ide> - [#16797](https://github.com/emberjs/ember.js/pull/16797) [BUGFIX] Drop autorun assertion <ide> <add>### v3.3.2 (August 20, 2018) <add> <add>- [#16853](https://github.com/emberjs/ember.js/pull/16853) [BUGFIX] Allow ArrayProxy#pushObjects to accept ArrayProxy again <add>- [#16870](https://github.com/emberjs/ember.js/pull/16870) [BUGFIX] Enable @ember/object#get to be called with an empty string <add> <ide> ### v3.3.1 (July 23, 2018) <ide> <ide> - [#16836](https://github.com/emberjs/ember.js/pull/16836/commits) [DOC] Fix Broken 3.3 API Documentation <ide> ### v3.1.4 (August 07, 2018) <ide> <ide> - [#16565](https://github.com/emberjs/ember.js/pull/16565) Fix template / component caching during rendering. <del>- [#16853](https://github.com/emberjs/ember.js/pull/16853) [BUGFIX beta] Allow ArrayProxy#pushObjects to accept ArrayProxy again <add>- [#16853](https://github.com/emberjs/ember.js/pull/16853) [BUGFIX] Allow ArrayProxy#pushObjects to accept ArrayProxy again <ide> <ide> ### v3.1.3 (June 21, 2018) <ide> - [#16754](https://github.com/emberjs/ember.js/pull/16754) [BUGFIX] Fix container destroy timing
1
Text
Text
fix typo in 'creating package' docs
f942bafe549905f9b83348822f0ffd864377bf2d
<ide><path>docs/creating-a-package.md <ide> _snippets_ directory are added alphabetically. <ide> trigger your package's activation. You can delay the loading of your package <ide> until one of these events is triggered. <ide> - `providedServices` (**Optional**): an Object describing the services that your <del>packages provides, which can be used by other packages. The keys of this object <add>package provides, which can be used by other packages. The keys of this object <ide> are the names of the services, and the values are Objects with the following <ide> keys: <ide> - `description` (**Optional**) a String describing the service
1
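The bisect workflow documented in the patch above is a binary search over the commit range: each "good" or "bad" answer halves the remaining candidates until the first bad commit is isolated. A small Python illustration of that search over a hypothetical list of commits, where `is_bad` stands in for the manual test you run at each checkout:

```python
def first_bad(commits, is_bad):
    """Return the earliest commit for which is_bad() is True, given that good commits precede bad ones."""
    lo, hi = 0, len(commits) - 1      # the last commit is the known-bad starting point
    while lo < hi:
        mid = (lo + hi) // 2
        if is_bad(commits[mid]):
            hi = mid                   # the first bad commit is at mid or earlier
        else:
            lo = mid + 1               # everything up to mid is good
    return commits[lo]

commits = ["c1", "c2", "c3", "c4", "c5", "c6"]
print(first_bad(commits, lambda c: c >= "c4"))   # c4
```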
Text
Text
add note on endpointinfo semantics
991f50a10c5be2ca1355beb5100353ce89264032
<ide><path>libnetwork/docs/design.md <ide> Drivers are essentially an extension of libnetwork and provides the actual imple <ide> * `driver.Join` <ide> * `driver.Leave` <ide> <del>These Driver facing APIs makes use of unique identifiers (`networkid`,`endpointid`,...) instead of names (as seen in user-facing APIs). <add>These Driver facing APIs makes use of unique identifiers (`networkid`,`endpointid`,...) instead of names (as seen in user-facing APIs). <ide> <ide> The APIs are still work in progress and there can be changes to these based on the driver requirements especially when it comes to Multi-host networking. <ide> <add>### Driver semantics <add> <add> * `Driver.CreateEndpoint` <add> <add>This method is passed an interface `EndpointInfo`, with methods `Interfaces` and `AddInterface`. <add> <add>If the slice returned by `Interfaces` is non-empty, the driver is expected to make use of the interface infomation therein (e.g., treating the address or addresses as statically supplied), and must return an error if it cannot. If the slice is empty, the driver should allocate zero or more _fresh_ interfaces, and use `AddInterface` to record them; or return an error if it cannot. <add> <add>It is forbidden to use `AddInterface` if `Interfaces` is non-empty. <add> <ide> ## Implementations <ide> <ide> Libnetwork includes the following driver packages:
1
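The added documentation spells out a contract: if the caller supplies interfaces, the driver must use them (or error); if none are supplied, the driver allocates fresh ones via `AddInterface`; and calling `AddInterface` when interfaces were supplied is forbidden. A hedged Python sketch of enforcing that contract — the class and field names are invented for illustration, not libnetwork's Go types:

```python
class EndpointInfo:
    def __init__(self, interfaces=None):
        self._supplied = list(interfaces or [])
        self._added = []

    def interfaces(self):
        return list(self._supplied)

    def add_interface(self, iface):
        if self._supplied:
            raise RuntimeError("AddInterface is forbidden when interfaces were supplied")
        self._added.append(iface)

def create_endpoint(info):
    if info.interfaces():                     # statically supplied addresses: use them as-is
        return info.interfaces()
    info.add_interface({"addr": "10.0.0.2"})  # otherwise allocate a fresh interface
    return list(info._added)

print(create_endpoint(EndpointInfo()))                        # driver-allocated
print(create_endpoint(EndpointInfo([{"addr": "10.0.0.9"}])))  # caller-supplied
```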
PHP
PHP
add configure values for debugger
3226ef889596e680e3b78be5094b24df441ea406
<ide><path>src/Error/Debugger.php <ide> */ <ide> namespace Cake\Error; <ide> <add>use Cake\Core\Configure; <ide> use Cake\Core\InstanceConfigTrait; <ide> use Cake\Error\Debug\ArrayItemNode; <ide> use Cake\Error\Debug\ArrayNode; <ide> class Debugger <ide> public function __construct() <ide> { <ide> $docRef = ini_get('docref_root'); <del> <ide> if (empty($docRef) && function_exists('ini_set')) { <ide> ini_set('docref_root', 'https://secure.php.net/'); <ide> } <ide> if (!defined('E_RECOVERABLE_ERROR')) { <ide> define('E_RECOVERABLE_ERROR', 4096); <ide> } <ide> <add> $config = array_intersect_key((array)Configure::read('Debugger'), $this->_defaultConfig); <add> $this->setConfig($config); <add> <ide> $e = '<pre class="cake-error">'; <ide> $e .= '<a href="javascript:void(0);" onclick="document.getElementById(\'{:id}-trace\')'; <ide> $e .= '.style.display = (document.getElementById(\'{:id}-trace\').style.display == '; <ide><path>tests/TestCase/Error/DebuggerTest.php <ide> use Cake\Controller\Controller; <ide> use Cake\Core\Configure; <ide> use Cake\Error\Debugger; <add>use Cake\Error\Debug\TextFormatter; <ide> use Cake\Log\Log; <ide> use Cake\TestSuite\TestCase; <ide> use RuntimeException; <ide> public function setUp(): void <ide> Configure::write('debug', true); <ide> Log::drop('stderr'); <ide> Log::drop('stdout'); <add> Debugger::configInstance('exportFormatter', TextFormatter::class); <ide> } <ide> <ide> /** <ide> public function tearDown(): void <ide> */ <ide> public function testDocRef() <ide> { <del> $this->skipIf( <del> defined('HHVM_VERSION'), <del> 'HHVM does not output doc references' <del> ); <ide> ini_set('docref_root', ''); <ide> $this->assertEquals(ini_get('docref_root'), ''); <del> new Debugger(); <add> // Force a new instance. <add> Debugger::getInstance(TestDebugger::class); <add> Debugger::getInstance(Debugger::class); <add> <ide> $this->assertEquals(ini_get('docref_root'), 'https://secure.php.net/'); <ide> } <ide> <ide> public function testSetOutputMask() <ide> $this->assertEquals([], Debugger::outputMask()); <ide> } <ide> <add> /** <add> * Test configure based output mask configuration <add> * <add> * @return void <add> */ <add> public function testConfigureOutputMask() <add> { <add> Configure::write('Debugger.outputMask', ['wow' => 'xxx']); <add> Debugger::getInstance(TestDebugger::class); <add> Debugger::getInstance(Debugger::class); <add> <add> $result = Debugger::exportVar(['wow' => 'pass1234']); <add> $this->assertStringContainsString('xxx', $result); <add> $this->assertStringNotContainsString('pass1234', $result); <add> } <add> <ide> /** <ide> * Tests the masking of an array key. <ide> * <ide> public function testSetEditorPredefined() <ide> $this->assertTrue(true); <ide> } <ide> <add> /** <add> * Test configure based editor setup <add> * <add> * @return void <add> */ <add> public function testConfigureEditor() <add> { <add> Configure::write('Debugger.editor', 'emacs'); <add> Debugger::getInstance(TestDebugger::class); <add> Debugger::getInstance(Debugger::class); <add> <add> $result = Debugger::editorUrl('file.php', 123); <add> $this->assertStringContainsString('emacs://', $result); <add> } <add> <ide> /** <ide> * test using a valid editor. 
<ide> * <ide><path>tests/bootstrap.php <ide> Configure::write('Session', [ <ide> 'defaults' => 'php', <ide> ]); <add>Configure::write('Debugger.exportFormatter', TextFormatter::class); <ide> <ide> Log::setConfig([ <ide> // 'queries' => [ <ide> <ide> Chronos::setTestNow(Chronos::now()); <ide> Security::setSalt('a-long-but-not-random-value'); <del>Debugger::configInstance('exportFormatter', TextFormatter::class); <ide> <ide> ini_set('intl.default_locale', 'en_US'); <ide> ini_set('session.gc_divisor', '1');
3
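A note on the constructor change above: it reads `Configure::read('Debugger')` and keeps only the keys present in `$_defaultConfig` (via `array_intersect_key`) before applying them, so unknown settings are silently ignored. The equivalent "accept only known settings" filter in Python, with example option names standing in for the real ones:

```python
DEFAULT_CONFIG = {"outputMask": {}, "editor": "phpstorm", "exportFormatter": None}

def apply_config(defaults, user_config):
    """Merge user settings over defaults, ignoring keys the component does not know about."""
    known = {k: v for k, v in user_config.items() if k in defaults}
    merged = dict(defaults)
    merged.update(known)
    return merged

print(apply_config(DEFAULT_CONFIG, {"editor": "emacs", "unknownOption": 1}))
# {'outputMask': {}, 'editor': 'emacs', 'exportFormatter': None}
```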
Javascript
Javascript
fix compiler test for ie9
f9f0905f4ad7b1d0bb9b606a6d25fb1f88354a78
<ide><path>test/CompilerSpec.js <ide> describe('compiler', function(){ <ide> compiler.compile('<div>A</div><span></span>'); <ide> }).toThrow("Cannot compile multiple element roots: " + ie("<div>A</div><span></span>")); <ide> function ie(text) { <del> return msie ? uppercase(text) : text; <add> return msie && msie < 9 ? uppercase(text) : text; <ide> } <ide> }); <ide>
1
Javascript
Javascript
fix lint issues
0f07ddedca5910fa64c0508549003973112b378d
<ide><path>test/cases/chunks/runtime/index.js <add>/* globals it */ <ide> it("should handle duplicate chunks", function(done) { <ide> var firstOne = false, secondOne = false; <ide> require.ensure([], function(require) { <ide> it("should not load a chunk which is included in a already loaded one", function <ide> try { <ide> async.should.be.eql(true); <ide> loadChunk(); <del> } catch(e) { done(e); } <add> } catch(e) { <add> done(e); <add> } <ide> }); <ide> Promise.resolve().then(function() {}).then(function() {}).then(function() { <ide> async = true; <ide> it("should not load a chunk which is included in a already loaded one", function <ide> try { <ide> sync.should.be.eql(true); <ide> done(); <del> } catch(e) { done(e); } <add> } catch(e) { <add> done(e); <add> } <ide> }); <ide> Promise.resolve().then(function() {}).then(function() {}).then(function() { <ide> sync = false;
1
Text
Text
fix some formatting
47d95c8c3cf48005a91c5fa258ca792e70391069
<ide><path>guides/source/migrations.md <ide> no such migrations, it exits. It will run these migrations in order based <ide> on the date of the migration. <ide> <ide> Note that running the `db:migrate` also invokes the `db:schema:dump` task, which <del>will update your db/schema.rb file to match the structure of your database. <add>will update your `db/schema.rb` file to match the structure of your database. <ide> <ide> If you specify a target version, Active Record will run the required migrations <ide> (up, down or change) until it has reached the specified version. The version <ide> The `rake db:reset` task will drop the database, recreate it and load the <ide> current schema into it. <ide> <ide> NOTE: This is not the same as running all the migrations. It will only use the contents <del>of the current schema.rb file. If a migration can't be rolled back, 'rake db:reset' <add>of the current `schema.rb` file. If a migration can't be rolled back, `rake db:reset` <ide> may not help you. To find out more about dumping the schema see [schema.rb](#schema-dumping-and-you). <ide> <ide> ### Running Specific Migrations
1
Ruby
Ruby
fix broken cache tests
b784900208c0ea4343a75fa1cefdb1872fa14f7b
<ide><path>activesupport/lib/active_support/cache.rb <ide> def size <ide> when NilClass <ide> 0 <ide> when String <del> value.bytesize <add> @v.bytesize <ide> else <del> @s = Marshal.dump(value).bytesize <add> @s = Marshal.dump(@v).bytesize <ide> end <ide> end <ide> end <ide><path>activesupport/test/caching_test.rb <ide> def test_log_exception_when_cache_read_fails <ide> <ide> class MemoryStoreTest < ActiveSupport::TestCase <ide> def setup <del> @record_size = Marshal.dump("aaaaaaaaaa").bytesize <add> @record_size = ActiveSupport::Cache::Entry.new("aaaaaaaaaa").size <ide> @cache = ActiveSupport::Cache.lookup_store(:memory_store, :expires_in => 60, :size => @record_size * 10) <ide> end <ide>
2
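An aside on the fix above: the entry's size is computed from the stored value (`@v`) — bytesize for strings, otherwise the byte length of its serialized form — and the test now derives the expected record size the same way instead of calling `Marshal.dump` directly. A hedged Python analogue using `pickle` in place of Ruby's `Marshal`:

```python
import pickle

def entry_size(value):
    """Approximate a cached value's footprint: raw bytes for strings, serialized length otherwise."""
    if value is None:
        return 0
    if isinstance(value, str):
        return len(value.encode("utf-8"))
    return len(pickle.dumps(value))

record_size = entry_size("aaaaaaaaaa")
cache_limit = record_size * 10          # the memory store in the test is sized in whole records
print(record_size, cache_limit)         # 10 100
```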
Javascript
Javascript
add parserhelpers.expressionisunsupported method
086bd9db7949b9f4d40e8418de1bd5414c24b3bc
<ide><path>lib/ParserHelpers.js <ide> "use strict"; <ide> const BasicEvaluatedExpression = require("./BasicEvaluatedExpression"); <ide> const ConstDependency = require("./dependencies/ConstDependency"); <add>const UnsupportedFeatureWarning = require("./UnsupportedFeatureWarning"); <ide> <ide> const ParserHelpers = exports; <ide> <ide> ParserHelpers.setTypeof = function setTypeof(parser, expr, value) { <ide> return true; <ide> }); <ide> }; <add> <add>ParserHelpers.expressionIsUnsupported = function expressionIsUnsupported(message) { <add> return function unsupportedExpression(expr) { <add> var dep = new ConstDependency("(void 0)", expr.range); <add> dep.loc = expr.loc; <add> this.state.current.addDependency(dep); <add> if(!this.state.module) return; <add> this.state.module.warnings.push(new UnsupportedFeatureWarning(this.state.module, message)); <add> return true; <add> }; <add>};
1
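The new helper above is a factory: given a message, it returns a handler that replaces the offending expression with `(void 0)` and records a warning on the current module. A generic Python sketch of that closure-returning shape; the parser state and the warning message here are simplified stand-ins, not webpack internals:

```python
def expression_is_unsupported(message):
    """Build a handler that neutralizes an expression and records a warning."""
    def handler(state, expr_range):
        state["replacements"].append((expr_range, "(void 0)"))  # drop the expression
        state["warnings"].append(message)                        # surface why it was dropped
        return True
    return handler

state = {"replacements": [], "warnings": []}
on_unsupported = expression_is_unsupported("this expression is not supported")  # example message
on_unsupported(state, (10, 22))
print(state["warnings"])   # ['this expression is not supported']
print(state["replacements"])
```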
PHP
PHP
add queue prefix value
d771ee6c8aaf66f8e3bf549310d0c86f15332106
<ide><path>config/queue.php <ide> <ide> 'default' => env('QUEUE_DRIVER', 'sync'), <ide> <add> /* <add> |-------------------------------------------------------------------------- <add> | Queue Prefix <add> |-------------------------------------------------------------------------- <add> | <add> | If you are running multiple sites on a single server, you may experience <add> | crosstalk among sites if they use the same name for queue tubes. This <add> | optional value defines a prefix that will automatically be applied <add> | to queue tubes as a way to prevent this crosstalk. <add> | <add> */ <add> <add> 'prefix' => env('QUEUE_PREFIX', ''), <add> <ide> /* <ide> |-------------------------------------------------------------------------- <ide> | Queue Connections
1
Javascript
Javascript
address some comments
b34ed237cf374d534b8ee6113ff2395778436559
<ide><path>lib/wasm/WasmFinalizeExportsPlugin.js <ide> */ <ide> "use strict"; <ide> <del>const Queue = require("../util/Queue"); <del>const WebAssemblyImportDependency = require("../dependencies/WebAssemblyImportDependency"); <ide> const UnsupportedWebAssemblyFeatureError = require("../wasm/UnsupportedWebAssemblyFeatureError"); <ide> <add>const error = new UnsupportedWebAssemblyFeatureError( <add> "JavaScript modules can not use a WebAssembly export with an incompatible type signature" <add>); <add> <ide> class WasmFinalizeExportsPlugin { <ide> apply(compiler) { <ide> compiler.hooks.compilation.tap("WasmFinalizeExportsPlugin", compilation => { <ide> compilation.hooks.finishModules.tap( <ide> "WasmFinalizeExportsPlugin", <ide> modules => { <del> const queue = new Queue(); <del> <del> let module; <del> let jsIncompatibleExports = []; <del> <ide> for (const module of modules) { <del> if (module.buildMeta.jsIncompatibleExports) { <del> jsIncompatibleExports.push( <del> ...module.buildMeta.jsIncompatibleExports <del> ); <add> const jsIncompatibleExports = <add> module.buildMeta.jsIncompatibleExports; <add> <add> if ( <add> typeof jsIncompatibleExports === "undefined" || <add> jsIncompatibleExports.length === 0 <add> ) { <add> continue; <ide> } <ide> <del> queue.enqueue(module); <del> } <add> // 1. if a WebAssembly module <add> if (module.type.startsWith("webassembly") === true) { <add> for (const reason of module.reasons) { <add> // 2. is referenced by a non-WebAssembly module <add> if (reason.module.type.startsWith("webassembly") === false) { <add> // const ref = reason.dependency.getReference(); <add> <add> // ref.importedNames // returns true? <add> <add> const names = []; <ide> <del> while (queue.length > 0) { <del> module = queue.dequeue(); <del> <del> // 1. if a non WebAssembly module <del> if (module.type.startsWith("webassembly") === false) { <del> for (const dep of module.dependencies) { <del> // 2. imports a WebAssembly module <del> // FIXME(sven): pseudo code from here <del> if (dep.type === "webassembly") { <del> // 3. if the used import is flaged as invalid <del> if (jsIncompatibleExports.indexOf(dep.usedName)) { <del> throw new UnsupportedWebAssemblyFeatureError( <del> "JavaScript modules can not use WebAssembly export with an incompatible type signature" <del> ); <del> } <add> names.forEach(name => { <add> // 3. and uses a func with an incompatible JS signature <add> if (jsIncompatibleExports.indexOf(name) !== -1) { <add> // 4. error <add> compilation.errors.push(error); <add> } <add> }); <ide> } <ide> } <ide> } <ide><path>test/cases/wasm/js-incompatible-type/index.js <ide> it("should disallow exporting a func signature with result i64", function() { <del> return expect(import("./export-i64-result.wat")).rejects.toThrow(/invalid type/); <add> return import("./export-i64-result.wat").then(({a}) => { <add> expect(a).toThrow(/invalid type/); <add> }); <ide> }); <ide> <ide> it("should disallow exporting a func signature with param i64", function() { <del> return expect(import("./export-i64-param.wat")).rejects.toThrow(/invalid type/); <add> return import("./export-i64-param.wat").then(({a}) => { <add> expect(a).toThrow(/invalid type/); <add> }); <ide> }); <ide> <ide> it("should disallow importing a value type of i64", function() { <del> return expect(import("./import-i64.wat")).rejects.toThrow(/invalid type/); <add> return expect(import("./import-i64.wat")).rejects.toThrow(/invalid type/); <ide> });
2
PHP
PHP
add tests for sql server computed support
5138a5ccce405751988934eacc627bb4f3cc7bed
<ide><path>tests/Database/DatabaseSqlServerSchemaGrammarTest.php <ide> public function testAddingMultiPolygon() <ide> $this->assertEquals('alter table "geo" add "coordinates" geography not null', $statements[0]); <ide> } <ide> <add> public function testAddingGeneratedColumn() <add> { <add> $blueprint = new Blueprint('products'); <add> $blueprint->integer('price'); <add> $blueprint->virtual('discounted_virtual', 'price - 5'); <add> $blueprint->virtual('discounted_stored', 'price - 5')->persisted(); <add> $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar()); <add> $this->assertCount(1, $statements); <add> $this->assertEquals('alter table "products" add "price" int not null, add "discounted_virtual" as (price - 5), add "discounted_stored" as (price - 5) persisted', $statements[0]); <add> } <add> <ide> public function testGrammarsAreMacroable() <ide> { <ide> // compileReplace macro.
1
Python
Python
try a patch for issue
fdd22cfb94e0686b35b9cdccf3914679323ff134
<ide><path>glances/plugins/glances_batpercent.py <ide> psutil_tag = True <ide> try: <ide> psutil.sensors_battery() <del>except AttributeError: <add>except (OSError, AttributeError) as e: <ide> logger.debug("Cannot grab battery status. Platform not supported.") <add> logger.error(e) <ide> psutil_tag = False <ide> <ide>
1
Ruby
Ruby
follow dry principle and remove duplication
9a57e7f9bb50ef3d4090d75354c254906d4905b7
<ide><path>actionview/lib/action_view/routing_url_for.rb <ide> def url_for(options = nil) <ide> when Hash <ide> options = options.symbolize_keys <ide> unless options.key?(:only_path) <del> if options[:host].nil? <del> options[:only_path] = _generate_paths_by_default <del> else <del> options[:only_path] = false <del> end <add> options[:only_path] = only_path?(options[:host]) <ide> end <ide> <ide> super(options) <ide> when ActionController::Parameters <ide> unless options.key?(:only_path) <del> if options[:host].nil? <del> options[:only_path] = _generate_paths_by_default <del> else <del> options[:only_path] = false <del> end <add> options[:only_path] = only_path?(options[:host]) <ide> end <ide> <ide> super(options) <ide> def optimize_routes_generation? #:nodoc: <ide> def _generate_paths_by_default <ide> true <ide> end <add> <add> def only_path?(host) <add> _generate_paths_by_default unless host <add> end <ide> end <ide> end
1
Text
Text
improve spanish translate
fbf3e83248c74d6f6d6311ee0f423533ef8b9892
<ide><path>guide/spanish/git/git-bisect/index.md <ide> localeTitle: Git Bisect <ide> --- <ide> ## Git Bisect <ide> <del>El comando `git bisect` ayuda a encontrar confirmaciones que agregaron cambios específicos en su proyecto. Esto es particularmente útil si necesita encontrar qué cambio introdujo un error. <add>El comando `git bisect` ayuda a encontrar commits que agregaron cambios específicos en su proyecto. Esto es particularmente útil si necesita encontrar qué cambio generó un error. <ide> <del>Este comando funciona al proporcionarle una confirmación "incorrecta" que incluye el error y una confirmación "buena" de antes de que se introdujera la falla. A través de la búsqueda binaria, `git bisect` seleccionará confirmaciones y le pedirá que identifique si la confirmación es "buena" o "mala". Esto continúa hasta que el comando es capaz de encontrar la confirmación exacta que introdujo el cambio. <add>Este comando funciona al proporcionarle una confirmación "incorrecta" que incluye el error y una confirmación "buena" de antes de que se generase el bug. A través de la búsqueda binaria, `git bisect` seleccionará confirmaciones y le pedirá que identifique si la confirmación es "buena" o "mala". Este proceso continúa hasta que el comando es capaz de encontrar la confirmación exacta que introdujo el cambio. <ide> <ide> ### Comandos de Bisect <ide> <ide> git bisect start <ide> git bisect good 4b60707 <ide> ``` <ide> <del>Git comprobará un compromiso entre las versiones "buena" y "mala" y dará como resultado algo como lo siguiente: <add>Git verificará un commit entre las versiones, "buena" y "mala", y dará como resultado algo como lo siguiente: <ide> ``` <ide> Bisecting: 2 revisions left to test after this (roughly 2 steps) <ide> ``` <ide> <del>Ahora debe indicar a git si el compromiso actual funciona con `git bisect good` o si el compromiso actual se rompe con `git bisect bad` . Este proceso se repetirá hasta que el comando pueda imprimir el primer error de confirmación. <add>Ahora debe indicar a git si el commit actual funciona con `git bisect good` o si el commit actual es malo con `git bisect bad` . Este proceso se repetirá hasta que el comando pueda imprimir el primer commit erroneo. <ide> <del>Cuando haya terminado, debe limpiar la sesión bisect. Esto restablecerá su HEAD a lo que era antes de comenzar la sesión bisect: <add>Cuando haya terminado, debe limpiar la sesión bisect. Esto restablecerá su HEAD a lo que version anterior al comienzo de la sesión bisect: <ide> <ide> ```shell <ide> git bisect reset <ide> ``` <ide> <ide> ### Otros recursos <ide> <del>* [Git bisect documentacion](https://git-scm.com/docs/git-bisect) <ide>\ No newline at end of file <add>* [Git bisect documentacion](https://git-scm.com/docs/git-bisect)
1
Ruby
Ruby
store the old logger before calling super
9a1ab6674477642b72cc1760bb7dd69c9a5d6705
<ide><path>actionview/test/activerecord/controller_runtime_test.rb <ide> def db_after_render <ide> tests LogSubscriberController <ide> <ide> def setup <del> super <ide> @old_logger = ActionController::Base.logger <add> super <ide> ActionController::LogSubscriber.attach_to :action_controller <ide> end <ide>
1
Java
Java
introduce execute(datasource) in resrcdbpopulator
5d049e0de898a12386282fa4f8df7ca1454923e9
<ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/ResourceDatabasePopulator.java <ide> <ide> package org.springframework.jdbc.datasource.init; <ide> <add> <ide> import java.sql.Connection; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <ide> import java.util.List; <ide> <add>import javax.sql.DataSource; <add> <ide> import org.springframework.core.io.Resource; <ide> import org.springframework.core.io.support.EncodedResource; <ide> <ide> /** <del> * Populates or initializes a database from SQL scripts defined in external <del> * resources. <add> * Populates, initializes, or cleans up a database using SQL scripts defined in <add> * external resources. <ide> * <del> * <p>Call {@link #addScript(Resource)} to add a single SQL script location. <del> * Call {@link #addScripts(Resource...)} to add multiple SQL script locations. <del> * Call {@link #setSqlScriptEncoding(String)} to set the encoding for all added <del> * scripts. <add> * <ul> <add> * <li>Call {@link #addScript} to add a single SQL script location. <add> * <li>Call {@link #addScripts} to add multiple SQL script locations. <add> * <li>Consult the setter methods in this class for further configuration options. <add> * <li>Call {@link #populate} or {@link #execute} to initialize or clean up the <add> * database using the configured scripts. <add> * </ul> <ide> * <ide> * @author Keith Donald <ide> * @author Dave Syer <ide> public void setIgnoreFailedDrops(boolean ignoreFailedDrops) { <ide> <ide> /** <ide> * {@inheritDoc} <add> * @see #execute(DataSource) <ide> */ <ide> @Override <ide> public void populate(Connection connection) throws ScriptException { <ide> public void populate(Connection connection) throws ScriptException { <ide> } <ide> <ide> /** <del> * {@link EncodedResource} is not a sub-type of {@link Resource}. Thus we <del> * always need to wrap each script resource in an encoded resource. <add> * Execute this {@code DatabasePopulator} against the given {@link DataSource}. <add> * <p>Delegates to {@link DatabasePopulatorUtils#execute}. <add> * @param dataSource the {@code DataSource} to execute against <add> * @throws ScriptException if an error occurs <add> * @since 4.1 <add> * @see #populate(Connection) <add> */ <add> public void execute(DataSource dataSource) throws ScriptException { <add> DatabasePopulatorUtils.execute(this, dataSource); <add> } <add> <add> /** <add> * {@link EncodedResource} is not a sub-type of {@link Resource}. Thus we always need <add> * to wrap each script resource in an encoded resource. <ide> */ <ide> private EncodedResource encodeScript(Resource script) { <ide> return new EncodedResource(script, this.sqlScriptEncoding); <ide><path>spring-test/src/main/java/org/springframework/test/context/junit4/AbstractTransactionalJUnit4SpringContextTests.java <ide> import org.springframework.core.io.Resource; <ide> import org.springframework.dao.DataAccessException; <ide> import org.springframework.jdbc.core.JdbcTemplate; <del>import org.springframework.jdbc.datasource.init.DatabasePopulator; <del>import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; <ide> import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; <ide> import org.springframework.test.context.ContextConfiguration; <ide> import org.springframework.test.context.TestExecutionListeners; <ide> protected void dropTables(String... 
names) { <ide> * exception in the event of an error <ide> * @throws DataAccessException if there is an error executing a statement <ide> * @see ResourceDatabasePopulator <del> * @see DatabasePopulatorUtils <ide> * @see #setSqlScriptEncoding <ide> */ <ide> protected void executeSqlScript(String sqlResourcePath, boolean continueOnError) throws DataAccessException { <ide> Resource resource = this.applicationContext.getResource(sqlResourcePath); <del> DatabasePopulator databasePopulator = new ResourceDatabasePopulator(continueOnError, false, <del> this.sqlScriptEncoding, resource); <del> DatabasePopulatorUtils.execute(databasePopulator, jdbcTemplate.getDataSource()); <add> new ResourceDatabasePopulator(continueOnError, false, this.sqlScriptEncoding, resource).execute(jdbcTemplate.getDataSource()); <ide> } <ide> <ide> } <ide><path>spring-test/src/main/java/org/springframework/test/context/testng/AbstractTransactionalTestNGSpringContextTests.java <ide> import org.springframework.core.io.Resource; <ide> import org.springframework.dao.DataAccessException; <ide> import org.springframework.jdbc.core.JdbcTemplate; <del>import org.springframework.jdbc.datasource.init.DatabasePopulator; <del>import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; <ide> import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; <ide> import org.springframework.test.context.TestExecutionListeners; <ide> import org.springframework.test.context.transaction.TransactionalTestExecutionListener; <ide> protected void dropTables(String... names) { <ide> * exception in the event of an error <ide> * @throws DataAccessException if there is an error executing a statement <ide> * @see ResourceDatabasePopulator <del> * @see DatabasePopulatorUtils <ide> * @see #setSqlScriptEncoding <ide> */ <ide> protected void executeSqlScript(String sqlResourcePath, boolean continueOnError) throws DataAccessException { <ide> Resource resource = this.applicationContext.getResource(sqlResourcePath); <del> DatabasePopulator databasePopulator = new ResourceDatabasePopulator(continueOnError, false, <del> this.sqlScriptEncoding, resource); <del> DatabasePopulatorUtils.execute(databasePopulator, jdbcTemplate.getDataSource()); <add> new ResourceDatabasePopulator(continueOnError, false, this.sqlScriptEncoding, resource).execute(jdbcTemplate.getDataSource()); <ide> } <ide> <ide> } <ide><path>spring-test/src/main/java/org/springframework/test/jdbc/JdbcTestUtils.java <ide> import org.springframework.dao.DataAccessException; <ide> import org.springframework.jdbc.core.JdbcTemplate; <ide> import org.springframework.jdbc.core.SqlParameterValue; <del>import org.springframework.jdbc.datasource.init.DatabasePopulator; <del>import org.springframework.jdbc.datasource.init.DatabasePopulatorUtils; <ide> import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; <ide> import org.springframework.jdbc.datasource.init.ScriptUtils; <ide> import org.springframework.util.StringUtils; <ide> public static void dropTables(JdbcTemplate jdbcTemplate, String... 
tableNames) { <ide> * @throws DataAccessException if there is an error executing a statement <ide> * and {@code continueOnError} is {@code false} <ide> * @see ResourceDatabasePopulator <del> * @see DatabasePopulatorUtils <ide> * @see #executeSqlScript(JdbcTemplate, Resource, boolean) <ide> * @deprecated as of Spring 4.0.3, in favor of using <ide> * {@link org.springframework.jdbc.datasource.init.ScriptUtils#executeSqlScript} <ide> public static void executeSqlScript(JdbcTemplate jdbcTemplate, ResourceLoader re <ide> * @throws DataAccessException if there is an error executing a statement <ide> * and {@code continueOnError} is {@code false} <ide> * @see ResourceDatabasePopulator <del> * @see DatabasePopulatorUtils <ide> * @see #executeSqlScript(JdbcTemplate, EncodedResource, boolean) <ide> * @deprecated as of Spring 4.0.3, in favor of using <ide> * {@link org.springframework.jdbc.datasource.init.ScriptUtils#executeSqlScript} <ide> public static void executeSqlScript(JdbcTemplate jdbcTemplate, Resource resource <ide> * @throws DataAccessException if there is an error executing a statement <ide> * and {@code continueOnError} is {@code false} <ide> * @see ResourceDatabasePopulator <del> * @see DatabasePopulatorUtils <ide> * @deprecated as of Spring 4.0.3, in favor of using <ide> * {@link org.springframework.jdbc.datasource.init.ScriptUtils#executeSqlScript} <ide> * or {@link org.springframework.jdbc.datasource.init.ResourceDatabasePopulator}. <ide> */ <ide> @Deprecated <ide> public static void executeSqlScript(JdbcTemplate jdbcTemplate, EncodedResource resource, boolean continueOnError) <ide> throws DataAccessException { <del> DatabasePopulator databasePopulator = new ResourceDatabasePopulator(continueOnError, false, <del> resource.getEncoding(), resource.getResource()); <del> DatabasePopulatorUtils.execute(databasePopulator, jdbcTemplate.getDataSource()); <add> new ResourceDatabasePopulator(continueOnError, false, resource.getEncoding(), resource.getResource()).execute(jdbcTemplate.getDataSource()); <ide> } <ide> <ide> /**
4
Python
Python
update decoder_vocab_size when resizing embeds
1471857f131eeead2c9ae9673997fe45c02c56a5
<ide><path>src/transformers/models/marian/modeling_marian.py <ide> def __init__(self, config: MarianConfig): <ide> super().__init__(config) <ide> self.model = MarianModel(config) <ide> <del> self.target_vocab_size = ( <del> config.vocab_size if config.share_encoder_decoder_embeddings else config.decoder_vocab_size <del> ) <del> self.register_buffer("final_logits_bias", torch.zeros((1, self.target_vocab_size))) <del> self.lm_head = nn.Linear(config.d_model, self.target_vocab_size, bias=False) <add> target_vocab_size = config.vocab_size if config.share_encoder_decoder_embeddings else config.decoder_vocab_size <add> self.register_buffer("final_logits_bias", torch.zeros((1, target_vocab_size))) <add> self.lm_head = nn.Linear(config.d_model, target_vocab_size, bias=False) <ide> <ide> # Initialize weights and apply final processing <ide> self.post_init() <ide> def _resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding: <ide> new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens) <ide> self.set_input_embeddings(new_embeddings) <ide> <add> # update config.decoder_vocab_size if embeddings are tied <add> if self.config.share_encoder_decoder_embeddings: <add> self.config.decoder_vocab_size = new_num_tokens <add> <ide> # if word embeddings are not tied, make sure that lm head is resized as well <ide> if ( <ide> self.config.share_encoder_decoder_embeddings <ide> def forward( <ide> masked_lm_loss = None <ide> if labels is not None: <ide> loss_fct = CrossEntropyLoss() <del> masked_lm_loss = loss_fct(lm_logits.view(-1, self.target_vocab_size), labels.view(-1)) <add> masked_lm_loss = loss_fct(lm_logits.view(-1, self.config.decoder_vocab_size), labels.view(-1)) <ide> <ide> if not return_dict: <ide> output = (lm_logits,) + outputs[1:]
1
Javascript
Javascript
use inspector utils
5c546e1f3c12a12e968b95ed33f1844b137e8662
<ide><path>lib/internal/cluster/primary.js <ide> const { <ide> ArrayPrototypeSome, <ide> ObjectKeys, <ide> ObjectValues, <del> RegExpPrototypeExec, <ide> SafeMap, <ide> StringPrototypeStartsWith, <ide> } = primordials; <add>const { <add> codes: { <add> ERR_SOCKET_BAD_PORT, <add> } <add>} = require('internal/errors'); <ide> <ide> const assert = require('internal/assert'); <ide> const { fork } = require('child_process'); <ide> const EventEmitter = require('events'); <ide> const RoundRobinHandle = require('internal/cluster/round_robin_handle'); <ide> const SharedHandle = require('internal/cluster/shared_handle'); <ide> const Worker = require('internal/cluster/worker'); <add>const { getInspectPort, isUsingInspector } = require('internal/util/inspector'); <ide> const { internal, sendHelper } = require('internal/cluster/utils'); <ide> const cluster = new EventEmitter(); <ide> const intercom = new EventEmitter(); <ide> const SCHED_NONE = 1; <ide> const SCHED_RR = 2; <del>const minPort = 1024; <del>const maxPort = 65535; <del>const { validatePort } = require('internal/validators'); <ide> <ide> module.exports = cluster; <ide> <ide> cluster.SCHED_NONE = SCHED_NONE; // Leave it to the operating system. <ide> cluster.SCHED_RR = SCHED_RR; // Primary distributes connections. <ide> <ide> let ids = 0; <del>let debugPortOffset = 1; <ide> let initialized = false; <ide> <ide> // XXX(bnoordhuis) Fold cluster.schedulingPolicy into cluster.settings? <ide> function setupSettingsNT(settings) { <ide> function createWorkerProcess(id, env) { <ide> const workerEnv = { ...process.env, ...env, NODE_UNIQUE_ID: `${id}` }; <ide> const execArgv = [...cluster.settings.execArgv]; <del> const debugArgRegex = /--inspect(?:-brk|-port)?|--debug-port/; <del> const nodeOptions = process.env.NODE_OPTIONS || ''; <del> <del> // TODO(MoLow): Use getInspectPort from internal/util/inspector <del> if (ArrayPrototypeSome(execArgv, <del> (arg) => RegExpPrototypeExec(debugArgRegex, arg) !== null) || <del> RegExpPrototypeExec(debugArgRegex, nodeOptions) !== null) { <del> let inspectPort; <del> if ('inspectPort' in cluster.settings) { <del> if (typeof cluster.settings.inspectPort === 'function') <del> inspectPort = cluster.settings.inspectPort(); <del> else <del> inspectPort = cluster.settings.inspectPort; <del> <del> validatePort(inspectPort); <del> } else { <del> inspectPort = process.debugPort + debugPortOffset; <del> if (inspectPort > maxPort) <del> inspectPort = inspectPort - maxPort + minPort - 1; <del> debugPortOffset++; <del> } <ide> <del> ArrayPrototypePush(execArgv, `--inspect-port=${inspectPort}`); <add> if (cluster.settings.inspectPort === null) { <add> throw new ERR_SOCKET_BAD_PORT('Port', null, true); <add> } <add> if (isUsingInspector(cluster.settings.execArgv)) { <add> ArrayPrototypePush(execArgv, `--inspect-port=${getInspectPort(cluster.settings.inspectPort)}`); <ide> } <ide> <ide> return fork(cluster.settings.exec, cluster.settings.args, { <ide><path>lib/internal/util/inspector.js <ide> const { <ide> ObjectKeys, <ide> ObjectPrototypeHasOwnProperty, <ide> RegExpPrototypeExec, <add> SafeWeakMap, <ide> } = primordials; <ide> <ide> const { validatePort } = require('internal/validators'); <ide> const kMaxPort = 65535; <ide> const kInspectArgRegex = /--inspect(?:-brk|-port)?|--debug-port/; <ide> const kInspectMsgRegex = /Debugger listening on ws:\/\/\[?(.+?)\]?:(\d+)\/|Debugger attached|Waiting for the debugger to disconnect\.\.\./; <ide> <del>let _isUsingInspector; <del>function isUsingInspector() { <del> _isUsingInspector ??= 
<del> ArrayPrototypeSome(process.execArgv, (arg) => RegExpPrototypeExec(kInspectArgRegex, arg) !== null) || <del> RegExpPrototypeExec(kInspectArgRegex, process.env.NODE_OPTIONS) !== null; <del> return _isUsingInspector; <add>const _isUsingInspector = new SafeWeakMap(); <add>function isUsingInspector(execArgv = process.execArgv) { <add> if (!_isUsingInspector.has(execArgv)) { <add> _isUsingInspector.set(execArgv, <add> ArrayPrototypeSome(execArgv, (arg) => RegExpPrototypeExec(kInspectArgRegex, arg) !== null) || <add> RegExpPrototypeExec(kInspectArgRegex, process.env.NODE_OPTIONS) !== null); <add> } <add> return _isUsingInspector.get(execArgv); <ide> } <ide> <ide> let debugPortOffset = 1; <ide> function getInspectPort(inspectPort) { <del> if (!isUsingInspector()) { <del> return null; <del> } <ide> if (typeof inspectPort === 'function') { <ide> inspectPort = inspectPort(); <ide> } else if (inspectPort == null) {
2
PHP
PHP
fix orm_cache tool when metadata cache is disabled
5d60a0803711be0943cc9b6567260409ea95fda8
<ide><path>src/Shell/OrmCacheShell.php <ide> public function build($name = null) { <ide> if (!$schema) { <ide> return false; <ide> } <del> if (!$schema->cacheMetadata()) { <del> $this->_io->verbose('Metadata cache was disabled in config. Enabling to write cache.'); <del> $schema->cacheMetadata(true); <del> } <ide> $tables = [$name]; <ide> if (empty($name)) { <ide> $tables = $schema->listTables(); <ide> public function clear($name = null) { <ide> if (empty($name)) { <ide> $tables = $schema->listTables(); <ide> } <del> if (!$schema->cacheMetadata()) { <del> $this->_io->verbose('Metadata cache was disabled in config. Enabling to clear cache.'); <del> $schema->cacheMetadata(true); <del> } <ide> $configName = $schema->cacheMetadata(); <ide> <ide> foreach ($tables as $table) { <ide> protected function _getSchema() { <ide> $this->error($msg); <ide> return false; <ide> } <add> $config = $source->config(); <add> if (empty($config['cacheMetadata'])) { <add> $this->_io->verbose('Metadata cache was disabled in config. Enabling to clear cache.'); <add> $source->cacheMetadata(true); <add> } <ide> return $source->schemaCollection(); <ide> } <ide> <ide><path>tests/TestCase/Shell/OrmCacheShellTest.php <ide> public function tearDown() { <ide> $ds->cacheMetadata(false); <ide> } <ide> <add>/** <add> * Test that clear enables the cache if it was disabled. <add> * <add> * @return void <add> */ <add> public function testClearEnablesMetadataCache() { <add> $ds = ConnectionManager::get('test'); <add> $ds->cacheMetadata(false); <add> <add> $this->shell->params['connection'] = 'test'; <add> $this->shell->clear(); <add> $this->assertInstanceOf('Cake\Database\Schema\CachedCollection', $ds->schemaCollection()); <add> } <add> <add>/** <add> * Test that build enables the cache if it was disabled. <add> * <add> * @return void <add> */ <add> public function testBuildEnablesMetadataCache() { <add> $ds = ConnectionManager::get('test'); <add> $ds->cacheMetadata(false); <add> <add> $this->shell->params['connection'] = 'test'; <add> $this->shell->build(); <add> $this->assertInstanceOf('Cake\Database\Schema\CachedCollection', $ds->schemaCollection()); <add> } <add> <ide> /** <ide> * Test build() with no args. <ide> *
2
Ruby
Ruby
drop pointless use of cannotinstallformulaerror
8c6efd8993215dfa8ae73041300d4d538dcb09b3
<ide><path>Library/Homebrew/cmd/install.rb <ide> def install <ide> ARGV.formulae.each do |f| <ide> # Building head-only without --HEAD is an error <ide> if not ARGV.build_head? and f.stable.nil? <del> raise CannotInstallFormulaError, <<-EOS.undent <add> raise <<-EOS.undent <ide> #{f.name} is a head-only formula <ide> Install with `brew install --HEAD #{f.name}` <ide> EOS <ide> end <ide> <ide> # Building stable-only with --HEAD is an error <ide> if ARGV.build_head? and f.head.nil? <del> raise CannotInstallFormulaError, "No head is defined for #{f.name}" <add> raise "No head is defined for #{f.name}" <ide> end <ide> end <ide>
1
Javascript
Javascript
add draft for euler3.clamp()
4ff095945027b61090b7980d83fb2e559e3c99dd
<ide><path>src/math/Euler3.js <ide> THREE.Euler3.prototype = { <ide> <ide> }, <ide> <add> clamp: function() { <add> <add> // todo <add> <add> }, <add> <ide> reorder: function( newOrder ) { <ide> <ide> // todo. <ide> THREE.Euler3.prototype = { <ide> alternativeSolution: function() { <ide> <ide> // todo. <del> <add> <ide> }, <ide> <ide> equals: function ( e ) {
1
Javascript
Javascript
lower the priority of the notification bar to info
e015ab0d81fe23a647445c4f33ecc95b251c582c
<ide><path>extensions/firefox/components/PdfStreamConverter.js <ide> ChromeActions.prototype = { <ide> } <ide> }]; <ide> notificationBox.appendNotification(message, 'pdfjs-fallback', null, <del> notificationBox.PRIORITY_WARNING_LOW, <add> notificationBox.PRIORITY_INFO_LOW, <ide> buttons, <ide> function eventsCallback(eventType) { <ide> // Currently there is only one event "removed" but if there are any other
1
Javascript
Javascript
remove deprecation warnings in net module
251d03197f05cc17ea35f811803f9acbfb30d016
<ide><path>lib/net.js <ide> Stream.prototype.resume = function () { <ide> }; <ide> <ide> <del>var forceCloseWarning; <del> <del>Stream.prototype.forceClose = function (e) { <del> if (!forceCloseWarning) { <del> forceCloseWarning = "forceClose() has been renamed to destroy()"; <del> sys.error(forceCloseWarning); <del> } <del> return this.destroy(e); <del>}; <del> <del> <ide> Stream.prototype.destroy = function (exception) { <ide> // pool is shared between sockets, so don't need to free it here. <ide> var self = this; <ide> Stream.prototype._shutdown = function () { <ide> } <ide> }; <ide> <del>var closeDepricationWarning; <del> <del>Stream.prototype.close = function (data, encoding) { <del> if (!closeDepricationWarning) { <del> closeDepricationWarning = "Notification: Stream.prototype.close has been renamed to end()"; <del> sys.error(closeDepricationWarning); <del> } <del> return this.end(data, encoding); <del>}; <ide> <ide> Stream.prototype.end = function (data, encoding) { <ide> if (this.writable) { <ide><path>test/simple/test-http-eof-on-connect.js <ide> server.listen(common.PORT); <ide> <ide> server.addListener("listening", function() { <ide> net.createConnection(common.PORT).addListener("connect", function () { <del> this.close(); <add> this.destroy(); <ide> }).addListener("close", function () { <ide> server.close(); <ide> }); <del>}); <ide>\ No newline at end of file <add>});
2
PHP
PHP
fix small typo in comments
e4c9bdb8e3761584c0899661a7667784831467f9
<ide><path>src/Illuminate/Routing/UrlGenerator.php <ide> public function previous() <ide> } <ide> <ide> /** <del> * Generate a absolute URL to the given path. <add> * Generate an absolute URL to the given path. <ide> * <ide> * @param string $path <ide> * @param mixed $extra
1
Python
Python
fix flaky test
d864512631668fbfb75fe7a454c85577cfbe3278
<ide><path>tests/keras/backend/test_backends.py <ide> def test_conv2d(self): <ide> <ide> kernel_val = np.random.random(kernel_shape) - 0.5 <ide> <del> kernel_th = KTH.variable(convert_kernel(kernel_val)) <add> kernel_th = KTH.variable(convert_kernel(kernel_val, dim_ordering='th')) <ide> kernel_tf = KTF.variable(kernel_val) <ide> <ide> zth = KTH.eval(KTH.conv2d(xth, kernel_th, dim_ordering='th')) <ide> def test_conv3d(self): <ide> <ide> kernel_val = np.random.random(kernel_shape) - 0.5 <ide> <del> kernel_th = KTH.variable(convert_kernel(kernel_val)) <add> kernel_th = KTH.variable(convert_kernel(kernel_val, dim_ordering='th')) <ide> kernel_tf = KTF.variable(kernel_val) <ide> <ide> zth = KTH.eval(KTH.conv3d(xth, kernel_th, dim_ordering='th'))
1
Javascript
Javascript
remove duplicate _undestroy
855ca736de018f05bb55e2e9489f058a3b9e57fa
<ide><path>lib/net.js <ide> Socket.prototype.connect = function(...args) { <ide> this.write = Socket.prototype.write; <ide> <ide> if (this.destroyed) { <del> this._undestroy(); <ide> this._handle = null; <ide> this._peername = null; <ide> this._sockname = null;
1
Go
Go
adjust corner case for reconnect logic
500d9f45155a74ea2c2067d85a78977cc7b8c06d
<ide><path>libnetwork/networkdb/cluster.go <ide> func (nDB *NetworkDB) rejoinClusterBootStrap() { <ide> return <ide> } <ide> <add> myself, _ := nDB.nodes[nDB.config.NodeID] <ide> bootStrapIPs := make([]string, 0, len(nDB.bootStrapIP)) <ide> for _, bootIP := range nDB.bootStrapIP { <del> for _, node := range nDB.nodes { <del> if node.Addr.Equal(bootIP) { <del> // One of the bootstrap nodes is part of the cluster, return <del> nDB.RUnlock() <del> return <add> // botostrap IPs are usually IP:port from the Join <add> var bootstrapIP net.IP <add> ipStr, _, err := net.SplitHostPort(bootIP) <add> if err != nil { <add> // try to parse it as an IP with port <add> // Note this seems to be the case for swarm that do not specify any port <add> ipStr = bootIP <add> } <add> bootstrapIP = net.ParseIP(ipStr) <add> if bootstrapIP != nil { <add> for _, node := range nDB.nodes { <add> if node.Addr.Equal(bootstrapIP) && !node.Addr.Equal(myself.Addr) { <add> // One of the bootstrap nodes (and not myself) is part of the cluster, return <add> nDB.RUnlock() <add> return <add> } <ide> } <add> bootStrapIPs = append(bootStrapIPs, bootIP) <ide> } <del> bootStrapIPs = append(bootStrapIPs, bootIP.String()) <ide> } <ide> nDB.RUnlock() <add> if len(bootStrapIPs) == 0 { <add> // this will also avoid to call the Join with an empty list erasing the current bootstrap ip list <add> logrus.Debug("rejoinClusterBootStrap did not find any valid IP") <add> return <add> } <ide> // None of the bootStrap nodes are in the cluster, call memberlist join <ide> logrus.Debugf("rejoinClusterBootStrap, calling cluster join with bootStrap %v", bootStrapIPs) <ide> ctx, cancel := context.WithTimeout(nDB.ctx, rejoinClusterDuration) <ide><path>libnetwork/networkdb/networkdb.go <ide> package networkdb <ide> import ( <ide> "context" <ide> "fmt" <del> "net" <ide> "os" <ide> "strings" <ide> "sync" <ide> type NetworkDB struct { <ide> <ide> // bootStrapIP is the list of IPs that can be used to bootstrap <ide> // the gossip. <del> bootStrapIP []net.IP <add> bootStrapIP []string <ide> <ide> // lastStatsTimestamp is the last timestamp when the stats got printed <ide> lastStatsTimestamp time.Time <ide> func New(c *Config) (*NetworkDB, error) { <ide> // instances passed by the caller in the form of addr:port <ide> func (nDB *NetworkDB) Join(members []string) error { <ide> nDB.Lock() <del> nDB.bootStrapIP = make([]net.IP, 0, len(members)) <del> for _, m := range members { <del> nDB.bootStrapIP = append(nDB.bootStrapIP, net.ParseIP(m)) <del> } <add> nDB.bootStrapIP = append([]string(nil), members...) 
<add> logrus.Infof("The new bootstrap node list is:%v", nDB.bootStrapIP) <ide> nDB.Unlock() <ide> return nDB.clusterJoin(members) <ide> } <ide><path>libnetwork/networkdb/networkdb_test.go <ide> func TestMain(m *testing.M) { <ide> os.Exit(m.Run()) <ide> } <ide> <add>func launchNode(t *testing.T, conf Config) *NetworkDB { <add> db, err := New(&conf) <add> require.NoError(t, err) <add> return db <add>} <add> <ide> func createNetworkDBInstances(t *testing.T, num int, namePrefix string, conf *Config) []*NetworkDB { <ide> var dbs []*NetworkDB <ide> for i := 0; i < num; i++ { <ide> localConfig := *conf <ide> localConfig.Hostname = fmt.Sprintf("%s%d", namePrefix, i+1) <ide> localConfig.NodeID = stringid.TruncateID(stringid.GenerateRandomID()) <ide> localConfig.BindPort = int(atomic.AddInt32(&dbPort, 1)) <del> db, err := New(&localConfig) <del> require.NoError(t, err) <del> <add> db := launchNode(t, localConfig) <ide> if i != 0 { <del> err = db.Join([]string{fmt.Sprintf("localhost:%d", db.config.BindPort-1)}) <del> assert.NoError(t, err) <add> assert.NoError(t, db.Join([]string{fmt.Sprintf("localhost:%d", db.config.BindPort-1)})) <ide> } <ide> <ide> dbs = append(dbs, db) <ide> func TestParallelDelete(t *testing.T) { <ide> <ide> closeNetworkDBInstances(dbs) <ide> } <add> <add>func TestNetworkDBIslands(t *testing.T) { <add> logrus.SetLevel(logrus.DebugLevel) <add> dbs := createNetworkDBInstances(t, 5, "node", DefaultConfig()) <add> <add> // Get the node IP used currently <add> node, _ := dbs[0].nodes[dbs[0].config.NodeID] <add> baseIPStr := node.Addr.String() <add> // Node 0,1,2 are going to be the 3 bootstrap nodes <add> members := []string{fmt.Sprintf("%s:%d", baseIPStr, dbs[0].config.BindPort), <add> fmt.Sprintf("%s:%d", baseIPStr, dbs[1].config.BindPort), <add> fmt.Sprintf("%s:%d", baseIPStr, dbs[2].config.BindPort)} <add> // Rejoining will update the list of the bootstrap members <add> for i := 3; i < 5; i++ { <add> assert.NoError(t, dbs[i].Join(members)) <add> } <add> <add> // Now the 3 bootstrap nodes will cleanly leave, and will be properly removed from the other 2 nodes <add> for i := 0; i < 3; i++ { <add> logrus.Infof("node %d leaving", i) <add> dbs[i].Close() <add> time.Sleep(2 * time.Second) <add> } <add> <add> // Give some time to let the system propagate the messages and free up the ports <add> time.Sleep(10 * time.Second) <add> <add> // Verify that the nodes are actually all gone and marked appropiately <add> for i := 3; i < 5; i++ { <add> assert.Len(t, dbs[i].leftNodes, 3) <add> assert.Len(t, dbs[i].failedNodes, 0) <add> } <add> <add> // Spawn again the first 3 nodes with different names but same IP:port <add> for i := 0; i < 3; i++ { <add> logrus.Infof("node %d coming back", i) <add> dbs[i].config.NodeID = stringid.TruncateID(stringid.GenerateRandomID()) <add> dbs[i] = launchNode(t, *dbs[i].config) <add> time.Sleep(2 * time.Second) <add> } <add> <add> // Give some time for the reconnect routine to run, it runs every 60s <add> time.Sleep(50 * time.Second) <add> <add> // Verify that the cluster is again all connected. Note that the 3 previous node did not do any join <add> for i := 0; i < 5; i++ { <add> assert.Len(t, dbs[i].nodes, 5) <add> assert.Len(t, dbs[i].failedNodes, 0) <add> if i < 3 { <add> // nodes from 0 to 3 has no left nodes <add> assert.Len(t, dbs[i].leftNodes, 0) <add> } else { <add> // nodes from 4 to 5 has the 3 previous left nodes <add> assert.Len(t, dbs[i].leftNodes, 3) <add> } <add> } <add>}
3
Java
Java
add missing @since tag
9798912557aa9e21885f471efabd2b793ac95735
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompSession.java <ide> public interface StompSession { <ide> * "client-individual" in order ot use this. <ide> * @param messageId the id of the message <ide> * @param consumed whether the message was consumed or not <del> * @return a Receiptable for tracking events <add> * @return a Receiptable for tracking receipts <add> * @since 4.3 <ide> */ <ide> Receiptable acknowledge(String messageId, boolean consumed); <ide>
1
Text
Text
add release note for [ci skip]
0613dd0234770ecd66e5f64dfb57aae6d19e9771
<ide><path>guides/source/4_2_release_notes.md <ide> Please refer to the [Changelog][railties] for detailed changes. <ide> * Deprecated `Rails::Rack::LogTailer` without replacement. <ide> ([Commit](https://github.com/rails/rails/commit/84a13e019e93efaa8994b3f8303d635a7702dbce)) <ide> <add>* Deprecated missing `config.log_level` for production environments. <add> ([Pull Request](https://github.com/rails/rails/pull/16622)) <add> <ide> ### Notable changes <ide> <ide> * Introduced `web-console` in the default application Gemfile.
1
Javascript
Javascript
change wording in lib/domain.js comment
e0eb515afc7fdec8d4cc8a90448653b08bd1edb4
<ide><path>lib/domain.js <ide> Domain.prototype.add = function(ee) { <ide> ee.domain.remove(ee); <ide> <ide> // Check for circular Domain->Domain links. <del> // This causes bad insanity! <add> // They cause big issues. <ide> // <ide> // For example: <ide> // var d = domain.create();
1
Java
Java
update copyright year of changed file
8a7a046bf0073e73e85eab4856a7915fc9d46e40
<ide><path>spring-context/src/main/java/org/springframework/scheduling/annotation/EnableAsync.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2021 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License.
1
PHP
PHP
apply fixes from styleci
3a513ad935e95fc45179adca6c8edf5b745a1493
<ide><path>src/Illuminate/Auth/Notifications/ResetPassword.php <ide> public function toMail($notifiable) <ide> } else { <ide> $url = url(config('app.url').route('password.reset', [ <ide> 'token' => $this->token, <del> 'email' => $notifiable->getEmailForPasswordReset() <add> 'email' => $notifiable->getEmailForPasswordReset(), <ide> ], false)); <ide> } <ide>
1
Text
Text
emphasize collaborators in governance.md
b830c976b56a4b9601ffa03980511512ac8ae8e1
<ide><path>GOVERNANCE.md <ide> # Node.js Project Governance <ide> <del>## Core Technical Committee <del> <del>The Node.js project is governed by a Core Technical Committee (CTC) which is <del>responsible for high-level guidance of the project. <del> <del>The CTC has final authority over this project including: <del> <del>* Technical direction <del>* Project governance and process (including this policy) <del>* Contribution policy <del>* GitHub repository hosting <del>* Conduct guidelines <del>* Maintaining the list of additional Collaborators <del> <del>For the current list of CTC members, see the project <del>[README.md](./README.md#current-project-team-members). <add>The Node.js project is governed by its Collaborators, including a Core Technical <add>Committee (CTC) which is responsible for high-level guidance of the project. <ide> <ide> ## Collaborators <ide> <ide> The [nodejs/node](https://github.com/nodejs/node) GitHub repository is <del>maintained by the CTC and additional Collaborators who are added by the <del>CTC on an ongoing basis. <add>maintained by Collaborators who are added by the CTC on an ongoing basis. <ide> <ide> Individuals identified by the CTC as making significant and valuable <ide> contributions are made Collaborators and given commit access to the project. If <ide> be accepted unless: <ide> the change. Previously-objecting Collaborators do not necessarily have to <ide> sign-off on the change, but they should not be opposed to it. <ide> * The change is escalated to the CTC and the CTC votes to approve the change. <del> This should be used only after other options (especially discussion among <del> the disagreeing Collaborators) have been exhausted. <add> This should only happen if disagreements between Collaborators cannot be <add> resolved through discussion. <ide> <ide> Collaborators may opt to elevate significant or controversial modifications to <ide> the CTC by assigning the `ctc-review` label to a pull request or issue. The <ide> CTC should serve as the final arbiter where required. <ide> <del>For the current list of Collaborators, see the project <del>[README.md](./README.md#current-project-team-members). <del> <del>A guide for Collaborators is maintained in <del>[COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md). <add>* [Current list of Collaborators](./README.md#current-project-team-members) <add>* [A guide for Collaborators](./COLLABORATOR_GUIDE.md) <ide> <ide> ### Collaborator Activities <ide> <ide> The CTC periodically reviews the Collaborator list to identify inactive <ide> Collaborators. Past Collaborators are typically given _Emeritus_ status. Emeriti <ide> may request that the CTC restore them to active status. <ide> <add>## Core Technical Committee <add> <add>The Core Technical Committee (CTC) has final authority over this project <add>including: <add> <add>* Technical direction <add>* Project governance and process (including this policy) <add>* Contribution policy <add>* GitHub repository hosting <add>* Conduct guidelines <add>* Maintaining the list of additional Collaborators <add> <add>* [Current list of CTC members](./README.md#current-project-team-members) <add> <ide> ## CTC Membership <ide> <ide> CTC seats are not time-limited. There is no fixed size of the CTC. The CTC <ide> membership beyond these rules. <ide> <ide> The CTC may add additional members to the CTC by a standard CTC motion. 
<ide> <del>When a CTC member's participation in [CTC activities](#ctc-activities) has become <del>minimal for a sustained period of time, the CTC will request that the member <del>either indicate an intention to increase participation or voluntarily resign. <add>When a CTC member's participation in [CTC activities](#ctc-activities) has <add>become minimal for a sustained period of time, the CTC will request that the <add>member either indicate an intention to increase participation or voluntarily <add>resign. <ide> <ide> CTC members may only be removed by voluntary resignation or through a standard <ide> CTC motion.
1
Text
Text
replace 'onoe' with 'odie'
f239a448b2750a2f4df5cf7dd7a0df918b130f17
<ide><path>share/doc/homebrew/Formula-Cookbook.md <ide> Three commands are provided for displaying informational messages to the user: <ide> * `opoo` for warning messages <ide> * `odie` for error messages and immediately exiting <ide> <del>In particular, when a test needs to be performed before installation use `onoe` to bail out gracefully. For example: <add>In particular, when a test needs to be performed before installation use `odie` to bail out gracefully. For example: <ide> <ide> ```ruby <ide> if build.with?("qt") && build.with("qt5")
1
Python
Python
drop python 3.7 from setup.py
9a176d0f9e758ccc825669515970f5d7d8d3393a
<ide><path>setup.py <ide> <ide> # Python supported version checks. Keep right after stdlib imports to ensure we <ide> # get a sensible error for older Python versions <add># This needs to be changed to 3.8 for 1.22 release, but 3.7 is needed for LGTM. <ide> if sys.version_info[:2] < (3, 7): <del> raise RuntimeError("Python version >= 3.7 required.") <add> raise RuntimeError("Python version >= 3.8 required.") <ide> <ide> <ide> import versioneer <ide> Programming Language :: C <ide> Programming Language :: Python <ide> Programming Language :: Python :: 3 <del>Programming Language :: Python :: 3.7 <ide> Programming Language :: Python :: 3.8 <ide> Programming Language :: Python :: 3.9 <add>Programming Language :: Python :: 3.10 <ide> Programming Language :: Python :: 3 :: Only <ide> Programming Language :: Python :: Implementation :: CPython <ide> Topic :: Software Development <ide> def setup_package(): <ide> test_suite='pytest', <ide> version=versioneer.get_version(), <ide> cmdclass=cmdclass, <del> python_requires='>=3.7', <add> python_requires='>=3.8', <ide> zip_safe=False, <ide> entry_points={ <ide> 'console_scripts': f2py_cmds
1
Go
Go
make daemonbuilder.docker leaner
14215ed5a1900a88a3b17dd7cd566def50bfcbc9
<ide><path>api/client/build.go <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> cmd.ParseFlags(args, true) <ide> <ide> var ( <del> context io.ReadCloser <del> isRemote bool <del> err error <add> context io.ReadCloser <add> err error <ide> ) <ide> <ide> _, err = exec.LookPath("git") <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> } <ide> } <ide> <del> var remoteContext string <del> if isRemote { <del> remoteContext = cmd.Arg(0) <del> } <del> <ide> options := types.ImageBuildOptions{ <ide> Context: body, <ide> Memory: memory, <ide> MemorySwap: memorySwap, <ide> Tags: flTags.GetAll(), <ide> SuppressOutput: *suppressOutput, <del> RemoteContext: remoteContext, <ide> NoCache: *noCache, <ide> Remove: *rm, <ide> ForceRemove: *forceRm, <ide><path>api/server/router/build/build_routes.go <ide> import ( <ide> "github.com/docker/docker/builder" <ide> "github.com/docker/docker/builder/dockerfile" <ide> "github.com/docker/docker/daemon/daemonbuilder" <del> "github.com/docker/docker/pkg/archive" <del> "github.com/docker/docker/pkg/chrootarchive" <ide> "github.com/docker/docker/pkg/ioutils" <ide> "github.com/docker/docker/pkg/progress" <ide> "github.com/docker/docker/pkg/streamformatter" <ide> func (br *buildRouter) postBuild(ctx context.Context, w http.ResponseWriter, r * <ide> buildOptions.Dockerfile = dockerfileName <ide> } <ide> <del> uidMaps, gidMaps := br.backend.GetUIDGIDMaps() <del> defaultArchiver := &archive.Archiver{ <del> Untar: chrootarchive.Untar, <del> UIDMaps: uidMaps, <del> GIDMaps: gidMaps, <del> } <del> <del> docker := &daemonbuilder.Docker{ <del> Daemon: br.backend, <del> OutOld: output, <del> AuthConfigs: authConfigs, <del> Archiver: defaultArchiver, <del> } <del> if buildOptions.SuppressOutput { <del> docker.OutOld = notVerboseBuffer <del> } <del> <ide> b, err := dockerfile.NewBuilder( <ide> buildOptions, // result of newBuildConfig <del> docker, <add> &daemonbuilder.Docker{br.backend}, <ide> builder.DockerIgnoreContext{ModifiableContext: context}, <ide> nil) <ide> if err != nil { <ide> return errf(err) <ide> } <add> if buildOptions.SuppressOutput { <add> b.Output = notVerboseBuffer <add> } else { <add> b.Output = output <add> } <ide> b.Stdout = &streamformatter.StdoutFormatter{Writer: output, StreamFormatter: sf} <ide> b.Stderr = &streamformatter.StderrFormatter{Writer: output, StreamFormatter: sf} <ide> if buildOptions.SuppressOutput { <ide><path>builder/builder.go <ide> type Backend interface { <ide> // GetImage looks up a Docker image referenced by `name`. <ide> GetImage(name string) (Image, error) <ide> // Pull tells Docker to pull image referenced by `name`. <del> Pull(name string) (Image, error) <add> Pull(name string, authConfigs map[string]types.AuthConfig, output io.Writer) (Image, error) <ide> // ContainerAttach attaches to container. <ide> ContainerAttach(cID string, stdin io.ReadCloser, stdout, stderr io.Writer, stream bool) error <ide> // ContainerCreate creates a new Docker container and returns potential warnings <ide><path>builder/dockerfile/builder.go <ide> type Builder struct { <ide> allowedBuildArgs map[string]bool // list of build-time args that are allowed for expansion/substitution and passing to commands in 'run'. <ide> <ide> // TODO: remove once docker.Commit can receive a tag <del> id string <add> id string <add> Output io.Writer <ide> } <ide> <ide> // NewBuilder creates a new Dockerfile builder from an optional dockerfile and a Config. 
<ide><path>builder/dockerfile/dispatchers.go <ide> func from(b *Builder, args []string, attributes map[string]bool, original string <ide> // TODO: shouldn't we error out if error is different from "not found" ? <ide> } <ide> if image == nil { <del> image, err = b.docker.Pull(name) <add> image, err = b.docker.Pull(name, b.options.AuthConfigs, b.Output) <ide> if err != nil { <ide> return err <ide> } <ide><path>daemon/daemonbuilder/builder.go <ide> import ( <ide> "github.com/docker/docker/daemon" <ide> "github.com/docker/docker/image" <ide> "github.com/docker/docker/pkg/archive" <add> "github.com/docker/docker/pkg/chrootarchive" <ide> "github.com/docker/docker/pkg/httputils" <ide> "github.com/docker/docker/pkg/idtools" <ide> "github.com/docker/docker/pkg/ioutils" <ide> import ( <ide> // Docker implements builder.Backend for the docker Daemon object. <ide> type Docker struct { <ide> *daemon.Daemon <del> OutOld io.Writer <del> AuthConfigs map[string]types.AuthConfig <del> Archiver *archive.Archiver <ide> } <ide> <ide> // ensure Docker implements builder.Backend <ide> var _ builder.Backend = Docker{} <ide> <ide> // Pull tells Docker to pull image referenced by `name`. <del>func (d Docker) Pull(name string) (builder.Image, error) { <add>func (d Docker) Pull(name string, authConfigs map[string]types.AuthConfig, output io.Writer) (builder.Image, error) { <ide> ref, err := reference.ParseNamed(name) <ide> if err != nil { <ide> return nil, err <ide> } <ide> ref = reference.WithDefaultTag(ref) <ide> <ide> pullRegistryAuth := &types.AuthConfig{} <del> if len(d.AuthConfigs) > 0 { <add> if len(authConfigs) > 0 { <ide> // The request came with a full auth config file, we prefer to use that <ide> repoInfo, err := d.Daemon.RegistryService.ResolveRepository(ref) <ide> if err != nil { <ide> return nil, err <ide> } <ide> <ide> resolvedConfig := registry.ResolveAuthConfig( <del> d.AuthConfigs, <add> authConfigs, <ide> repoInfo.Index, <ide> ) <ide> pullRegistryAuth = &resolvedConfig <ide> } <ide> <del> if err := d.Daemon.PullImage(ref, nil, pullRegistryAuth, ioutils.NopWriteCloser(d.OutOld)); err != nil { <add> if err := d.Daemon.PullImage(ref, nil, pullRegistryAuth, ioutils.NopWriteCloser(output)); err != nil { <ide> return nil, err <ide> } <ide> return d.GetImage(name) <ide> func (d Docker) BuilderCopy(cID string, destPath string, src builder.FileInfo, d <ide> destExists = false <ide> } <ide> <add> uidMaps, gidMaps := d.Daemon.GetUIDGIDMaps() <add> archiver := &archive.Archiver{ <add> Untar: chrootarchive.Untar, <add> UIDMaps: uidMaps, <add> GIDMaps: gidMaps, <add> } <add> <ide> if src.IsDir() { <ide> // copy as directory <del> if err := d.Archiver.CopyWithTar(srcPath, destPath); err != nil { <add> if err := archiver.CopyWithTar(srcPath, destPath); err != nil { <ide> return err <ide> } <ide> return fixPermissions(srcPath, destPath, rootUID, rootGID, destExists) <ide> func (d Docker) BuilderCopy(cID string, destPath string, src builder.FileInfo, d <ide> } <ide> <ide> // try to successfully untar the orig <del> err := d.Archiver.UntarPath(srcPath, tarDest) <add> err := archiver.UntarPath(srcPath, tarDest) <ide> if err != nil { <ide> logrus.Errorf("Couldn't untar to %s: %v", tarDest, err) <ide> } <ide> func (d Docker) BuilderCopy(cID string, destPath string, src builder.FileInfo, d <ide> if err := idtools.MkdirAllNewAs(filepath.Dir(destPath), 0755, rootUID, rootGID); err != nil { <ide> return err <ide> } <del> if err := d.Archiver.CopyFileWithTar(srcPath, destPath); err != nil { <add> if err := 
archiver.CopyFileWithTar(srcPath, destPath); err != nil { <ide> return err <ide> } <ide>
6
PHP
PHP
fix some failing tests
2c5f2510399c366462d18a944387d0e578df8bc0
<ide><path>lib/Cake/Test/TestCase/Routing/DispatcherTest.php <ide> public function testDispatchActionReturnsResponse() { <ide> * @return void <ide> */ <ide> public function testAdminDispatch() { <del> $_POST = array(); <ide> $Dispatcher = new TestDispatcher(); <ide> Configure::write('Routing.prefixes', array('admin')); <ide> Configure::write('App.baseUrl','/cake/repo/branches/1.2.x.x/index.php'); <ide> $url = new Request('admin/test_dispatch_pages/index/param:value/param2:value2'); <ide> $response = $this->getMock('Cake\Network\Response'); <ide> <ide> Router::reload(); <add> require CAKE . 'Config' . DS . 'routes.php'; <add> <ide> $Dispatcher->dispatch($url, $response, array('return' => 1)); <ide> <ide> $this->assertEquals('TestDispatchPages', $Dispatcher->controller->name); <ide> <del> $this->assertSame($Dispatcher->controller->passedArgs, array('param' => 'value', 'param2' => 'value2')); <ide> $this->assertTrue($Dispatcher->controller->params['admin']); <ide> <ide> $expected = '/cake/repo/branches/1.2.x.x/index.php/admin/test_dispatch_pages/index/param:value/param2:value2'; <ide> public function testPluginDispatch() { <ide> $this->assertSame($Dispatcher->controller->plugin, 'MyPlugin'); <ide> $this->assertSame($Dispatcher->controller->name, 'SomePages'); <ide> $this->assertSame($Dispatcher->controller->params['controller'], 'some_pages'); <del> $this->assertSame($Dispatcher->controller->passedArgs, array('0' => 'home', 'param' => 'value', 'param2' => 'value2')); <ide> } <ide> <ide> /** <ide> public function testAutomaticPluginDispatch() { <ide> $this->assertSame($Dispatcher->controller->plugin, 'MyPlugin'); <ide> $this->assertSame($Dispatcher->controller->name, 'OtherPages'); <ide> $this->assertSame($Dispatcher->controller->action, 'index'); <del> $this->assertSame($Dispatcher->controller->passedArgs, array('param' => 'value', 'param2' => 'value2')); <ide> <ide> $expected = '/cake/repo/branches/1.2.x.x/my_plugin/other_pages/index/param:value/param2:value2'; <ide> $this->assertSame($expected, $url->here); <ide> public function testAutomaticPluginControllerDispatch() { <ide> $this->assertSame($Dispatcher->controller->action, 'admin_add'); <ide> <ide> $expected = array(0 => 5, 'param' => 'value', 'param2' => 'value2'); <del> $this->assertEquals($expected, $Dispatcher->controller->passedArgs); <ide> <ide> Configure::write('Routing.prefixes', array('admin')); <ide> Plugin::load('ArticlesTest', array('path' => '/fake/path')); <ide> public function testPluginShortCutUrlsWithControllerThatNeedsToBeLoaded() { <ide> App::build(); <ide> } <ide> <del>/** <del> * testAutomaticPluginControllerMissingActionDispatch method <del> * <del> * @expectedException Cake\Error\MissingActionException <del> * @expectedExceptionMessage Action MyPluginController::not_here() could not be found. <del> * @return void <del> */ <del> public function testAutomaticPluginControllerMissingActionDispatch() { <del> Router::reload(); <del> $Dispatcher = new TestDispatcher(); <del> <del> $url = new Request('my_plugin/not_here/param:value/param2:value2'); <del> $response = $this->getMock('Cake\Network\Response'); <del> <del> $Dispatcher->dispatch($url, $response, array('return' => 1)); <del> } <del> <del>/** <del> * testAutomaticPluginControllerMissingActionDispatch method <del> * <del> * @expectedException Cake\Error\MissingActionException <del> * @expectedExceptionMessage Action MyPluginController::param:value() could not be found. 
<del> * @return void <del> */ <del> <del> public function testAutomaticPluginControllerIndexMissingAction() { <del> Router::reload(); <del> $Dispatcher = new TestDispatcher(); <del> <del> $url = new Request('my_plugin/param:value/param2:value2'); <del> $response = $this->getMock('Cake\Network\Response'); <del> <del> $Dispatcher->dispatch($url, $response, array('return' => 1)); <del> } <del> <ide> /** <ide> * Test dispatching into the TestPlugin in the TestApp <ide> *
1
PHP
PHP
fix error message
6662f49fe83d0898cddb8d1beee5c1a507a92514
<ide><path>src/Illuminate/Foundation/Console/VendorPublishCommand.php <ide> protected function publishTag($tag) <ide> } <ide> <ide> if ($published === false) { <del> $this->error('Unable to locate publishable resources.'); <add> $this->comment('No publishable resources for tag ['.$tag.'].'); <ide> } else { <ide> $this->laravel['events']->dispatch(new VendorTagPublished($tag, $pathsToPublish)); <ide> }
1
Javascript
Javascript
use descriptive names for regression tests
90b05382734aca10b51b187eb955a964cbcaed74
<add><path>test/parallel/test-crypto-tostring-segfault.js <del><path>test/parallel/test-regress-GH-9819.js <ide> const common = require('../common'); <ide> if (!common.hasCrypto) <ide> common.skip('missing crypto'); <ide> <add>// This test ensures that node doesn't SEGFAULT when either of <add>// `crypto.createHash` or `crypto.createHmac` are given an object that defines <add>// a throwing `toString`. <add>// https://github.com/nodejs/node/issues/9819 <add> <ide> const assert = require('assert'); <ide> const execFile = require('child_process').execFile; <ide> <add><path>test/parallel/test-http-addrequest-localaddress.js <del><path>test/parallel/test-regress-GH-5051.js <ide> 'use strict'; <ide> require('../common'); <add> <add>// This test ensures that `addRequest`'s Legacy API accepts `localAddress` <add>// correctly instead of accepting `path`. <add>// https://github.com/nodejs/node/issues/5051 <add> <ide> const assert = require('assert'); <ide> const agent = require('http').globalAgent; <ide> <add><path>test/parallel/test-net-listen-invalid-port.js <del><path>test/parallel/test-regress-GH-5727.js <ide> 'use strict'; <ide> const common = require('../common'); <add> <add>// This test ensures that port numbers are validated in *all* kinds of `listen` <add>// calls. If an invalid port is supplied, ensures a `RangeError` is thrown. <add>// https://github.com/nodejs/node/issues/5727 <add> <ide> const assert = require('assert'); <ide> const net = require('net'); <ide> <add><path>test/parallel/test-tty-stdin-pipe.js <del><path>test/parallel/test-regress-GH-5927.js <ide> <ide> 'use strict'; <ide> require('../common'); <add> <add>// This test ensures piping from `stdin` isn't broken. <add>// https://github.com/nodejs/node/issues/5927 <add> <ide> const assert = require('assert'); <ide> const readline = require('readline'); <ide> <add><path>test/parallel/test-v8-global-setter.js <del><path>test/parallel/test-regress-GH-6235.js <ide> 'use strict'; <ide> require('../common'); <ide> <add>// This test ensures v8 correctly sets a property on the global object if it <add>// has a setter interceptor in strict mode. <add>// https://github.com/nodejs/node-v0.x-archive/issues/6235 <add> <ide> require('vm').runInNewContext('"use strict"; var v = 1; v = 2');
5
Python
Python
change topkcat to call sparsetopkcat
ccc9357778bf3ee6d1ae7c7b77e744586a4401d8
<ide><path>keras/metrics/metrics.py <ide> def top_k_categorical_accuracy(y_true, y_pred, k=5): <ide> Returns: <ide> Top K categorical accuracy value. <ide> """ <del> return tf.cast( <del> tf.math.in_top_k( <del> predictions=y_pred, targets=tf.math.argmax(y_true, axis=-1), k=k), <del> dtype=backend.floatx()) <add> y_true = tf.math.argmax(y_true, axis=-1) <add> return sparse_top_k_categorical_accuracy(y_true, y_pred, k=k) <ide> <ide> <ide> @keras_export('keras.metrics.sparse_top_k_categorical_accuracy')
1
Java
Java
avoid use of double constructor of bigdecimal
3f97ab183e08ee0001ee98b9614770dad8d29155
<ide><path>spring-jdbc/src/test/java/org/springframework/jdbc/core/JdbcTemplateQueryTests.java <ide> /* <del> * Copyright 2002-2015 the original author or authors. <add> * Copyright 2002-2016 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public void testQueryForObjectWithBigInteger() throws Exception { <ide> public void testQueryForObjectWithBigDecimal() throws Exception { <ide> String sql = "SELECT AGE FROM CUSTMR WHERE ID = 3"; <ide> given(this.resultSet.next()).willReturn(true, false); <del> given(this.resultSet.getBigDecimal(1)).willReturn(new BigDecimal(22.5)); <del> assertEquals(new BigDecimal(22.5), this.template.queryForObject(sql, BigDecimal.class)); <add> given(this.resultSet.getBigDecimal(1)).willReturn(new BigDecimal("22.5")); <add> assertEquals(new BigDecimal("22.5"), this.template.queryForObject(sql, BigDecimal.class)); <ide> verify(this.resultSet).close(); <ide> verify(this.statement).close(); <ide> }
1
Python
Python
replace deprecated options for ifort
713147a1c1bddfa4124f8e7396348e66c4dcbb67
<ide><path>numpy/distutils/fcompiler/intel.py <ide> def update_executables(self): <ide> module_include_switch = '/I' <ide> <ide> def get_flags(self): <del> opt = ['/nologo', '/MD', '/nbs', '/Qlowercase', '/us'] <add> opt = ['/nologo', '/MD', '/nbs', '/names:lowercase', '/assume:underscore'] <ide> return opt <ide> <ide> def get_flags_free(self):
1
PHP
PHP
add timestamps column to migration stub
a0ac97b8a770f7768e0545f770dfe2b65015794f
<ide><path>src/Illuminate/Database/Migrations/stubs/create.php <ide> public function up() <ide> Schema::create('{{table}}', function(Blueprint $table) <ide> { <ide> $table->increments('id'); <add> $table->timestamps(); <ide> }); <ide> } <ide> <ide> public function down() <ide> Schema::drop('{{table}}'); <ide> } <ide> <del>} <ide>\ No newline at end of file <add>}
1
Python
Python
add tests for dictionary interface to npzfile
a3a99e2f84e36ef876094494464e6bcecb7ee0ab
<ide><path>numpy/lib/tests/test_io.py <ide> def test_gzip_loadtxt_from_string(): <ide> f = gzip.GzipFile(fileobj=s, mode="r") <ide> assert_array_equal(np.loadtxt(f), [1, 2, 3]) <ide> <add>def test_npzfile_dict(): <add> s = StringIO.StringIO() <add> x = np.zeros((3, 3)) <add> y = np.zeros((3, 3)) <add> <add> np.savez(s, x=x, y=y) <add> s.seek(0) <add> <add> z = np.load(s) <add> <add> assert 'x' in z <add> assert 'y' in z <add> assert 'x' in z.keys() <add> assert 'y' in z.keys() <add> <add> for f, a in z.iteritems(): <add> assert f in ['x', 'y'] <add> assert_equal(a.shape, (3, 3)) <add> <add> assert len(z.items()) == 2 <add> <add> for f in z: <add> assert f in ['x', 'y'] <add> <add> assert 'x' in list(z.iterkeys()) <add> <ide> if __name__ == "__main__": <ide> run_module_suite()
1
Mixed
Python
fix issues with dvc commands
858565a5671de61334443d6a2348164bc39216e1
<ide><path>spacy/cli/project/dvc.py <ide> def project_update_dvc_cli( <ide> project_dir: Path = Arg(Path.cwd(), help="Location of project directory. Defaults to current working directory.", exists=True, file_okay=False), <ide> workflow: Optional[str] = Arg(None, help=f"Name of workflow defined in {PROJECT_FILE}. Defaults to first workflow if not set."), <ide> verbose: bool = Opt(False, "--verbose", "-V", help="Print more info"), <add> quiet: bool = Opt(False, "--quiet", "-q", help="Print less info"), <ide> force: bool = Opt(False, "--force", "-F", help="Force update DVC config"), <ide> # fmt: on <ide> ): <ide> def project_update_dvc_cli( <ide> <ide> DOCS: https://spacy.io/api/cli#project-dvc <ide> """ <del> project_update_dvc(project_dir, workflow, verbose=verbose, force=force) <add> project_update_dvc(project_dir, workflow, verbose=verbose, quiet=quiet, force=force) <ide> <ide> <ide> def project_update_dvc( <ide> project_dir: Path, <ide> workflow: Optional[str] = None, <ide> *, <ide> verbose: bool = False, <add> quiet: bool = False, <ide> force: bool = False, <ide> ) -> None: <ide> """Update the auto-generated Data Version Control (DVC) config file. A DVC <ide> def project_update_dvc( <ide> workflow (Optional[str]): Optional name of workflow defined in project.yml. <ide> If not set, the first workflow will be used. <ide> verbose (bool): Print more info. <add> quiet (bool): Print less info. <ide> force (bool): Force update DVC config. <ide> """ <ide> config = load_project_config(project_dir) <ide> updated = update_dvc_config( <del> project_dir, config, workflow, verbose=verbose, force=force <add> project_dir, config, workflow, verbose=verbose, quiet=quiet, force=force <ide> ) <ide> help_msg = "To execute the workflow with DVC, run: dvc repro" <ide> if updated: <ide> def update_dvc_config( <ide> config: Dict[str, Any], <ide> workflow: Optional[str] = None, <ide> verbose: bool = False, <del> silent: bool = False, <add> quiet: bool = False, <ide> force: bool = False, <ide> ) -> bool: <ide> """Re-run the DVC commands in dry mode and update dvc.yaml file in the <ide> def update_dvc_config( <ide> path (Path): The path to the project directory. <ide> config (Dict[str, Any]): The loaded project.yml. <ide> verbose (bool): Whether to print additional info (via DVC). <del> silent (bool): Don't output anything (via DVC). <add> quiet (bool): Don't output anything (via DVC). <ide> force (bool): Force update, even if hashes match. <ide> RETURNS (bool): Whether the DVC config file was updated.
<ide> """ <ide> def update_dvc_config( <ide> dvc_config_path.unlink() <ide> dvc_commands = [] <ide> config_commands = {cmd["name"]: cmd for cmd in config.get("commands", [])} <add> <add> # some flags that apply to every command <add> flags = [] <add> if verbose: <add> flags.append("--verbose") <add> if quiet: <add> flags.append("--quiet") <add> <ide> for name in workflows[workflow]: <ide> command = config_commands[name] <ide> deps = command.get("deps", []) <ide> def update_dvc_config( <ide> deps_cmd = [c for cl in [["-d", p] for p in deps] for c in cl] <ide> outputs_cmd = [c for cl in [["-o", p] for p in outputs] for c in cl] <ide> outputs_nc_cmd = [c for cl in [["-O", p] for p in outputs_no_cache] for c in cl] <del> dvc_cmd = ["run", "-n", name, "-w", str(path), "--no-exec"] <add> <add> dvc_cmd = ["run", *flags, "-n", name, "-w", str(path), "--no-exec"] <ide> if command.get("no_skip"): <ide> dvc_cmd.append("--always-changed") <ide> full_cmd = [*dvc_cmd, *deps_cmd, *outputs_cmd, *outputs_nc_cmd, *project_cmd] <ide> dvc_commands.append(join_command(full_cmd)) <add> <add> if not dvc_commands: <add> # If we don't check for this, then there will be an error when reading the <add> # config, since DVC wouldn't create it. <add> msg.fail( <add> "No usable commands for DVC found. This can happen if none of your " <add> "commands have dependencies or outputs.", <add> exits=1, <add> ) <add> <ide> with working_dir(path): <del> dvc_flags = {"--verbose": verbose, "--quiet": silent} <del> run_dvc_commands(dvc_commands, flags=dvc_flags) <add> for c in dvc_commands: <add> dvc_command = "dvc " + c <add> run_command(dvc_command) <ide> with dvc_config_path.open("r+", encoding="utf8") as f: <ide> content = f.read() <ide> f.seek(0, 0) <ide> f.write(f"# {config_hash}\n{DVC_CONFIG_COMMENT}\n{content}") <ide> return True <ide> <ide> <del>def run_dvc_commands( <del> commands: Iterable[str] = SimpleFrozenList(), flags: Dict[str, bool] = {} <del>) -> None: <del> """Run a sequence of DVC commands in a subprocess, in order. <del> <del> commands (List[str]): The string commands without the leading "dvc". <del> flags (Dict[str, bool]): Conditional flags to be added to command. Makes it <del> easier to pass flags like --quiet that depend on a variable or <del> command-line setting while avoiding lots of nested conditionals. <del> """ <del> for c in commands: <del> command = split_command(c) <del> dvc_command = ["dvc", *command] <del> # Add the flags if they are set to True <del> for flag, is_active in flags.items(): <del> if is_active: <del> dvc_command.append(flag) <del> run_command(dvc_command) <del> <del> <ide> def check_workflows(workflows: List[str], workflow: Optional[str] = None) -> None: <ide> """Validate workflows provided in project.yml and check that a given <ide> workflow can be used to generate a DVC config. <ide><path>website/docs/api/cli.md <ide> You'll also need to add the assets you want to track with <ide> </Infobox> <ide> <ide> ```cli <del>$ python -m spacy project dvc [project_dir] [workflow] [--force] [--verbose] <add>$ python -m spacy project dvc [project_dir] [workflow] [--force] [--verbose] [--quiet] <ide> ``` <ide> <ide> > #### Example <ide> $ python -m spacy project dvc [project_dir] [workflow] [--force] [--verbose] <ide> | `workflow` | Name of workflow defined in `project.yml`. Defaults to first workflow if not set. ~~Optional[str] \(option)~~ | <ide> | `--force`, `-F` | Force-updating config file. ~~bool (flag)~~ | <ide> | `--verbose`, `-V` | Print more output generated by DVC.
~~bool (flag)~~ | <add>| `--quiet`, `-q` | Print no output generated by DVC. ~~bool (flag)~~ | <ide> | `--help`, `-h` | Show help message and available arguments. ~~bool (flag)~~ | <ide> | **CREATES** | A `dvc.yaml` file in the project directory, based on the steps defined in the given workflow. | <ide>
2
Javascript
Javascript
update component names
6c41dd2b7cd3f314f3db3197cf18dbf52df69027
<ide><path>client/src/pages/donate-other.js <ide> const paypalOneTimeDonation = { <ide> defaultValue: 'Make a one-time donation' <ide> }; <ide> <del>class IndexPage extends Component { <add>class DonateOtherPage extends Component { <ide> <ide> renderForm(item) { <ide> return ( <ide> class IndexPage extends Component { <ide> } <ide> } <ide> <del>IndexPage.displayName = 'IndexPage'; <add>DonateOtherPage.displayName = 'DonateOtherPage'; <ide> <del>export default IndexPage; <add>export default DonateOtherPage; <ide><path>client/src/pages/donate.js <ide> import React, { Component, Fragment } from 'react'; <ide> import Helmet from 'react-helmet'; <ide> import { StripeProvider, Elements } from 'react-stripe-elements'; <ide> import { Row, Col } from '@freecodecamp/react-bootstrap'; <add>import { Link } from 'gatsby'; <ide> <ide> import { stripePublicKey } from '../../config/env.json'; <ide> <ide> import PoweredByStripe from '../components/Donation/components/poweredByStripe'; <ide> <ide> import './index.css'; <ide> <del>class IndexPage extends Component { <add>class DonatePage extends Component { <ide> constructor(...props) { <ide> super(...props); <ide> this.state = { <ide> class IndexPage extends Component { <ide> </Elements> <ide> </StripeProvider> <ide> <div className='text-center'> <del> <a href='/donate-other'>Other ways to donate.</a> <add> <Link to='/donate-other'>Other ways to donate.</Link> <ide> <Spacer /> <ide> <PoweredByStripe /> <ide> </div> <ide> class IndexPage extends Component { <ide> } <ide> } <ide> <del>IndexPage.displayName = 'IndexPage'; <add>DonatePage.displayName = 'DonatePage'; <ide> <del>export default IndexPage; <add>export default DonatePage;
2
Ruby
Ruby
set the destination_root for plugins
1a566c87cb75b22be28917b0dd24fd7112d3235d
<ide><path>railties/lib/rails/generators/rails/plugin/plugin_generator.rb <ide> def initialize(*args) <ide> end <ide> end <ide> <add> public_task :set_default_accessors! <ide> public_task :create_root <ide> <ide> def create_root_files
1
Mixed
Python
add init cli and init config
4c055f0aa703974ff3d14fb4ea5966c226013a1d
<ide><path>spacy/cli/__init__.py <ide> from .evaluate import evaluate # noqa: F401 <ide> from .convert import convert # noqa: F401 <ide> from .init_model import init_model # noqa: F401 <add>from .init_config import init_config # noqa: F401 <ide> from .validate import validate # noqa: F401 <ide> from .project.clone import project_clone # noqa: F401 <ide> from .project.assets import project_assets # noqa: F401 <ide><path>spacy/cli/_util.py <ide> commands to check and validate your config files, training and evaluation data, <ide> and custom model implementations. <ide> """ <add>INIT_HELP = """Commands for initializing configs and models.""" <ide> <ide> # Wrappers for Typer's annotations. Initially created to set defaults and to <ide> # keep the names short, but not needed at the moment. <ide> app = typer.Typer(name=NAME, help=HELP) <ide> project_cli = typer.Typer(name="project", help=PROJECT_HELP, no_args_is_help=True) <ide> debug_cli = typer.Typer(name="debug", help=DEBUG_HELP, no_args_is_help=True) <add>init_cli = typer.Typer(name="init", help=INIT_HELP, no_args_is_help=True) <ide> <ide> app.add_typer(project_cli) <ide> app.add_typer(debug_cli) <add>app.add_typer(init_cli) <ide> <ide> <ide> def setup_cli() -> None: <ide> def get_checksum(path: Union[Path, str]) -> str: <ide> <ide> <ide> @contextmanager <del>def show_validation_error(title: str = "Config validation error"): <add>def show_validation_error( <add> file_path: Optional[Union[str, Path]] = None, <add> *, <add> title: str = "Config validation error", <add> hint_init: bool = True, <add>): <ide> """Helper to show custom config validation errors on the CLI. <ide> <add> file_path (str / Path): Optional file path of config file, used in hints. <ide> title (str): Title of the custom formatted error. <add> hint_init (bool): Show hint about filling config.
<ide> """ <ide> try: <ide> yield <ide> except (ConfigValidationError, InterpolationError) as e: <ide> msg.fail(title, spaced=True) <del> print(str(e).replace("Config validation error", "").strip()) <add> # TODO: This is kinda hacky and we should probably provide a better <add> # helper for this in Thinc <add> err_text = str(e).replace("Config validation error", "").strip() <add> print(err_text) <add> if hint_init and "field required" in err_text: <add> config_path = file_path if file_path is not None else "config.cfg" <add> msg.text( <add> "If your config contains missing values, you can run the 'init " <add> "config' command to fill in all the defaults, if possible:", <add> spaced=True, <add> ) <add> print(f"{COMMAND} init config {config_path} --base {config_path}\n") <ide> sys.exit(1) <ide> <ide> <ide><path>spacy/cli/debug_data.py <ide> def debug_config_cli( <ide> ctx: typer.Context, # This is only used to read additional arguments <ide> config_path: Path = Arg(..., help="Path to config file", exists=True), <ide> code_path: Optional[Path] = Opt(None, "--code-path", "-c", help="Path to Python file with additional code (registered functions) to be imported"), <del> output_path: Optional[Path] = Opt(None, "--output", "-o", help="Output path for filled config or '-' for standard output", allow_dash=True), <ide> auto_fill: bool = Opt(False, "--auto-fill", "-F", help="Whether or not to auto-fill the config with built-in defaults if possible"), <ide> diff: bool = Opt(False, "--diff", "-D", help="Show a visual diff if config was auto-filled") <ide> # fmt: on <ide> def debug_config_cli( <ide> """ <ide> overrides = parse_config_overrides(ctx.args) <ide> import_code(code_path) <del> with show_validation_error(): <add> with show_validation_error(config_path): <ide> config = Config().from_disk(config_path) <ide> try: <ide> nlp, _ = util.load_model_from_config( <ide> config, overrides=overrides, auto_fill=auto_fill <ide> ) <ide> except ValueError as e: <ide> msg.fail(str(e), exits=1) <del> is_stdout = output_path is not None and str(output_path) == "-" <ide> if auto_fill: <ide> orig_config = config.to_str() <ide> filled_config = nlp.config.to_str() <ide> def debug_config_cli( <ide> if diff: <ide> print(diff_strings(config.to_str(), nlp.config.to_str())) <ide> else: <del> msg.good("Original config is valid", show=not is_stdout) <del> if is_stdout: <del> print(nlp.config.to_str()) <del> elif output_path is not None: <del> nlp.config.to_disk(output_path) <del> msg.good(f"Saved updated config to {output_path}") <add> msg.good("Original config is valid") <ide> <ide> <ide> @debug_cli.command( <ide> def debug_data( <ide> msg.fail("Development data not found", dev_path, exits=1) <ide> if not config_path.exists(): <ide> msg.fail("Config file not found", config_path, exists=1) <del> with show_validation_error(): <add> with show_validation_error(config_path): <ide> cfg = Config().from_disk(config_path) <ide> nlp, config = util.load_model_from_config(cfg, overrides=config_overrides) <ide> # TODO: handle base model <ide><path>spacy/cli/debug_model.py <ide> def debug_model_cli( <ide> "print_prediction": P3, <ide> } <ide> config_overrides = parse_config_overrides(ctx.args) <del> cfg = Config().from_disk(config_path) <del> with show_validation_error(): <add> with show_validation_error(config_path): <add> cfg = Config().from_disk(config_path) <ide> try: <ide> _, config = util.load_model_from_config(cfg, overrides=config_overrides) <ide> except ValueError as e: <ide><path>spacy/cli/init_config.py <add>from typing
import Optional, List <add>from pathlib import Path <add>from thinc.api import Config <add>from wasabi import msg <add> <add>from ..util import load_model_from_config, get_lang_class, load_model <add>from ._util import init_cli, Arg, Opt, show_validation_error <add> <add> <add>@init_cli.command("config") <add>def init_config_cli( <add> # fmt: off <add> output_path: Path = Arg("-", help="Output path or - for stdout", allow_dash=True), <add> base_path: Optional[Path] = Opt(None, "--base", "-b", help="Optional base config to fill", exists=True, dir_okay=False), <add> model: Optional[str] = Opt(None, "--model", "-m", help="Optional model to copy config from"), <add> lang: Optional[str] = Opt(None, "--lang", "-l", help="Optional language code for blank config"), <add> pipeline: Optional[str] = Opt(None, "--pipeline", "-p", help="Optional pipeline components to use") <add> # fmt: on <add>): <add> """Generate a starter config.cfg for training.""" <add> validate_cli_args(base_path, model, lang) <add> is_stdout = str(output_path) == "-" <add> pipeline = [p.strip() for p in pipeline.split(",")] if pipeline else [] <add> cfg = init_config(output_path, base_path, model, lang, pipeline, silent=is_stdout) <add> if is_stdout: <add> print(cfg.to_str()) <add> else: <add> cfg.to_disk(output_path) <add> msg.good("Saved config", output_path) <add> <add> <add>def init_config( <add> output_path: Path, <add> config_path: Optional[Path], <add> model: Optional[str], <add> lang: Optional[str], <add> pipeline: Optional[List[str]], <add> silent: bool = False, <add>) -> Config: <add> if config_path is not None: <add> msg.info("Generating config from base config", show=not silent) <add> with show_validation_error(config_path, hint_init=False): <add> config = Config().from_disk(config_path) <add> try: <add> nlp, _ = load_model_from_config(config, auto_fill=True) <add> except ValueError as e: <add> msg.fail(str(e), exits=1) <add> return nlp.config <add> if model is not None: <add> ext = f" with pipeline {pipeline}" if pipeline else "" <add> msg.info(f"Generating config from model {model}{ext}", show=not silent) <add> nlp = load_model(model) <add> for existing_pipe_name in nlp.pipe_names: <add> if existing_pipe_name not in pipeline: <add> nlp.remove_pipe(existing_pipe_name) <add> for pipe_name in pipeline: <add> if pipe_name not in nlp.pipe_names: <add> nlp.add_pipe(pipe_name) <add> return nlp.config <add> if lang is not None: <add> ext = f" with pipeline {pipeline}" if pipeline else "" <add> msg.info(f"Generating config for language '{lang}'{ext}", show=not silent) <add> nlp = get_lang_class(lang)() <add> for pipe_name in pipeline: <add> nlp.add_pipe(pipe_name) <add> return nlp.config <add> <add> <add>def validate_cli_args( <add> config_path: Optional[Path], model: Optional[str], lang: Optional[str] <add>) -> None: <add> args = {"--base": config_path, "--model": model, "--lang": lang} <add> if sum(arg is not None for arg in args.values()) != 1: <add> existing = " ".join(f"{a} {v}" for a, v in args.items() if v is not None) <add> msg.fail( <add> "The init config command expects only one of the following arguments: " <add> "--base (base config to fill and update), --lang (language code to " <add> "use for blank config) or --model (base model to copy config from).", <add> f"Got: {existing if existing else 'no arguments'}", <add> exits=1, <add> ) <ide><path>spacy/cli/init_model.py <ide> import zipfile <ide> import srsly <ide> import warnings <del>from wasabi import Printer <add>from wasabi import msg, Printer <add>import
typer <ide> <del>from ._util import app, Arg, Opt <add>from ._util import app, init_cli, Arg, Opt <ide> from ..vectors import Vectors <ide> from ..errors import Errors, Warnings <ide> from ..language import Language <ide> from ..util import ensure_path, get_lang_class, load_model, OOV_RANK <del>from ..lookups import Lookups <ide> <ide> try: <ide> import ftfy <ide> DEFAULT_OOV_PROB = -20 <ide> <ide> <del>@app.command("init-model") <add>@init_cli.command("model") <add>@app.command( <add> "init-model", <add> context_settings={"allow_extra_args": True, "ignore_unknown_options": True}, <add> hidden=True, # hide this from main CLI help but still allow it to work with warning <add>) <ide> def init_model_cli( <ide> # fmt: off <add> ctx: typer.Context, # This is only used to read additional arguments <ide> lang: str = Arg(..., help="Model language"), <ide> output_dir: Path = Arg(..., help="Model output directory"), <ide> freqs_loc: Optional[Path] = Arg(None, help="Location of words frequencies file", exists=True), <ide> def init_model_cli( <ide> Create a new model from raw data. If vectors are provided in Word2Vec format, <ide> they can be either a .txt or zipped as a .zip or .tar.gz. <ide> """ <add> if ctx.command.name == "init-model": <add> msg.warn( <add> "The init-model command is now available via the 'init model' " <add> "subcommand (without the hyphen). You can run python -m spacy init " <add> "--help for an overview of the other available initialization commands." <add> ) <ide> init_model( <ide> lang, <ide> output_dir, <ide><path>spacy/cli/pretrain.py <ide> def pretrain( <ide> else: <ide> msg.info("Using CPU") <ide> msg.info(f"Loading config from: {config_path}") <del> config = Config().from_disk(config_path) <del> with show_validation_error(): <add> with show_validation_error(config_path): <add> config = Config().from_disk(config_path) <ide> nlp, config = util.load_model_from_config(config, overrides=config_overrides) <ide> # TODO: validate that [pretraining] block exists <ide> if not output_dir.exists(): <ide><path>spacy/cli/train.py <ide> def train( <ide> else: <ide> msg.info("Using CPU") <ide> msg.info(f"Loading config and nlp from: {config_path}") <del> config = Config().from_disk(config_path) <add> with show_validation_error(config_path): <add> config = Config().from_disk(config_path) <ide> if config.get("training", {}).get("seed") is not None: <ide> fix_random_seed(config["training"]["seed"]) <del> with show_validation_error(): <add> with show_validation_error(config_path): <ide> nlp, config = util.load_model_from_config(config, overrides=config_overrides) <ide> if config["training"]["base_model"]: <ide> # TODO: do something to check base_nlp against regular nlp described in config? <ide> def create_evaluation_callback( <ide> cfg: Union[Config, Dict[str, Any]], <ide> ) -> Callable[[], Tuple[float, Dict[str, float]]]: <ide> def evaluate() -> Tuple[float, Dict[str, float]]: <del> dev_examples = corpus.dev_dataset( <del> nlp, gold_preproc=cfg["gold_preproc"] <del> ) <add> dev_examples = corpus.dev_dataset(nlp, gold_preproc=cfg["gold_preproc"]) <ide> dev_examples = list(dev_examples) <ide> n_words = sum(len(ex.predicted) for ex in dev_examples) <ide> batch_size = cfg["eval_batch_size"] <ide><path>spacy/cli/validate.py <ide> @app.command("validate") <ide> def validate_cli(): <ide> """ <del> Validate that the currently installed version of spaCy is compatible <del> with the installed models. Should be run after `pip install -U spacy`.
<add> Validate the currently installed models and spaCy version. Checks if the <add> installed models are compatible and shows upgrade instructions if available. <add> Should be run after `pip install -U spacy`. <ide> """ <ide> validate() <ide> <ide><path>website/docs/api/cli.md <ide> menu: <ide> - ['Download', 'download'] <ide> - ['Info', 'info'] <ide> - ['Validate', 'validate'] <add> - ['Init', 'init'] <ide> - ['Convert', 'convert'] <ide> - ['Debug', 'debug'] <ide> - ['Train', 'train'] <ide> - ['Pretrain', 'pretrain'] <del> - ['Init Model', 'init-model'] <ide> - ['Evaluate', 'evaluate'] <ide> - ['Package', 'package'] <ide> - ['Project', 'project'] <ide> $ python -m spacy validate <ide> | ---------- | -------- | --------------------------------------------------------- | <ide> | **PRINTS** | `stdout` | Details about the compatibility of your installed models. | <ide> <add>## Init {#init new="3"} <add> <add>The `spacy init` CLI includes helpful commands for initializing training config <add>files and model directories. <add> <add>### init config {#init-config new="3"} <add> <add>Initialize and export a [`config.cfg` file](/usage/training#config) for training <add>and update it with all default values, if possible. Config files used for <add>training should always be complete and not contain any hidden defaults or <add>missing values, so this command helps you create your final config. It takes <add>**one** of the following options: <add> <add>- `--base`: Base **config** to auto-fill, e.g. created using the <add> [training quickstart](/usage/training#quickstart) widget. <add>- `--lang`: Base **language** code to use for blank config. <add>- `--model`: Base **model** to copy config from. <add> <add>> ```bash <add>> ### with base config {wrap="true"} <add>> $ python -m spacy init config config.cfg --base base.cfg <add>> ``` <add>> <add>> ```bash <add>> ### blank language {wrap="true"} <add>> $ python -m spacy init config config.cfg --lang en --pipeline tagger,parser <add>> ``` <add> <add>```bash <add>$ python -m spacy init config [output] [--base] [--lang] [--model] [--pipeline] <add>``` <add> <add>| Argument | Type | Description | <add>| ------------------ | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | <add>| `output` | positional | Path to output `.cfg` file. If not set, the config is written to stdout so you can pipe it forward to a file. | <add>| `--base`, `-b` | option | Optional base config file to auto-fill with defaults. | <add>| `--lang`, `-l` | option | Optional language code to use for blank config. If a `--pipeline` is specified, the components will be added in order. | <add>| `--model`, `-m` | option | Optional base model to copy config from. If a `--pipeline` is specified, only those components will be kept, and all other components not in the model will be added. | <add>| `--pipeline`, `-p` | option | Optional comma-separate pipeline of components to add to blank language or model. | <add>| **CREATES** | config | Complete and auto-filled config file for training. | <add> <add>### init model {#init-model new="2"} <add> <add><!-- TODO: update for v3 --> <add> <add>Create a new model directory from raw data, like word frequencies, Brown <add>clusters and word vectors. This command is similar to the `spacy model` command <add>in v1.x.
Note that in order to populate the model's vocab, you need to pass in a <add>JSONL-formatted [vocabulary file](/api/data-formats#vocab-jsonl) as <add>`--jsonl-loc` with optional `id` values that correspond to the vectors table. <add>Just loading in vectors will not automatically populate the vocab. <add> <add><Infobox title="New in v3.0" variant="warning"> <add> <add>The `init-model` command is now available as a subcommand of `spacy init`. <add> <add></Infobox> <add> <add>```bash <add>$ python -m spacy init model [lang] [output_dir] [--jsonl-loc] [--vectors-loc] <add>[--prune-vectors] <add>``` <add> <add>| Argument | Type | Description | <add>| ------------------------------------------------------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | <add>| `lang` | positional | Model language [ISO code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes), e.g. `en`. | <add>| `output_dir` | positional | Model output directory. Will be created if it doesn't exist. | <add>| `--jsonl-loc`, `-j` | option | Optional location of JSONL-formatted [vocabulary file](/api/data-formats#vocab-jsonl) with lexical attributes. | <add>| `--vectors-loc`, `-v` | option | Optional location of vectors. Should be a file where the first row contains the dimensions of the vectors, followed by a space-separated Word2Vec table. File can be provided in `.txt` format or as a zipped text file in `.zip` or `.tar.gz` format. | <add>| `--truncate-vectors`, `-t` <Tag variant="new">2.3</Tag> | option | Number of vectors to truncate to when reading in vectors file. Defaults to `0` for no truncation. | <add>| `--prune-vectors`, `-V` | option | Number of vectors to prune the vocabulary to. Defaults to `-1` for no pruning. | <add>| `--vectors-name`, `-vn` | option | Name to assign to the word vectors in the `meta.json`, e.g. `en_core_web_md.vectors`. | <add>| **CREATES** | model | A spaCy model containing the vocab and vectors. | <add> <ide> ## Convert {#convert} <ide> <ide> Convert files into spaCy's <ide> tokenization can be provided. <ide> {"tokens": ["If", "tokens", "are", "provided", "then", "we", "can", "skip", "the", "raw", "input", "text"]} <ide> ``` <ide> <del>## Init Model {#init-model new="2"} <del> <del>Create a new model directory from raw data, like word frequencies, Brown <del>clusters and word vectors. This command is similar to the `spacy model` command <del>in v1.x. Note that in order to populate the model's vocab, you need to pass in a <del>JSONL-formatted [vocabulary file](/api/data-formats#vocab-jsonl) as <del>`--jsonl-loc` with optional `id` values that correspond to the vectors table. <del>Just loading in vectors will not automatically populate the vocab. <del> <del>```bash <del>$ python -m spacy init-model [lang] [output_dir] [--jsonl-loc] [--vectors-loc] <del>[--prune-vectors] <del>``` <del> <del>| Argument | Type | Description | <del>| ----------------------------------------------------------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | <del>| `lang` | positional | Model language [ISO code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes), e.g. `en`.
| <del>| `output_dir` | positional | Model output directory. Will be created if it doesn't exist. | <del>| `--jsonl-loc`, `-j` | option | Optional location of JSONL-formatted [vocabulary file](/api/data-formats#vocab-jsonl) with lexical attributes. | <del>| `--vectors-loc`, `-v` | option | Optional location of vectors. Should be a file where the first row contains the dimensions of the vectors, followed by a space-separated Word2Vec table. File can be provided in `.txt` format or as a zipped text file in `.zip` or `.tar.gz` format. | <del>| `--truncate-vectors`, `-t` <Tag variant="new">2.3</Tag> | option | Number of vectors to truncate to when reading in vectors file. Defaults to `0` for no truncation. | <del>| `--prune-vectors`, `-V` | option | Number of vectors to prune the vocabulary to. Defaults to `-1` for no pruning. | <del>| `--vectors-name`, `-vn` | option | Name to assign to the word vectors in the `meta.json`, e.g. `en_core_web_md.vectors`. | <del>| `--omit-extra-lookups`, `-OEL` <Tag variant="new">2.3</Tag> | flag | Do not include any of the extra lookups tables (`cluster`/`prob`/`sentiment`) from `spacy-lookups-data` in the model. | <del>| **CREATES** | model | A spaCy model containing the vocab and vectors. | <del> <ide> ## Evaluate {#evaluate new="2"} <ide> <ide> <!-- TODO: document new evaluate command --> <ide><path>website/docs/usage/training.md <ide> following data and information: <ide> 2. A [`config.cfg`](#config) **configuration file** with all settings and <ide> hyperparameters. <ide> 3. An optional **Python file** to register <del> [custom models and architectures](#custom-models). <add> [custom functions and architectures](#custom-code). <ide> <del><!-- TODO: decide how we want to present the "getting started" workflow here, get a default config etc. --> <ide> <ide> ```bash <ide> $ python -m spacy train train.spacy dev.spacy config.cfg --output ./output <ide> ``` <ide> <del>> #### Tip: Debug your data <del>> <del>> The [`debug-data` command](/api/cli#debug-data) lets you analyze and validate <del>> your training and development data, get useful stats, and find problems like <del>> invalid entity annotations, cyclic dependencies, low data labels and more. <del>> <del>> ```bash <del>> $ python -m spacy debug-data en train.spacy dev.spacy --verbose <del>> ``` <del> <ide> <Project id="some_example_project"> <ide> <ide> The easiest way to get started with an end-to-end training process is to clone a <ide> workflows, from data preprocessing to training and packaging your model. <ide> <ide> > #### Instructions <ide> > <del>> 1. Select your requirements and settings. The quickstart widget will <del>> auto-generate a recommended starter config for you. <add>> 1. Select your requirements and settings. <ide> > 2. Use the buttons at the bottom to save the result to your clipboard or a <del>> file `config.cfg`. <del>> 3. TOOD: recommended approach for filling config <del>> 4. Run [`spacy train`](/api/cli#train) with your config and data. <add>> file `base_config.cfg`. <add>> 3. Run [`init config`](/api/cli#init-config) to create a full training config. <add>> 4. Run [`train`](/api/cli#train) with your config and data. <ide> <ide> import QuickstartTraining from 'widgets/quickstart-training.js' <ide> <del><QuickstartTraining /> <add><QuickstartTraining download="base_config.cfg" /> <add> <add>After you've saved the starter config to a file `base_config.cfg`, you can use <add>the [`init config`](/api/cli#init-config) command to fill in the remaining <add>defaults.
Training configs should always be **complete and without hidden <add>defaults**, to keep your experiments reproducible. <add> <add>```bash <add>$ python -m spacy init config config.cfg --base base_config.cfg <add>``` <add> <add>> #### Tip: Debug your data <add>> <add>> The [`debug-data` command](/api/cli#debug-data) lets you analyze and validate <add>> your training and development data, get useful stats, and find problems like <add>> invalid entity annotations, cyclic dependencies, low data labels and more. <add>> <add>> ```bash <add>> $ python -m spacy debug-data en train.spacy dev.spacy --verbose <add>> ``` <add> <add>You can now run [`train`](/api/cli#train) with your training and development <add>data and the training config. See the [`convert`](/api/cli#convert) command for <add>details on how to convert your data to spaCy's binary `.spacy` format. <add> <add>```bash <add>$ python -m spacy train train.spacy dev.spacy config.cfg --output ./output <add>``` <ide> <ide> ## Training config {#config} <ide> <ide><path>website/docs/usage/transformers.md <ide> resolved, the function is created and passed into the model as an argument. <ide> Remember that the `config.cfg` used for training should contain **no missing <ide> values** and requires all settings to be defined. You don't want any hidden <ide> defaults creeping in and changing your results! spaCy will tell you if settings <del>are missing, and you can run [`spacy debug config`](/api/cli#debug-config) with <del>`--auto-fill` to automatically fill in all defaults. <del> <del><!-- TODO: update with details on getting started with a config --> <add>are missing, and you can run [`spacy init config`](/api/cli#init-config) with to <add>automatically fill in all defaults. <ide> <ide> </Infobox> <ide>
12
Text
Text
add active record encryption to changelog
224c9b34cb4fdc6989707f6e18e0a156e0864257
<ide><path>activerecord/CHANGELOG.md <add>* Add attribute encryption support. <add> <add> Encrypted attributes are declared at the model level. These <add> are regular Active Record attributes backed by a column with <add> the same name. The system will transparently encrypt these <add> attributes before saving them into the database and will <add> decrypt them when retrieving their values. <add> <add> <add> ```ruby <add> class Person < ApplicationRecord <add> encrypts :name <add> encrypts :email_address, deterministic: true <add> end <add> ``` <add> <add> You can learn more in the [Active Record Encryption <add> guide](https://edgeguides.rubyonrails.org/active_record_encryption.html). <add> <add> *Jorge Manrubia* <add> <ide> * Changed Arel predications `contains` and `overlaps` to use <ide> `quoted_node` so that PostgreSQL arrays are quoted properly. <ide>
1
Javascript
Javascript
avoid pause with unpipe in buffered write
68990948fe473262ec5f7d0b06112835ffd613aa
<ide><path>lib/_stream_readable.js <ide> Readable.prototype.pipe = function(dest, pipeOpts) { <ide> var ondrain = pipeOnDrain(src); <ide> dest.on('drain', ondrain); <ide> <add> var cleanedUp = false; <ide> function cleanup() { <ide> debug('cleanup'); <ide> // cleanup event handlers once the pipe is broken <ide> Readable.prototype.pipe = function(dest, pipeOpts) { <ide> src.removeListener('end', cleanup); <ide> src.removeListener('data', ondata); <ide> <add> cleanedUp = true; <add> <ide> // if the reader is waiting for a drain event from this <ide> // specific writer, then it would cause it to never start <ide> // flowing again. <ide> Readable.prototype.pipe = function(dest, pipeOpts) { <ide> debug('ondata'); <ide> var ret = dest.write(chunk); <ide> if (false === ret) { <del> debug('false write response, pause', <del> src._readableState.awaitDrain); <del> src._readableState.awaitDrain++; <add> // If the user unpiped during `dest.write()`, it is possible <add> // to get stuck in a permanently paused state if that write <add> // also returned false. <add> if (state.pipesCount === 1 && <add> state.pipes[0] === dest && <add> src.listenerCount('data') === 1 && <add> !cleanedUp) { <add> debug('false write response, pause', src._readableState.awaitDrain); <add> src._readableState.awaitDrain++; <add> } <ide> src.pause(); <ide> } <ide> } <ide><path>test/parallel/test-stream-pipe-cleanup-pause.js <add>'use strict'; <add>const common = require('../common'); <add>const assert = require('assert'); <add>const stream = require('stream'); <add> <add>const reader = new stream.Readable(); <add>const writer1 = new stream.Writable(); <add>const writer2 = new stream.Writable(); <add> <add>// 560000 is chosen here because it is larger than the (default) highWaterMark <add>// and will cause `.write()` to return false <add>// See: https://github.com/nodejs/node/issues/2323 <add>const buffer = new Buffer(560000); <add> <add>reader._read = function(n) {}; <add> <add>writer1._write = common.mustCall(function(chunk, encoding, cb) { <add> this.emit('chunk-received'); <add> cb(); <add>}, 1); <add>writer1.once('chunk-received', function() { <add> reader.unpipe(writer1); <add> reader.pipe(writer2); <add> reader.push(buffer); <add> setImmediate(function() { <add> reader.push(buffer); <add> setImmediate(function() { <add> reader.push(buffer); <add> }); <add> }); <add>}); <add> <add>writer2._write = common.mustCall(function(chunk, encoding, cb) { <add> cb(); <add>}, 3); <add> <add>reader.pipe(writer1); <add>reader.push(buffer);
2
PHP
PHP
fix cs error
e2b79fd063bc5accb44d9d37bf591cdc1475cd5d
<ide><path>src/Controller/Controller.php <ide> use Cake\ORM\Locator\LocatorAwareTrait; <ide> use Cake\Routing\Router; <ide> use Cake\View\ViewVarsTrait; <del>use LogicException; <ide> use ReflectionClass; <ide> use ReflectionException; <ide> use ReflectionMethod; <ide><path>src/Datasource/ModelAwareTrait.php <ide> namespace Cake\Datasource; <ide> <ide> use Cake\Datasource\Exception\MissingModelException; <del>use UnexpectedValueException; <ide> <ide> /** <ide> * Provides functionality for loading table classes <ide><path>src/I18n/Formatter/IcuFormatter.php <ide> namespace Cake\I18n\Formatter; <ide> <ide> use Aura\Intl\Exception\CannotFormat; <del>use Aura\Intl\Exception\CannotInstantiateFormatter; <ide> use Aura\Intl\FormatterInterface; <ide> use MessageFormatter; <ide>
3
PHP
PHP
make optimizecommand compile views
27d42308ed3c370b943b7c9db27dd4f4eaf685a6
<ide><path>src/Illuminate/Foundation/Console/OptimizeCommand.php <ide> <ide> use Illuminate\Console\Command; <ide> use Illuminate\Foundation\Composer; <add>use Illuminate\View\Engines\CompilerEngine; <ide> use ClassPreloader\Command\PreCompileCommand; <ide> use Symfony\Component\Console\Input\InputOption; <ide> <ide> public function fire() <ide> $this->info('Compiling common classes'); <ide> <ide> $this->compileClasses(); <add> <add> $this->info('Compiling views'); <add> <add> $this->compileViews(); <ide> } <ide> else <ide> { <ide> protected function registerClassPreloaderCommand() <ide> $this->getApplication()->add(new PreCompileCommand); <ide> } <ide> <add> /** <add> * Compile all view files. <add> * <add> * @return void <add> */ <add> protected function compileViews() <add> { <add> $paths = $this->laravel['view']->getFinder() <add> ->getPaths(); <add> <add> foreach ($paths as $dir) <add> { <add> $files = $this->laravel['files']->allFiles($dir); <add> <add> foreach ($files as $path) <add> { <add> $engine = $this->laravel['view']->getEngineFromPath($path); <add> <add> if ($engine instanceof CompilerEngine) <add> { <add> $engine->getCompiler()->compile($path); <add> } <add> } <add> } <add> } <add> <ide> /** <ide> * Get the console command options. <ide> * <ide><path>src/Illuminate/View/Factory.php <ide> public function renderEach($view, $data, $iterator, $empty = 'raw|') <ide> * @param string $path <ide> * @return \Illuminate\View\Engines\EngineInterface <ide> */ <del> protected function getEngineFromPath($path) <add> public function getEngineFromPath($path) <ide> { <ide> $engine = $this->extensions[$this->getExtension($path)]; <ide>
2
Javascript
Javascript
improve $formatters and $parsers info
43cf54c3a2473af3a26675d99459a6399d873d94
<ide><path>src/ng/directive/ngModel.js <ide> var ngModelMinErr = minErr('ngModel'); <ide> * @property {*} $viewValue The actual value from the control's view. For `input` elements, this is a <ide> * String. See {@link ngModel.NgModelController#$setViewValue} for information about when the $viewValue <ide> * is set. <add> * <ide> * @property {*} $modelValue The value in the model that the control is bound to. <add> * <ide> * @property {Array.<Function>} $parsers Array of functions to execute, as a pipeline, whenever <del> the control reads value from the DOM. The functions are called in array order, each passing <del> its return value through to the next. The last return value is forwarded to the <del> {@link ngModel.NgModelController#$validators `$validators`} collection. <add> * the control updates the ngModelController with a new {@link ngModel.NgModelController#$viewValue <add> `$viewValue`} from the DOM, usually via user input. <add> See {@link ngModel.NgModelController#$setViewValue `$setViewValue()`} for a detailed lifecycle explanation. <add> Note that the `$parsers` are not called when the bound ngModel expression changes programmatically. <ide> <del>Parsers are used to sanitize / convert the {@link ngModel.NgModelController#$viewValue <del>`$viewValue`}. <add> The functions are called in array order, each passing <add> its return value through to the next. The last return value is forwarded to the <add> {@link ngModel.NgModelController#$validators `$validators`} collection. <ide> <del>Returning `undefined` from a parser means a parse error occurred. In that case, <del>no {@link ngModel.NgModelController#$validators `$validators`} will run and the `ngModel` <del>will be set to `undefined` unless {@link ngModelOptions `ngModelOptions.allowInvalid`} <del>is set to `true`. The parse error is stored in `ngModel.$error.parse`. <add> Parsers are used to sanitize / convert the {@link ngModel.NgModelController#$viewValue <add> `$viewValue`}. <add> <add> Returning `undefined` from a parser means a parse error occurred. In that case, <add> no {@link ngModel.NgModelController#$validators `$validators`} will run and the `ngModel` <add> will be set to `undefined` unless {@link ngModelOptions `ngModelOptions.allowInvalid`} <add> is set to `true`. The parse error is stored in `ngModel.$error.parse`. <add> <add> This simple example shows a parser that would convert text input value to lowercase: <add> * ```js <add> * function parse(value) { <add> * if (value) { <add> * return value.toLowerCase(); <add> * } <add> * } <add> * ngModelController.$parsers.push(parse); <add> * ``` <ide> <ide> * <ide> * @property {Array.<Function>} $formatters Array of functions to execute, as a pipeline, whenever <del> the model value changes. The functions are called in reverse array order, each passing the value through to the <del> next. The last return value is used as the actual DOM value. <del> Used to format / convert values for display in the control. <add> the bound ngModel expression changes programmatically. The `$formatters` are not called when the <add> value of the control is changed by user interaction. <add> <add> Formatters are used to format / convert the {@link ngModel.NgModelController#$modelValue <add> `$modelValue`} for display in the control. <add> <add> The functions are called in reverse array order, each passing the value through to the <add> next. The last return value is used as the actual DOM value. 
<add> <add> This simple example shows a formatter that would convert the model value to uppercase: <add> <ide> * ```js <del> * function formatter(value) { <add> * function format(value) { <ide> * if (value) { <ide> * return value.toUpperCase(); <ide> * } <ide> * } <del> * ngModel.$formatters.push(formatter); <add> * ngModel.$formatters.push(format); <ide> * ``` <ide> * <ide> * @property {Object.<string, function>} $validators A collection of validators that are applied <ide> NgModelController.prototype = { <ide> * <ide> * When `$setViewValue` is called, the new `value` will be staged for committing through the `$parsers` <ide> * and `$validators` pipelines. If there are no special {@link ngModelOptions} specified then the staged <del> * value sent directly for processing, finally to be applied to `$modelValue` and then the <del> * **expression** specified in the `ng-model` attribute. Lastly, all the registered change listeners, <del> * in the `$viewChangeListeners` list, are called. <add> * value is sent directly for processing through the `$parsers` pipeline. After this, the `$validators` and <add> * `$asyncValidators` are called and the value is applied to `$modelValue`. <add> * Finally, the value is set to the **expression** specified in the `ng-model` attribute and <add> * all the registered change listeners, in the `$viewChangeListeners` list are called. <ide> * <ide> * In case the {@link ng.directive:ngModelOptions ngModelOptions} directive is used with `updateOn` <ide> * and the `default` trigger is not listed, all those actions will remain pending until one of the
1
Javascript
Javascript
add test for beforesend listener
8e8fcbe6eb4bc188ca01aa8446141f60a07f6a0f
<ide><path>test/XMLHttpRequest.js <ide> global.XMLHttpRequest = function XMLHttpRequest() { <ide> <ide> // TODO handle file system errors? <ide> <add>self.readyState = 0; <add> <ide> self.open = function(m, u, a) { <ide> info.url = u; <ide> info.async = a; <add> self.readyState = 1; <ide> self.send = a ? read : readSync; <ide> }; <ide> <ide> global.XMLHttpRequest = function XMLHttpRequest() { <ide> }; <ide> <ide> function read() { <add> self.readyState = 2; <ide> fs.readFile(info.url, "binary", function(e, d) { <ide> if (e) { <ide> self.status = 404; // assumed <ide> global.XMLHttpRequest = function XMLHttpRequest() { <ide> } <ide> <ide> function readSync() { <add> self.readyState = 2; <ide> try { <ide> var d = fs.readFileSync(info.url, "binary"); <ide> self.status = 200; <ide><path>test/xhr/xhr-test.js <ide> suite.addBatch({ <ide> topic: load("xhr/xhr").expression("d3.xhr").document(), <ide> <ide> "on a sample text file": { <del> topic: function(xhr) { <del> xhr("test/data/sample.txt", this.callback); <add> topic: function(d3_xhr) { <add> d3_xhr("test/data/sample.txt", this.callback); <ide> }, <ide> "makes an asynchronous HTTP request": function(req) { <ide> assert.equal(req._info.url, "test/data/sample.txt"); <ide> suite.addBatch({ <ide> }, <ide> <ide> "when a custom mime type is specified": { <del> topic: function(xhr) { <del> xhr("test/data/sample.txt", "text/plain", this.callback); <add> topic: function(d3_xhr) { <add> d3_xhr("test/data/sample.txt", "text/plain", this.callback); <ide> }, <ide> "observes the optional mime type": function(req) { <ide> assert.equal(req._info.mimeType, "text/plain"); <ide> } <ide> }, <ide> <add> "when a beforesend listener is specified": { <add> topic: function(d3_xhr) { <add> var callback = this.callback; <add> var xhr = d3_xhr("test/data/sample.txt", "text/plain").on("beforesend", function(request) { <add> callback(null, { <add> that: this, <add> xhr: xhr, <add> readyState: request.readyState, <add> request: request <add> }); <add> }); <add> xhr.get(); <add> }, <add> "invokes the beforesend listener with the xhr object as the context": function(result) { <add> assert.equal(result.that, result.xhr); <add> assert.ok(result.xhr.get); <add> }, <add> "invokes the beforesend listener with the underlying XMLHttpRequest as an argument": function(result) { <add> assert.instanceOf(result.request, XMLHttpRequest); <add> }, <add> "invokes the beforesend listener after open and before send": function(result) { <add> assert.equal(result.readyState, 1); <add> } <add> }, <add> <ide> "on a file that does not exist": { <del> topic: function(xhr) { <add> topic: function(d3_xhr) { <ide> var callback = this.callback; <del> xhr("//does/not/exist.txt", function(error, req) { <add> d3_xhr("//does/not/exist.txt", function(error, req) { <ide> callback(null, req); <ide> }); <ide> },
2