content_type (stringclasses 8) | main_lang (stringclasses 7) | message (stringlengths 1-50) | sha (stringlengths 40) | patch (stringlengths 52-962k) | file_count (int64 1-300)
---|---|---|---|---|---
Ruby | Ruby | allow load from file content | ffd92a87c5e0503c1ca63c52d54332c2c93618c2 | <ide><path>Library/Homebrew/tab.rb
<ide> def self.create(formula, compiler, stdlib, build)
<ide> end
<ide>
<ide> def self.from_file path
<del> attributes = Utils::JSON.load(File.read(path))
<add> from_file_content(File.read(path), path)
<add> end
<add>
<add> def self.from_file_content content, path
<add> attributes = Utils::JSON.load(content)
<ide> attributes["tabfile"] = path
<ide> attributes["source"] ||= {}
<ide> | 1 |
Javascript | Javascript | use a fixture to test `helper-test` blueprint | cb2cadf9912e1b4616aea1e090b8c2b920b3db77 | <ide><path>node-tests/blueprints/helper-test.js
<ide> 'use strict';
<ide>
<add>const file = require('../helpers/file');
<ide> var blueprintHelpers = require('ember-cli-blueprint-test-helpers/helpers');
<ide> var setupTestHooks = blueprintHelpers.setupTestHooks;
<ide> var emberNew = blueprintHelpers.emberNew;
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> "export default helper(fooBarBaz);");
<ide>
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<del> it('in-addon helper foo-bar', function() {
<del> var args = ['helper', 'foo-bar'];
<add> it('in-addon helper foo/bar-baz', function() {
<add> var args = ['helper', 'foo/bar-baz'];
<ide>
<ide> return emberNew({ target: 'addon' })
<ide> .then(() => emberGenerateDestroy(args, _file => {
<del> expect(_file('addon/helpers/foo-bar.js'))
<add> expect(_file('addon/helpers/foo/bar-baz.js'))
<ide> .to.contain("import { helper } from '@ember/component/helper';\n\n" +
<del> "export function fooBar(params/*, hash*/) {\n" +
<add> "export function fooBarBaz(params/*, hash*/) {\n" +
<ide> " return params;\n" +
<ide> "}\n\n" +
<del> "export default helper(fooBar);");
<add> "export default helper(fooBarBaz);");
<ide>
<del> expect(_file('app/helpers/foo-bar.js'))
<del> .to.contain("export { default, fooBar } from 'my-addon/helpers/foo-bar';");
<del> expect(_file('tests/integration/helpers/foo-bar-test.js'))
<del> .to.contain("moduleForComponent('foo-bar', 'helper:foo-bar', {");
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.contain("export { default, fooBarBaz } from 'my-addon/helpers/foo/bar-baz';");
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.contain("export { default, fooBarBaz } from 'my-addon/helpers/foo/bar-baz';");
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> }));
<ide> });
<ide>
<del> it('in-repo-addon helper foo-bar', function() {
<del> var args = ['helper', 'foo-bar', '--in-repo-addon=my-addon'];
<add> it('in-repo-addon helper foo/bar-baz', function() {
<add> var args = ['helper', 'foo/bar-baz', '--in-repo-addon=my-addon'];
<ide>
<ide> return emberNew({ target: 'in-repo-addon' })
<ide> .then(() => emberGenerateDestroy(args, _file => {
<del> expect(_file('lib/my-addon/addon/helpers/foo-bar.js'))
<add> expect(_file('lib/my-addon/addon/helpers/foo/bar-baz.js'))
<ide> .to.contain("import { helper } from '@ember/component/helper';\n\n" +
<del> "export function fooBar(params/*, hash*/) {\n" +
<add> "export function fooBarBaz(params/*, hash*/) {\n" +
<ide> " return params;\n" +
<ide> "}\n\n" +
<del> "export default helper(fooBar);");
<add> "export default helper(fooBarBaz);");
<ide>
<del> expect(_file('lib/my-addon/app/helpers/foo-bar.js'))
<del> .to.contain("export { default, fooBar } from 'my-addon/helpers/foo-bar';");
<del> expect(_file('tests/integration/helpers/foo-bar-test.js'))
<del> .to.contain("moduleForComponent('foo-bar', 'helper:foo-bar', {");
<add> expect(_file('lib/my-addon/app/helpers/foo/bar-baz.js'))
<add> .to.contain("export { default, fooBarBaz } from 'my-addon/helpers/foo/bar-baz';");
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> expect(_file('lib/my-addon/app/helpers/foo/bar-baz.js'))
<ide> .to.contain("export { default, fooBarBaz } from 'my-addon/helpers/foo/bar-baz';");
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<del> it('helper foo-bar --pod', function() {
<del> var args = ['helper', 'foo-bar', '--pod'];
<add> it('helper foo/bar-baz --pod', function() {
<add> var args = ['helper', 'foo/bar-baz', '--pod'];
<ide>
<ide> return emberNew()
<ide> .then(() => emberGenerateDestroy(args, _file => {
<del> expect(_file('app/helpers/foo-bar.js'))
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.contain("import { helper } from '@ember/component/helper';\n\n" +
<del> "export function fooBar(params/*, hash*/) {\n" +
<add> "export function fooBarBaz(params/*, hash*/) {\n" +
<ide> " return params;\n" +
<ide> "}\n\n" +
<del> "export default helper(fooBar);");
<add> "export default helper(fooBarBaz);");
<ide>
<del> expect(_file('tests/integration/helpers/foo-bar-test.js'))
<del> .to.contain("moduleForComponent('foo-bar', 'helper:foo-bar', {");
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<del> it('helper foo-bar --pod podModulePrefix', function() {
<del> var args = ['helper', 'foo-bar', '--pod'];
<add> it('helper foo/bar-baz --pod podModulePrefix', function() {
<add> var args = ['helper', 'foo/bar-baz', '--pod'];
<ide>
<ide> return emberNew()
<ide> .then(() => setupPodConfig({ podModulePrefix: true }))
<ide> .then(() => emberGenerateDestroy(args, _file => {
<del> expect(_file('app/helpers/foo-bar.js'))
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.contain("import { helper } from '@ember/component/helper';\n\n" +
<del> "export function fooBar(params/*, hash*/) {\n" +
<add> "export function fooBarBaz(params/*, hash*/) {\n" +
<ide> " return params;\n" +
<ide> "}\n\n" +
<del> "export default helper(fooBar);");
<add> "export default helper(fooBarBaz);");
<ide>
<del> expect(_file('tests/integration/helpers/foo-bar-test.js'))
<del> .to.contain("moduleForComponent('foo-bar', 'helper:foo-bar', {");
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> "export default helper(fooBarBaz);");
<ide>
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> "export default helper(fooBarBaz);");
<ide>
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> return emberNew()
<ide> .then(() => emberGenerateDestroy(args, _file => {
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide> describe('Acceptance: ember generate and destroy helper', function() {
<ide> return emberNew()
<ide> .then(() => emberGenerateDestroy(args, _file => {
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {");
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<del> it('in-addon helper-test foo-bar', function() {
<del> var args = ['helper-test', 'foo-bar'];
<add> it('in-addon helper-test foo/bar-baz', function() {
<add> var args = ['helper-test', 'foo/bar-baz'];
<ide>
<ide> return emberNew({ target: 'addon' })
<ide> .then(() => emberGenerateDestroy(args, _file => {
<del> expect(_file('tests/integration/helpers/foo-bar-test.js'))
<del> .to.contain("moduleForComponent('foo-bar', 'helper:foo-bar', {");
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<ide> }));
<ide> });
<ide>
<ide><path>node-tests/fixtures/helper-test/integration.js
<add>
<add>import { moduleForComponent, test } from 'ember-qunit';
<add>import hbs from 'htmlbars-inline-precompile';
<add>
<add>moduleForComponent('foo/bar-baz', 'helper:foo/bar-baz', {
<add> integration: true
<add>});
<add>
<add>// Replace this with your real tests.
<add>test('it renders', function(assert) {
<add> this.set('inputValue', '1234');
<add>
<add> this.render(hbs`{{foo/bar-baz inputValue}}`);
<add>
<add> assert.equal(this.$().text().trim(), '1234');
<add>});
<add>
<ide><path>node-tests/helpers/file.js
<add>'use strict';
<add>
<add>const path = require('path');
<add>const file = require('ember-cli-blueprint-test-helpers/chai').file;
<add>
<add>module.exports = function(filePath) {
<add> return file(path.join(__dirname, '../fixtures', filePath));
<add>}; | 3 |
PHP | PHP | fix more cs | d5a189d3f8f2c40c5ed1672a1e993e5911455d17 | <ide><path>src/Shell/Task/PluginTask.php
<ide> public function bake($plugin) {
<ide> $out .= "class AppController extends BaseController {\n\n";
<ide> $out .= "}\n";
<ide> $this->createFile($this->path . $plugin . DS . $classBase . DS . 'Controller' . DS . $controllerFileName, $out);
<del> $emptyFile = $this->path . 'empty';
<add> $emptyFile = $this->path . 'empty';
<ide> $this->_deleteEmptyFile($emptyFile);
<ide>
<ide> $hasAutoloader = $this->_modifyAutoloader($plugin, $this->path); | 1 |
Python | Python | add stringfield to imports | 6b7f001a607f2527f88352d7f1aaf608d91f1557 | <ide><path>airflow/www/app.py
<ide> import sqlalchemy as sqla
<ide> from wtforms import (
<ide> widgets,
<del> Form, DateTimeField, SelectField, TextAreaField, PasswordField)
<add> Form, DateTimeField, SelectField, TextAreaField, PasswordField, StringField)
<ide>
<ide> from pygments import highlight, lexers
<ide> from pygments.formatters import HtmlFormatter | 1 |
Text | Text | add hint about using media types | a94d9712741daf33863e159106e8980fbe3e7745 | <ide><path>curriculum/challenges/english/14-responsive-web-design-22/learn-css-flexbox-by-building-a-photo-gallery/6153a3952facd25a83fe8083.md
<ide> You should add a new `@media` query.
<ide> assert(new __helpers.CSSHelp(document).getCSSRules('media')?.length === 1);
<ide> ```
<ide>
<del>Your new `@media` query should have a `max-width` of `800px`.
<add>Your new `@media` query should have a `max-width` of `800px` like this: `@media (max-width: 800px)`.
<ide>
<ide> ```js
<ide> assert(new __helpers.CSSHelp(document).getCSSRules('media')?.[0]?.media?.mediaText === '(max-width: 800px)');
<ide><path>curriculum/challenges/english/14-responsive-web-design-22/learn-css-flexbox-by-building-a-photo-gallery/6153a3ebb4f7f05b8401b716.md
<ide> You should have a second `@media` query.
<ide> assert(new __helpers.CSSHelp(document).getCSSRules('media')?.length === 2);
<ide> ```
<ide>
<del>Your new `@media` query should come after your existing one.
<add>Your new `@media` query should come after your existing one. You should have a `@media (max-width: 800px)` rule.
<ide>
<ide> ```js
<ide> assert(new __helpers.CSSHelp(document).getCSSRules('media')?.[0]?.media?.mediaText === '(max-width: 800px)');
<ide> ```
<ide>
<del>Your new `@media` query should have a `max-width` of `600px`.
<add>Your new `@media` query should have a `max-width` of `600px` like this: `@media (max-width: 600px)`.
<ide>
<ide> ```js
<ide> assert(new __helpers.CSSHelp(document).getCSSRules('media')?.[1]?.media?.mediaText === '(max-width: 600px)'); | 2 |
PHP | PHP | between method | c354a691669005381c824331f29e184e31f7c583 | <ide><path>src/Illuminate/Support/Str.php
<ide> public static function beforeLast($subject, $search)
<ide> return static::substr($subject, 0, $pos);
<ide> }
<ide>
<add> /**
<add>     * Get the portion of a string between two given values.
<add> *
<add> * @param string $subject
<add> * @param string $before
<add> * @param string $after
<add> * @return string
<add> */
<add> public static function between($subject, $before, $after)
<add> {
<add> if ($before === '' || $after === '') {
<add> return $subject;
<add> }
<add>
<add> $rightCropped = static::after($subject, $before);
<add>
<add> return static::beforeLast($rightCropped, $after);
<add> }
<add>
<ide> /**
<ide> * Convert a value to camel case.
<ide> *
<ide><path>tests/Support/SupportStrTest.php
<ide> public function testStrBeforeLast()
<ide> $this->assertSame('yv2et', Str::beforeLast('yv2et2te', 2));
<ide> }
<ide>
<add> public function testStrBetween()
<add> {
<add> $this->assertSame('nn', Str::between('hannah', 'ha', 'ah'));
<add> $this->assertSame('foo', Str::between('foofoobar', 'foo', 'bar'));
<add> $this->assertSame('bar', Str::between('foobarbar', 'foo', 'bar'));
<add> }
<add>
<ide> public function testStrAfter()
<ide> {
<ide> $this->assertSame('nah', Str::after('hannah', 'han')); | 2 |
Ruby | Ruby | delegate all calculations to the scope | edd94cee9af1688dd036fc58fd405adb30a5e0da | <ide><path>activerecord/lib/active_record/associations/collection_proxy.rb
<ide> module Associations
<ide> # is computed directly through SQL and does not trigger by itself the
<ide> # instantiation of the actual post records.
<ide> class CollectionProxy < Relation
<add> delegate *ActiveRecord::Calculations.public_instance_methods, to: :scope
<add>
<ide> def initialize(association) #:nodoc:
<ide> @association = association
<ide> super association.klass, association.klass.arel_table
<ide> def reload
<ide> proxy_association.reload
<ide> self
<ide> end
<del>
<del> def pluck(column_names)
<del> scope.pluck(column_names)
<del> end
<ide> end
<ide> end
<ide> end
<ide><path>activerecord/lib/active_record/null_relation.rb
<ide> def count(*)
<ide> 0
<ide> end
<ide>
<add> def sum(*)
<add> 0
<add> end
<add>
<ide> def calculate(_operation, _column_name, _options = {})
<ide> nil
<ide> end
<ide><path>activerecord/test/cases/associations/has_many_associations_test.rb
<ide> def test_collection_association_with_private_kernel_method
<ide> assert_equal [], post.comments
<ide> assert_equal [], post.comments.where(body: 'omg')
<ide> assert_equal [], post.comments.pluck(:body)
<add> assert_equal 0, post.comments.sum(:id)
<ide> end
<ide> end
<ide> end | 3 |
Javascript | Javascript | add rdfa attributes not already covered | fd682b5cac157b08c12403e4e6e87c3384385202 | <ide><path>src/renderers/dom/shared/HTMLDOMPropertyConfig.js
<ide> var HTMLDOMPropertyConfig = {
<ide> wmode: MUST_USE_ATTRIBUTE,
<ide> wrap: null,
<ide>
<add> /**
<add> * RDFa Properties
<add> */
<add> about: MUST_USE_ATTRIBUTE,
<add> datatype: MUST_USE_ATTRIBUTE,
<add> inlist: MUST_USE_ATTRIBUTE,
<add> prefix: MUST_USE_ATTRIBUTE,
<add> // property is also supported for OpenGraph in meta tags.
<add> property: MUST_USE_ATTRIBUTE,
<add> resource: MUST_USE_ATTRIBUTE,
<add> typeof: MUST_USE_ATTRIBUTE,
<add> vocab: MUST_USE_ATTRIBUTE,
<add>
<ide> /**
<ide> * Non-standard Properties
<ide> */
<ide> var HTMLDOMPropertyConfig = {
<ide> // https://html.spec.whatwg.org/multipage/microdata.html#microdata-dom-api
<ide> itemID: MUST_USE_ATTRIBUTE,
<ide> itemRef: MUST_USE_ATTRIBUTE,
<del> // property is supported for OpenGraph in meta tags.
<del> property: null,
<ide> // results show looking glass icon and recent searches on input
<ide> // search fields in WebKit/Blink
<ide> results: null, | 1 |
Javascript | Javascript | fix validation of options in `blob` constructor | d6ee27445b58651f40bc2be54b63986dace2e997 | <ide><path>lib/internal/blob.js
<ide> const {
<ide> } = require('internal/errors');
<ide>
<ide> const {
<del> validateObject,
<ide> isUint32,
<add> validateDictionary,
<ide> } = require('internal/validators');
<ide>
<ide> const kHandle = Symbol('kHandle');
<ide> class Blob {
<ide> * }} [options]
<ide> * @constructs {Blob}
<ide> */
<del> constructor(sources = [], options = kEmptyObject) {
<add> constructor(sources = [], options) {
<ide> if (sources === null ||
<ide> typeof sources[SymbolIterator] !== 'function' ||
<ide> typeof sources === 'string') {
<ide> throw new ERR_INVALID_ARG_TYPE('sources', 'a sequence', sources);
<ide> }
<del> validateObject(options, 'options');
<add> validateDictionary(options, 'options');
<ide> let {
<ide> type = '',
<ide> endings = 'transparent',
<del> } = options;
<add> } = options ?? kEmptyObject;
<ide>
<ide> endings = `${endings}`;
<ide> if (endings !== 'transparent' && endings !== 'native')
<ide><path>lib/internal/validators.js
<ide> const validateObject = hideStackFrames(
<ide> }
<ide> });
<ide>
<add>/**
<add> * @callback validateDictionary - We are using the Web IDL Standard definition
<add> * of "dictionary" here, which means any value
<add> * whose Type is either Undefined, Null, or
<add> * Object (which includes functions).
<add> * @param {*} value
<add> * @param {string} name
<add> * @see https://webidl.spec.whatwg.org/#es-dictionary
<add> * @see https://tc39.es/ecma262/#table-typeof-operator-results
<add> */
<add>
<add>/** @type {validateDictionary} */
<add>const validateDictionary = hideStackFrames(
<add> (value, name) => {
<add> if (value != null && typeof value !== 'object' && typeof value !== 'function') {
<add> throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value);
<add> }
<add> });
<add>
<ide> /**
<ide> * @callback validateArray
<ide> * @param {*} value
<ide> module.exports = {
<ide> validateBooleanArray,
<ide> validateBoolean,
<ide> validateBuffer,
<add> validateDictionary,
<ide> validateEncoding,
<ide> validateFunction,
<ide> validateInt32,
<ide><path>test/parallel/test-blob.js
<ide> assert.throws(() => new Blob({}), {
<ide> assert.strictEqual(blob.size, 28);
<ide> assert.strictEqual(blob.type, '');
<ide> })().then(common.mustCall());
<add>
<add>{
<add> // Testing the defaults
<add> [undefined, null, Object.create(null), { type: undefined }, {
<add> get type() {}, // eslint-disable-line getter-return
<add> }].forEach((options) => {
<add> assert.strictEqual(
<add> new Blob([], options).type,
<add> new Blob([]).type,
<add> );
<add> });
<add>
<add> Reflect.defineProperty(Object.prototype, 'type', {
<add> __proto__: null,
<add> configurable: true,
<add> get: common.mustCall(() => 3, 7),
<add> });
<add>
<add> [{}, [], () => {}, Number, new Number(), new String(), new Boolean()].forEach(
<add> (options) => {
<add> assert.strictEqual(new Blob([], options).type, '3');
<add> },
<add> );
<add> [0, '', true, Symbol(), 0n].forEach((options) => {
<add> assert.throws(() => new Blob([], options), { code: 'ERR_INVALID_ARG_TYPE' });
<add> });
<add>
<add> delete Object.prototype.type;
<add>} | 3 |
Python | Python | reinstate imports for github enterprise auth | 9339711625ea46e738870bcf5e3c9a8765fc3d21 | <ide><path>airflow/contrib/auth/backends/github_enterprise_auth.py
<ide> import logging
<ide>
<ide> import flask_login
<del>from flask_login import login_user
<add>
<add># Need to expose these downstream
<add># pylint: disable=unused-import
<add>from flask_login import (current_user,
<add> logout_user,
<add> login_required)
<add># pylint: enable=unused-import
<ide>
<ide> from flask import url_for, redirect, request
<ide> | 1 |
Python | Python | add type hints for poolformer in pytorch | 5493c10ecba5eb9aa3023108f2af9499bdb1aea9 | <ide><path>src/transformers/models/poolformer/modeling_poolformer.py
<ide>
<ide> import collections.abc
<ide> from dataclasses import dataclass
<del>from typing import Optional, Tuple
<add>from typing import Optional, Tuple, Union
<ide>
<ide> import torch
<ide> import torch.utils.checkpoint
<ide> def get_input_embeddings(self):
<ide> modality="vision",
<ide> expected_output=_EXPECTED_OUTPUT_SHAPE,
<ide> )
<del> def forward(self, pixel_values=None, output_hidden_states=None, return_dict=None):
<add> def forward(
<add> self,
<add> pixel_values: Optional[torch.FloatTensor] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> ) -> Union[Tuple, PoolFormerModelOutput]:
<ide> output_hidden_states = (
<ide> output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
<ide> )
<ide> def __init__(self, config):
<ide> )
<ide> def forward(
<ide> self,
<del> pixel_values=None,
<del> labels=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> ):
<add> pixel_values: Optional[torch.FloatTensor] = None,
<add> labels: Optional[torch.LongTensor] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> ) -> Union[Tuple, PoolFormerClassifierOutput]:
<ide> r"""
<ide> labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
<ide> Labels for computing the image classification/regression loss. Indices should be in `[0, ..., | 1 |
Java | Java | improve javadocs of the subscribeactual methods | d07dfa1fe59e71e2f231246374c9c2009599ef62 | <ide><path>src/main/java/io/reactivex/Completable.java
<ide> public final void subscribe(CompletableObserver s) {
<ide> }
<ide>
<ide> /**
<del> * Implement this to handle the incoming CompletableObserver and
<add> * Implement this method to handle the incoming {@link CompletableObserver}s and
<ide> * perform the business logic in your operator.
<add> * <p>There is no need to call any of the plugin hooks on the current {@code Completable} instance or
<add> * the {@code CompletableObserver}; all hooks and basic safeguards have been
<add> * applied by {@link #subscribe(CompletableObserver)} before this method gets called.
<ide> * @param s the CompletableObserver instance, never null
<ide> */
<ide> protected abstract void subscribeActual(CompletableObserver s);
<ide><path>src/main/java/io/reactivex/Flowable.java
<ide> public final void subscribe(FlowableSubscriber<? super T> s) {
<ide>
<ide> /**
<ide> * Operator implementations (both source and intermediate) should implement this method that
<del> * performs the necessary business logic.
<del> * <p>There is no need to call any of the plugin hooks on the current Flowable instance or
<del> * the Subscriber.
<add> * performs the necessary business logic and handles the incoming {@link Subscriber}s.
<add> * <p>There is no need to call any of the plugin hooks on the current {@code Flowable} instance or
<add> * the {@code Subscriber}; all hooks and basic safeguards have been
<add> * applied by {@link #subscribe(Subscriber)} before this method gets called.
<ide> * @param s the incoming Subscriber, never null
<ide> */
<ide> protected abstract void subscribeActual(Subscriber<? super T> s);
<ide><path>src/main/java/io/reactivex/Maybe.java
<ide> public final void subscribe(MaybeObserver<? super T> observer) {
<ide> }
<ide>
<ide> /**
<del> * Override this method in subclasses to handle the incoming MaybeObservers.
<add> * Implement this method in subclasses to handle the incoming {@link MaybeObserver}s.
<add> * <p>There is no need to call any of the plugin hooks on the current {@code Maybe} instance or
<add> * the {@code MaybeObserver}; all hooks and basic safeguards have been
<add> * applied by {@link #subscribe(MaybeObserver)} before this method gets called.
<ide> * @param observer the MaybeObserver to handle, not null
<ide> */
<ide> protected abstract void subscribeActual(MaybeObserver<? super T> observer);
<ide><path>src/main/java/io/reactivex/Observable.java
<ide> public final void subscribe(Observer<? super T> observer) {
<ide>
<ide> /**
<ide> * Operator implementations (both source and intermediate) should implement this method that
<del> * performs the necessary business logic.
<del> * <p>There is no need to call any of the plugin hooks on the current Observable instance or
<del> * the Subscriber.
<add> * performs the necessary business logic and handles the incoming {@link Observer}s.
<add> * <p>There is no need to call any of the plugin hooks on the current {@code Observable} instance or
<add> * the {@code Observer}; all hooks and basic safeguards have been
<add> * applied by {@link #subscribe(Observer)} before this method gets called.
<ide> * @param observer the incoming Observer, never null
<ide> */
<ide> protected abstract void subscribeActual(Observer<? super T> observer);
<ide><path>src/main/java/io/reactivex/Single.java
<ide> public final void subscribe(SingleObserver<? super T> subscriber) {
<ide> }
<ide>
<ide> /**
<del> * Override this method in subclasses to handle the incoming SingleObservers.
<add> * Implement this method in subclasses to handle the incoming {@link SingleObserver}s.
<add> * <p>There is no need to call any of the plugin hooks on the current {@code Single} instance or
<add> * the {@code SingleObserver}; all hooks and basic safeguards have been
<add> * applied by {@link #subscribe(SingleObserver)} before this method gets called.
<ide> * @param observer the SingleObserver to handle, not null
<ide> */
<ide> protected abstract void subscribeActual(@NonNull SingleObserver<? super T> observer); | 5 |
PHP | PHP | add deletemany() and deletemanyorfail() | 8392c4c00cf6d14629ea05761b88b699bb9747d6 | <ide><path>src/ORM/Table.php
<ide> public function delete(EntityInterface $entity, $options = []): bool
<ide> return $success;
<ide> }
<ide>
<add> /**
<add> * Deletes multiple entities of a table.
<add> *
<add> * The records will be deleted in a transaction which will be rolled back if
<add> * any one of the records fails to delete due to failed validation or database
<add> * error.
<add> *
<add> * @param \Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface $entities Entities to save.
<add> * @param array|\ArrayAccess $options Options used when calling Table::save() for each entity.
<add> * @return bool|\Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface False on failure, entities list on success.
<add> * @throws \Exception
<add> */
<add> public function deleteMany(iterable $entities, $options = [])
<add> {
<add> $options = new ArrayObject((array)$options + [
<add> 'atomic' => true,
<add> 'checkRules' => true,
<add> '_primary' => true,
<add> ]);
<add>
<add> $success = $this->_executeTransaction(function () use ($entities, $options) {
<add> foreach ($entities as $entity) {
<add> if (!$this->_processDelete($entity, $options)) {
<add> return false;
<add> }
<add> }
<add> return true;
<add> }, $options['atomic']);
<add>
<add> if ($success && $this->_transactionCommitted($options['atomic'], $options['_primary'])) {
<add> foreach ($entities as $entity) {
<add> $this->dispatchEvent('Model.afterDeleteCommit', [
<add> 'entity' => $entity,
<add> 'options' => $options,
<add> ]);
<add> }
<add> }
<add>
<add> return $success;
<add> }
<add>
<add> /**
<add> * Deletes multiple entities of a table.
<add> *
<add> * The records will be deleted in a transaction which will be rolled back if
<add> * any one of the records fails to delete due to failed validation or database
<add> * error.
<add> *
<add> * @param \Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface $entities Entities to save.
<add> * @param array|\ArrayAccess $options Options used when calling Table::save() for each entity.
<add> * @return \Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface Entities list.
<add> * @throws \Exception
<add> */
<add> public function deleteManyOrFail(iterable $entities, $options = [])
<add> {
<add> $options = new ArrayObject((array)$options + [
<add> 'atomic' => true,
<add> 'checkRules' => true,
<add> '_primary' => true,
<add> ]);
<add>
<add> $failed = null;
<add> $success = $this->_executeTransaction(function () use ($entities, $options, &$failed) {
<add> foreach ($entities as $entity) {
<add> if (!$this->_processDelete($entity, $options)) {
<add> $failed = $entity;
<add> return false;
<add> }
<add> }
<add> return true;
<add> }, $options['atomic']);
<add>
<add> if ($success && $this->_transactionCommitted($options['atomic'], $options['_primary'])) {
<add> foreach ($entities as $entity) {
<add> $this->dispatchEvent('Model.afterDeleteCommit', [
<add> 'entity' => $entity,
<add> 'options' => $options,
<add> ]);
<add> }
<add> }
<add>
<add> if ($success === false) {
<add> throw new PersistenceFailedException($failed, ['delete']);
<add> }
<add>
<add> return $success;
<add> }
<add>
<ide> /**
<ide> * Try to delete an entity or throw a PersistenceFailedException if the entity is new,
<ide> * has no primary key value, application rules checks failed or the delete was aborted by a callback.
<ide><path>tests/TestCase/ORM/TableTest.php
<ide> use Cake\ORM\SaveOptionsBuilder;
<ide> use Cake\ORM\Table;
<ide> use Cake\TestSuite\TestCase;
<add>use Cake\Utility\Hash;
<ide> use Cake\Validation\Validator;
<ide> use InvalidArgumentException;
<ide> use TestApp\Model\Entity\ProtectedEntity;
<ide> public function testAfterSave()
<ide> $data->username = 'newusername';
<ide>
<ide> $called = false;
<del> $listener = function ($e, $entity, $options) use ($data, &$called) {
<add> $listener = function ($e, EntityInterface $entity, $options) use ($data, &$called) {
<ide> $this->assertSame($data, $entity);
<ide> $this->assertTrue($entity->isDirty());
<ide> $called = true;
<ide> };
<ide> $table->getEventManager()->on('Model.afterSave', $listener);
<ide>
<ide> $calledAfterCommit = false;
<del> $listenerAfterCommit = function ($e, $entity, $options) use ($data, &$calledAfterCommit) {
<add> $listenerAfterCommit = function ($e, EntityInterface $entity, $options) use ($data, &$calledAfterCommit) {
<ide> $this->assertSame($data, $entity);
<ide> $this->assertTrue($entity->isDirty());
<ide> $this->assertNotSame($data->get('username'), $data->getOriginal('username'));
<ide> public function testAfterSaveCommitWithNonAtomicAndTransactionRunning()
<ide> */
<ide> public function testAfterSaveNotCalled()
<ide> {
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query'])
<ide> ->setConstructorArgs([['table' => 'users', 'connection' => $this->connection]])
<ide> public function testAtomicSave()
<ide> ->getMock();
<ide> $connection->setDriver($this->connection->getDriver());
<ide>
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['getConnection'])
<ide> ->setConstructorArgs([['table' => 'users']])
<ide> public function testAtomicSaveRollback()
<ide> ->setConstructorArgs([ConnectionManager::getConfig('test')])
<ide> ->getMock();
<ide> $connection->setDriver(ConnectionManager::get('test')->getDriver());
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query', 'getConnection'])
<ide> ->setConstructorArgs([['table' => 'users']])
<ide> public function testAtomicSaveRollbackOnFailure()
<ide> ->setConstructorArgs([ConnectionManager::getConfig('test')])
<ide> ->getMock();
<ide> $connection->setDriver(ConnectionManager::get('test')->getDriver());
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query', 'getConnection', 'exists'])
<ide> ->setConstructorArgs([['table' => 'users']])
<ide> public function testBeforeSaveGetsCorrectPersistance()
<ide> */
<ide> public function testSaveUpdateWithHint()
<ide> {
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['exists'])
<ide> ->setConstructorArgs([['table' => 'users', 'connection' => ConnectionManager::get('test')]])
<ide> public function testSaveUpdateWithHint()
<ide> */
<ide> public function testSaveUpdatePrimaryKeyNotModified()
<ide> {
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query'])
<ide> ->setConstructorArgs([['table' => 'users', 'connection' => $this->connection]])
<ide> public function testSaveUpdatePrimaryKeyNotModified()
<ide> */
<ide> public function testUpdateNoChange()
<ide> {
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query'])
<ide> ->setConstructorArgs([['table' => 'users', 'connection' => $this->connection]])
<ide> public function testUpdateDirtyNoActualChanges()
<ide> public function testUpdateNoPrimaryButOtherKeys()
<ide> {
<ide> $this->expectException(\InvalidArgumentException::class);
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query'])
<ide> ->setConstructorArgs([['table' => 'users', 'connection' => $this->connection]])
<ide> public function testDeleteIsNew()
<ide> {
<ide> $entity = new Entity(['id' => 1, 'name' => 'mark']);
<ide>
<add> /** @var \Cake\ORM\Table|\PHPUnit\Framework\MockObject\MockObject $table */
<ide> $table = $this->getMockBuilder(Table::class)
<ide> ->setMethods(['query'])
<ide> ->setConstructorArgs([['connection' => $this->connection]])
<ide> public function testDeleteIsNew()
<ide> $this->assertFalse($result);
<ide> }
<ide>
<add> /**
<add> * Test simple delete.
<add> *
<add> * @return void
<add> */
<add> public function testDeleteMany()
<add> {
<add> $table = $this->getTableLocator()->get('users');
<add> $entities = $table->find()->limit(2)->all()->toArray();
<add> $this->assertCount(2, $entities);
<add>
<add> $result = $table->deleteMany($entities);
<add> $this->assertTrue($result);
<add>
<add> $count = $table->find()->where(['id IN' => Hash::extract($entities, '{n}.id')])->count();
<add> $this->assertSame(0, $count, 'Find should not return > 0.');
<add> }
<add>
<add> /**
<add> * Test simple delete.
<add> *
<add> * @return void
<add> */
<add> public function testDeleteManyOrFail()
<add> {
<add> $table = $this->getTableLocator()->get('users');
<add> $entities = $table->find()->limit(2)->all()->toArray();
<add> $this->assertCount(2, $entities);
<add>
<add> $result = $table->deleteManyOrFail($entities);
<add> $this->assertTrue($result);
<add>
<add> $count = $table->find()->where(['id IN' => Hash::extract($entities, '{n}.id')])->count();
<add> $this->assertSame(0, $count, 'Find should not return > 0.');
<add> }
<add>
<ide> /**
<ide> * test hasField()
<ide> * | 2 |
Python | Python | fix w605 flake8 warning (x5) | 5eab3cf6bce7b6f11793056d8772aeb6e761ac4f | <ide><path>examples/contrib/run_openai_gpt.py
<ide> --model_name openai-gpt \
<ide> --do_train \
<ide> --do_eval \
<del> --train_dataset $ROC_STORIES_DIR/cloze_test_val__spring2016\ -\ cloze_test_ALL_val.csv \
<del> --eval_dataset $ROC_STORIES_DIR/cloze_test_test__spring2016\ -\ cloze_test_ALL_test.csv \
<add> --train_dataset "$ROC_STORIES_DIR/cloze_test_val__spring2016 - cloze_test_ALL_val.csv" \
<add> --eval_dataset "$ROC_STORIES_DIR/cloze_test_test__spring2016 - cloze_test_ALL_test.csv" \
<ide> --output_dir ../log \
<ide> --train_batch_size 16 \
<ide> """
<ide><path>transformers/tokenization_xlm.py
<ide> def _tokenize(self, text, lang="en", bypass_tokenizer=False):
<ide> make && make install
<ide> pip install kytea
<ide> ```
<del> - [jieba](https://github.com/fxsjy/jieba): Chinese tokenizer *
<add> - [jieba](https://github.com/fxsjy/jieba): Chinese tokenizer (*)
<ide> - Install with `pip install jieba`
<ide>
<del> \* The original XLM used [Stanford Segmenter](https://nlp.stanford.edu/software/stanford-segmenter-2018-10-16.zip).
<add> (*) The original XLM used [Stanford Segmenter](https://nlp.stanford.edu/software/stanford-segmenter-2018-10-16.zip).
<ide> However, the wrapper (`nltk.tokenize.stanford_segmenter`) is slow due to JVM overhead, and it will be deprecated.
<ide> Jieba is a lot faster and pip-installable. Note there is some mismatch with the Stanford Segmenter. It should be fine
<ide> if you fine-tune the model with Chinese supervisionself. If you want the same exact behaviour, use the original XLM | 2 |
Ruby | Ruby | update a comment | e6f8f1618388d705883e943b1c4b6aff8e409b1a | <ide><path>Library/Homebrew/formula.rb
<ide> def fails_with_llvm msg=nil, data=nil
<ide> end
<ide> end
<ide>
<del># see ack.rb for an example usage
<add># See youtube-dl.rb for an example
<ide> class ScriptFileFormula < Formula
<ide> def install
<ide> bin.install Dir['*']
<ide> end
<ide> end
<ide>
<del># see flac.rb for example usage
<add># See flac.rb for an example
<ide> class GithubGistFormula < ScriptFileFormula
<ide> def initialize name='__UNKNOWN__', path=nil
<ide> super name, path | 1 |
Go | Go | update the crashtest to have the dockerpath in env | ebe157ebb567965e05cca45a1221cd36ec48a052 | <ide><path>contrib/crashTest.go
<ide> import (
<ide> "log"
<ide> "os"
<ide> "os/exec"
<add> "path"
<ide> "time"
<ide> )
<ide>
<del>const DOCKER_PATH = "/home/creack/dotcloud/docker/docker/docker"
<add>var DOCKER_PATH string = path.Join(os.Getenv("DOCKERPATH"), "docker")
<ide>
<ide> func runDaemon() (*exec.Cmd, error) {
<ide> os.Remove("/var/run/docker.pid") | 1 |
PHP | PHP | simplify code in data_get() | c6a84451d6771cea7d528e4303319c4c299105a7 | <ide><path>src/Illuminate/Support/helpers.php
<ide> function data_get($target, $key, $default = null)
<ide> return in_array('*', $key) ? Arr::collapse($result) : $result;
<ide> }
<ide>
<del> if (Arr::accessible($target)) {
<del> if (! Arr::exists($target, $segment)) {
<del> return value($default);
<del> }
<del>
<add> if (Arr::accessible($target) && Arr::exists($target, $segment)) {
<ide> $target = $target[$segment];
<del> } elseif (is_object($target)) {
<del> if (! isset($target->{$segment})) {
<del> return value($default);
<del> }
<del>
<add> } elseif (is_object($target) && isset($target->{$segment})) {
<ide> $target = $target->{$segment};
<ide> } else {
<ide> return value($default); | 1 |
Javascript | Javascript | add failure mode for reading project settings | ad40ff9825c6dc640dea9788845e17510439608f | <ide><path>src/main-process/parse-command-line.js
<ide> const readProjectSettingsSync = (filepath, executedFrom) => {
<ide> }
<ide> try {
<ide> const readPath = path.isAbsolute(filepath) ? filepath : path.join(executedFrom, filepath)
<del> return CSON.readFileSync(readPath)
<add> const contents = CSON.readFileSync(readPath)
<add>    if (contents.paths || contents.config) {
<add> return contents
<add> }
<add>
<ide> } catch (e) {
<ide> throw new Error('Unable to read supplied config file.')
<ide> } | 1 |
Javascript | Javascript | use temp repo copy while linking packages | f525ab6909f6b570ac36894e587cc73f5593464b | <ide><path>scripts/trace-next-server.js
<ide> const MAX_UNCOMPRESSED_SIZE = 2.5 * 1000 * 1000
<ide> // version so isn't pre-traced
<ide> async function main() {
<ide> const tmpdir = os.tmpdir()
<del> const repoDir = path.join(__dirname, '..')
<add> const origRepoDir = path.join(__dirname, '..')
<add> const repoDir = path.join(tmpdir, `tmp-next-${Date.now()}`)
<ide> const workDir = path.join(tmpdir, `trace-next-${Date.now()}`)
<ide>
<add> await fs.copy(origRepoDir, repoDir, {
<add> filter: (item) => {
<add> return !item.includes('node_modules')
<add> },
<add> })
<add>
<ide> console.log('using workdir', workDir)
<add> console.log('using repodir', repoDir)
<ide> await fs.ensureDir(workDir)
<ide>
<ide> const pkgPaths = await linkPackages(repoDir)
<ide> async function main() {
<ide> },
<ide> })
<ide>
<del> // remove temporary package packs
<del> pkgPaths.forEach((packagePath) => {
<del> fs.unlinkSync(packagePath)
<del> })
<del> // remove changes to package.json files from packing
<del> await execa('git', ['checkout', '.'], {
<del> cwd: repoDir,
<del> stdio: ['ignore', 'inherit', 'inherit'],
<del> })
<del>
<ide> const nextServerPath = path.join(
<ide> workDir,
<ide> 'node_modules/next/dist/server/next-server.js'
<ide> async function main() {
<ide> })
<ide> )
<ide> await fs.remove(workDir)
<add> await fs.remove(repoDir)
<ide>
<ide> console.timeEnd(traceLabel)
<ide> | 1 |
PHP | PHP | remove unnecessary test | 981cb002042ad93af018bd02e368d22621137d48 | <ide><path>tests/Support/SupportSerializableClosureTest.php
<del><?php
<del>
<del>use SuperClosure\Serializer;
<del>
<del>class SupportSerializableClosureTest extends PHPUnit_Framework_TestCase {
<del>
<del> public function testClosureCanBeSerializedAndRebuilt()
<del> {
<del> $serialized = (new Serializer)->serialize(function() { return 'hello'; });
<del>
<del> $unserialized = unserialize($serialized);
<del>
<del> $this->assertEquals('hello', $unserialized());
<del> }
<del>
<del>
<del> public function testClosureCanBeSerializedAndRebuiltAndInheritState()
<del> {
<del> $a = 1;
<del> $b = 1;
<del>
<del> $serialized = (new Serializer)->serialize(function($i) use ($a, $b)
<del> {
<del> return $a + $b + $i;
<del> });
<del>
<del> $unserialized = unserialize($serialized);
<del>
<del> $this->assertEquals(3, $unserialized(1));
<del> }
<del>
<del>} | 1 |
Javascript | Javascript | remove unused variable | a6ad8eace3e2211bce100f153082397baf023c6f | <ide><path>packages/ember-handlebars/tests/handlebars_test.js
<ide> var appendView = function() {
<ide> Ember.run(function() { view.appendTo('#qunit-fixture'); });
<ide> };
<ide>
<del>var additionalTeardown;
<ide> var originalLookup = Ember.lookup, lookup;
<ide> var TemplateTests, container;
<ide> | 1 |
Go | Go | fix races in channel close | 378f0657f963fa6c854643571e4fe83628466c01 | <ide><path>daemon/logger/copier.go
<ide> import (
<ide> // Writes are concurrent, so you need implement some sync in your logger
<ide> type Copier struct {
<ide> // srcs is map of name -> reader pairs, for example "stdout", "stderr"
<del> srcs map[string]io.Reader
<del> dst Logger
<del> copyJobs sync.WaitGroup
<del> closed chan struct{}
<add> srcs map[string]io.Reader
<add> dst Logger
<add> copyJobs sync.WaitGroup
<add> closeOnce sync.Once
<add> closed chan struct{}
<ide> }
<ide>
<ide> // NewCopier creates a new Copier
<ide> func (c *Copier) Wait() {
<ide>
<ide> // Close closes the copier
<ide> func (c *Copier) Close() {
<del> select {
<del> case <-c.closed:
<del> default:
<add> c.closeOnce.Do(func() {
<ide> close(c.closed)
<del> }
<add> })
<ide> }
<ide><path>daemon/logger/logger.go
<ide> import (
<ide> "errors"
<ide> "sort"
<ide> "strings"
<add> "sync"
<ide> "time"
<ide>
<ide> "github.com/docker/docker/pkg/jsonlog"
<ide> type LogWatcher struct {
<ide> Msg chan *Message
<ide> // For sending error messages that occur while while reading logs.
<ide> Err chan error
<add> closeOnce sync.Once
<ide> closeNotifier chan struct{}
<ide> }
<ide>
<ide> func NewLogWatcher() *LogWatcher {
<ide> // Close notifies the underlying log reader to stop.
<ide> func (w *LogWatcher) Close() {
<ide> // only close if not already closed
<del> select {
<del> case <-w.closeNotifier:
<del> default:
<add> w.closeOnce.Do(func() {
<ide> close(w.closeNotifier)
<del> }
<add> })
<ide> }
<ide>
<ide> // WatchClose returns a channel receiver that receives notification | 2 |
Javascript | Javascript | remove special casing for `node.meshes` | 129ea49e81a3650255929ee8dd98a13c0361fef9 | <ide><path>examples/js/loaders/GLTFLoader.js
<ide> THREE.GLTFLoader = ( function () {
<ide> ] ).then( function ( dependencies ) {
<ide>
<ide> return _each( __nodes, function ( _node, nodeId ) {
<del>
<add>
<ide> var node = json.nodes[ nodeId ];
<ide>
<del> var meshes;
<del>
<del> if ( node.mesh !== undefined) {
<del>
<del> meshes = [ node.mesh ];
<del>
<del> } else if ( node.meshes !== undefined ) {
<del>
<del> console.warn( 'THREE.GLTFLoader: Legacy glTF file detected. Nodes may have no more than one mesh.' );
<del>
<del> meshes = node.meshes;
<del>
<del> }
<del>
<del> if ( meshes !== undefined ) {
<del>
<del> for ( var meshId in meshes ) {
<del>
<del> var mesh = meshes[ meshId ];
<del> var group = dependencies.meshes[ mesh ];
<del>
<del> if ( group === undefined ) {
<add> var mesh = node.mesh;
<ide>
<del> console.warn( 'THREE.GLTFLoader: Could not find node "' + mesh + '".' );
<del> continue;
<add> if ( mesh !== undefined) {
<ide>
<del> }
<add> var group = dependencies.meshes[ mesh ];
<ide>
<add> if ( group !== undefined ) {
<ide> // do not clone children as they will be replaced anyway
<ide> var clonedgroup = group.clone( false );
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide> }
<ide>
<ide> _node.add( clonedgroup );
<add> } else {
<ide>
<del> }
<add> console.warn( 'THREE.GLTFLoader: Could not find node "' + mesh + '".' );
<add>
<add> }
<ide>
<ide> }
<ide> | 1 |
Javascript | Javascript | add trusted types to react on client side | b8d079b41372290aa1846e3a780d85d05ab8ffc1 | <ide><path>.eslintrc.js
<ide> module.exports = {
<ide> spyOnProd: true,
<ide> __PROFILE__: true,
<ide> __UMD__: true,
<add> trustedTypes: true,
<ide> },
<ide> };
<ide><path>packages/react-dom/src/client/DOMPropertyOperations.js
<ide> import {
<ide> OVERLOADED_BOOLEAN,
<ide> } from '../shared/DOMProperty';
<ide> import sanitizeURL from '../shared/sanitizeURL';
<add>import {toStringOrTrustedType} from './ToStringValue';
<ide> import {disableJavaScriptURLs} from 'shared/ReactFeatureFlags';
<add>import {setAttribute, setAttributeNS} from './setAttribute';
<ide>
<ide> import type {PropertyInfo} from '../shared/DOMProperty';
<ide>
<ide> export function setValueForProperty(
<ide> if (value === null) {
<ide> node.removeAttribute(attributeName);
<ide> } else {
<del> node.setAttribute(attributeName, '' + (value: any));
<add> setAttribute(node, attributeName, toStringOrTrustedType(value));
<ide> }
<ide> }
<ide> return;
<ide> export function setValueForProperty(
<ide> const {type} = propertyInfo;
<ide> let attributeValue;
<ide> if (type === BOOLEAN || (type === OVERLOADED_BOOLEAN && value === true)) {
<add> // If attribute type is boolean, we know for sure it won't be an execution sink
<add> // and we won't require Trusted Type here.
<ide> attributeValue = '';
<ide> } else {
<ide> // `setAttribute` with objects becomes only `[object]` in IE8/9,
<ide> // ('' + value) makes it output the correct toString()-value.
<del> attributeValue = '' + (value: any);
<add> attributeValue = toStringOrTrustedType(value);
<ide> if (propertyInfo.sanitizeURL) {
<del> sanitizeURL(attributeValue);
<add> sanitizeURL(attributeValue.toString());
<ide> }
<ide> }
<ide> if (attributeNamespace) {
<del> node.setAttributeNS(attributeNamespace, attributeName, attributeValue);
<add> setAttributeNS(node, attributeNamespace, attributeName, attributeValue);
<ide> } else {
<del> node.setAttribute(attributeName, attributeValue);
<add> setAttribute(node, attributeName, attributeValue);
<ide> }
<ide> }
<ide> }
<ide><path>packages/react-dom/src/client/ReactDOMComponent.js
<ide> import possibleStandardNames from '../shared/possibleStandardNames';
<ide> import {validateProperties as validateARIAProperties} from '../shared/ReactDOMInvalidARIAHook';
<ide> import {validateProperties as validateInputProperties} from '../shared/ReactDOMNullInputValuePropHook';
<ide> import {validateProperties as validateUnknownProperties} from '../shared/ReactDOMUnknownPropertyHook';
<add>import {toStringOrTrustedType} from './ToStringValue';
<ide>
<del>import {enableFlareAPI} from 'shared/ReactFeatureFlags';
<add>import {
<add> enableFlareAPI,
<add> enableTrustedTypesIntegration,
<add>} from 'shared/ReactFeatureFlags';
<ide>
<ide> let didWarnInvalidHydration = false;
<ide> let didWarnShadyDOM = false;
<add>let didWarnScriptTags = false;
<ide>
<ide> const DANGEROUSLY_SET_INNER_HTML = 'dangerouslySetInnerHTML';
<ide> const SUPPRESS_CONTENT_EDITABLE_WARNING = 'suppressContentEditableWarning';
<ide> export function createElement(
<ide> // Create the script via .innerHTML so its "parser-inserted" flag is
<ide> // set to true and it does not execute
<ide> const div = ownerDocument.createElement('div');
<add> if (__DEV__) {
<add> if (enableTrustedTypesIntegration && !didWarnScriptTags) {
<add> warning(
<add> false,
<add> 'Encountered a script tag while rendering React component. ' +
<add> 'Scripts inside React components are never executed when rendering ' +
<add> 'on the client. Consider using template tag instead ' +
<add> '(https://developer.mozilla.org/en-US/docs/Web/HTML/Element/template).',
<add> );
<add> didWarnScriptTags = true;
<add> }
<add> }
<ide> div.innerHTML = '<script><' + '/script>'; // eslint-disable-line
<ide> // This is guaranteed to yield a script element.
<ide> const firstChild = ((div.firstChild: any): HTMLScriptElement);
<ide> export function diffProperties(
<ide> const lastHtml = lastProp ? lastProp[HTML] : undefined;
<ide> if (nextHtml != null) {
<ide> if (lastHtml !== nextHtml) {
<del> (updatePayload = updatePayload || []).push(propKey, '' + nextHtml);
<add> (updatePayload = updatePayload || []).push(
<add> propKey,
<add> toStringOrTrustedType(nextHtml),
<add> );
<ide> }
<ide> } else {
<ide> // TODO: It might be too late to clear this if we have children
<ide><path>packages/react-dom/src/client/ToStringValue.js
<ide> * @flow
<ide> */
<ide>
<add>import {enableTrustedTypesIntegration} from 'shared/ReactFeatureFlags';
<add>
<ide> export opaque type ToStringValue =
<ide> | boolean
<ide> | number
<ide> export function getToStringValue(value: mixed): ToStringValue {
<ide> return '';
<ide> }
<ide> }
<add>
<add>/**
<add> * Returns true only if Trusted Types are available in global object and the value is a trusted type.
<add> */
<add>let isTrustedTypesValue: (value: any) => boolean;
<add>// $FlowExpectedError - TrustedTypes are defined only in some browsers or with polyfill
<add>if (enableTrustedTypesIntegration && typeof trustedTypes !== 'undefined') {
<add> isTrustedTypesValue = (value: any) =>
<add> trustedTypes.isHTML(value) ||
<add> trustedTypes.isScript(value) ||
<add> trustedTypes.isScriptURL(value) ||
<add> // TrustedURLs are deprecated and will be removed soon: https://github.com/WICG/trusted-types/pull/204
<add> (trustedTypes.isURL && trustedTypes.isURL(value));
<add>} else {
<add> isTrustedTypesValue = () => false;
<add>}
<add>
<add>/** Trusted value is a wrapper for "safe" values which can be assigned to DOM execution sinks. */
<add>export opaque type TrustedValue: {toString(): string, valueOf(): string} = {
<add> toString(): string,
<add> valueOf(): string,
<add>};
<add>
<add>/**
<add> * We allow passing objects with toString method as element attributes or in dangerouslySetInnerHTML
<add> * and we do validations that the value is safe. Once we do validation we want to use the validated
<add> * value instead of the object (because object.toString may return something else on next call).
<add> *
<add> * If application uses Trusted Types we don't stringify trusted values, but preserve them as objects.
<add> */
<add>export function toStringOrTrustedType(value: any): string | TrustedValue {
<add> if (
<add> enableTrustedTypesIntegration &&
<add> // fast-path string values as it's most frequent usage of the function
<add> typeof value !== 'string' &&
<add> isTrustedTypesValue(value)
<add> ) {
<add> return value;
<add> } else {
<add> return '' + value;
<add> }
<add>}
<ide><path>packages/react-dom/src/client/__tests__/trustedTypes-test.internal.js
<add>describe('when Trusted Types are available in global object', () => {
<add> let React;
<add> let ReactDOM;
<add> let ReactFeatureFlags;
<add> let container;
<add>
<add> beforeEach(() => {
<add> container = document.createElement('div');
<add> window.trustedTypes = {
<add> isHTML: () => true,
<add> isScript: () => false,
<add> isScriptURL: () => false,
<add> };
<add> ReactFeatureFlags = require('shared/ReactFeatureFlags');
<add> ReactFeatureFlags.enableTrustedTypesIntegration = true;
<add> React = require('react');
<add> ReactDOM = require('react-dom');
<add> });
<add>
<add> afterEach(() => {
<add> delete window.trustedTypes;
<add> ReactFeatureFlags.enableTrustedTypesIntegration = false;
<add> });
<add>
<add> it('should not stringify trusted values', () => {
<add> const trustedObject = {toString: () => 'I look like a trusted object'};
<add> class Component extends React.Component {
<add> state = {inner: undefined};
<add> render() {
<add> return <div dangerouslySetInnerHTML={{__html: this.state.inner}} />;
<add> }
<add> }
<add>
<add> const isHTMLSpy = jest.spyOn(window.trustedTypes, ['isHTML']);
<add> const instance = ReactDOM.render(<Component />, container);
<add> instance.setState({inner: trustedObject});
<add>
<add> expect(container.firstChild.innerHTML).toBe(trustedObject.toString());
<add> expect(isHTMLSpy).toHaveBeenCalledWith(trustedObject);
<add> });
<add>
<add> describe('dangerouslySetInnerHTML in svg elements in Internet Explorer', () => {
<add> let innerHTMLDescriptor;
<add>
<add> // simulate svg elements in Internet Explorer which don't have 'innerHTML' property
<add> beforeEach(() => {
<add> innerHTMLDescriptor = Object.getOwnPropertyDescriptor(
<add> Element.prototype,
<add> 'innerHTML',
<add> );
<add> delete Element.prototype.innerHTML;
<add> Object.defineProperty(
<add> HTMLDivElement.prototype,
<add> 'innerHTML',
<add> innerHTMLDescriptor,
<add> );
<add> });
<add>
<add> afterEach(() => {
<add> delete HTMLDivElement.prototype.innerHTML;
<add> Object.defineProperty(
<add> Element.prototype,
<add> 'innerHTML',
<add> innerHTMLDescriptor,
<add> );
<add> });
<add>
<add> it('should log a warning', () => {
<add> class Component extends React.Component {
<add> render() {
<add> return <svg dangerouslySetInnerHTML={{__html: 'unsafe html'}} />;
<add> }
<add> }
<add> expect(() => {
<add> ReactDOM.render(<Component />, container);
<add> }).toWarnDev(
<add> "Warning: Using 'dangerouslySetInnerHTML' in an svg element with " +
<add> 'Trusted Types enabled in an Internet Explorer will cause ' +
<add> 'the trusted value to be converted to string. Assigning string ' +
<add> "to 'innerHTML' will throw an error if Trusted Types are enforced. " +
<add> "You can try to wrap your svg element inside a div and use 'dangerouslySetInnerHTML' " +
<add> 'on the enclosing div instead.',
<add> );
<add> });
<add> });
<add>
<add> it('should warn once when rendering script tag in jsx on client', () => {
<add> expect(() => {
<add> ReactDOM.render(<script>alert("I am not executed")</script>, container);
<add> }).toWarnDev(
<add> 'Warning: Encountered a script tag while rendering React component. ' +
<add> 'Scripts inside React components are never executed when rendering ' +
<add> 'on the client. Consider using template tag instead ' +
<add> '(https://developer.mozilla.org/en-US/docs/Web/HTML/Element/template).\n' +
<add> ' in script (at **)',
<add> );
<add>
<add> // check that the warning is print only once
<add> ReactDOM.render(<script>alert("I am not executed")</script>, container);
<add> });
<add>});
<ide><path>packages/react-dom/src/client/setAttribute.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow
<add> */
<add>
<add>import type {TrustedValue} from './ToStringValue';
<add>
<add>/**
<add> * Set attribute for a node. The attribute value can be either string or
<add> * Trusted value (if application uses Trusted Types).
<add> */
<add>export function setAttribute(
<add> node: Element,
<add> attributeName: string,
<add> attributeValue: string | TrustedValue,
<add>) {
<add> node.setAttribute(attributeName, (attributeValue: any));
<add>}
<add>
<add>/**
<add> * Set attribute with namespace for a node. The attribute value can be either string or
<add> * Trusted value (if application uses Trusted Types).
<add> */
<add>export function setAttributeNS(
<add> node: Element,
<add> attributeNamespace: string,
<add> attributeName: string,
<add> attributeValue: string | TrustedValue,
<add>) {
<add> node.setAttributeNS(attributeNamespace, attributeName, (attributeValue: any));
<add>}
<ide><path>packages/react-dom/src/client/setInnerHTML.js
<ide>
<ide> import {Namespaces} from '../shared/DOMNamespaces';
<ide> import createMicrosoftUnsafeLocalFunction from '../shared/createMicrosoftUnsafeLocalFunction';
<add>import warning from 'shared/warning';
<add>import type {TrustedValue} from './ToStringValue';
<add>import {enableTrustedTypesIntegration} from 'shared/ReactFeatureFlags';
<ide>
<ide> // SVG temp container for IE lacking innerHTML
<ide> let reusableSVGContainer;
<ide> let reusableSVGContainer;
<ide> */
<ide> const setInnerHTML = createMicrosoftUnsafeLocalFunction(function(
<ide> node: Element,
<del> html: string,
<add> html: string | TrustedValue,
<ide> ): void {
<ide> // IE does not have innerHTML for SVG nodes, so instead we inject the
<ide> // new markup in a temp node and then move the child nodes across into
<ide> // the target node
<del>
<del> if (node.namespaceURI === Namespaces.svg && !('innerHTML' in node)) {
<del> reusableSVGContainer =
<del> reusableSVGContainer || document.createElement('div');
<del> reusableSVGContainer.innerHTML = '<svg>' + html + '</svg>';
<del> const svgNode = reusableSVGContainer.firstChild;
<del> while (node.firstChild) {
<del> node.removeChild(node.firstChild);
<add> if (node.namespaceURI === Namespaces.svg) {
<add> if (enableTrustedTypesIntegration && __DEV__) {
<add> warning(
<add> // $FlowExpectedError - trustedTypes are defined only in some browsers or with polyfill
<add> typeof trustedTypes === 'undefined',
<add> "Using 'dangerouslySetInnerHTML' in an svg element with " +
<add>        'Trusted Types enabled in Internet Explorer will cause ' +
<add>        'the trusted value to be converted to a string. Assigning a string ' +
<add> "to 'innerHTML' will throw an error if Trusted Types are enforced. " +
<add> "You can try to wrap your svg element inside a div and use 'dangerouslySetInnerHTML' " +
<add> 'on the enclosing div instead.',
<add> );
<ide> }
<del> while (svgNode.firstChild) {
<del> node.appendChild(svgNode.firstChild);
<add> if (!('innerHTML' in node)) {
<add> reusableSVGContainer =
<add> reusableSVGContainer || document.createElement('div');
<add> reusableSVGContainer.innerHTML =
<add> '<svg>' + html.valueOf().toString() + '</svg>';
<add> const svgNode = reusableSVGContainer.firstChild;
<add> while (node.firstChild) {
<add> node.removeChild(node.firstChild);
<add> }
<add> while (svgNode.firstChild) {
<add> node.appendChild(svgNode.firstChild);
<add> }
<add> } else {
<add> node.innerHTML = (html: any);
<ide> }
<ide> } else {
<del> node.innerHTML = html;
<add> node.innerHTML = (html: any);
<ide> }
<ide> });
<ide>
<ide><path>packages/shared/ReactFeatureFlags.js
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide>
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>
<add>export const enableTrustedTypesIntegration = false;
<ide><path>packages/shared/forks/ReactFeatureFlags.native-fb.js
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>export const enableTrustedTypesIntegration = false;
<ide>
<ide> // Only used in www builds.
<ide> export function addUserTimingListener() {
<ide><path>packages/shared/forks/ReactFeatureFlags.native-oss.js
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>export const enableTrustedTypesIntegration = false;
<ide>
<ide> // Only used in www builds.
<ide> export function addUserTimingListener() {
<ide><path>packages/shared/forks/ReactFeatureFlags.persistent.js
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>export const enableTrustedTypesIntegration = false;
<ide>
<ide> // Only used in www builds.
<ide> export function addUserTimingListener() {
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.js
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>export const enableTrustedTypesIntegration = false;
<ide>
<ide> // Only used in www builds.
<ide> export function addUserTimingListener() {
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.www.js
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const warnAboutStringRefs = false;
<ide> export const disableLegacyContext = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<add>export const enableTrustedTypesIntegration = false;
<ide>
<ide> // Only used in www builds.
<ide> export function addUserTimingListener() {
<ide><path>packages/shared/forks/ReactFeatureFlags.www.js
<ide> export const {
<ide> enableUserBlockingEvents,
<ide> disableLegacyContext,
<ide> disableSchedulerTimeoutBasedOnReactExpirationTime,
<add> enableTrustedTypesIntegration,
<ide> warnAboutStringRefs,
<ide> warnAboutDefaultPropsOnFunctionComponents,
<ide> } = require('ReactFeatureFlags');
<ide><path>scripts/rollup/validate/eslintrc.cjs.js
<ide> module.exports = {
<ide> process: true,
<ide> setImmediate: true,
<ide> Buffer: true,
<add> // Trusted Types
<add> trustedTypes: true,
<ide>
<ide> // Scheduler profiling
<ide> SharedArrayBuffer: true,
<ide><path>scripts/rollup/validate/eslintrc.fb.js
<ide> module.exports = {
<ide> // Node.js Server Rendering
<ide> setImmediate: true,
<ide> Buffer: true,
<add> // Trusted Types
<add> trustedTypes: true,
<ide>
<ide> // Scheduler profiling
<ide> SharedArrayBuffer: true,
<ide><path>scripts/rollup/validate/eslintrc.rn.js
<ide> module.exports = {
<ide> // Fabric. See https://github.com/facebook/react/pull/15490
<ide> // for more information
<ide> nativeFabricUIManager: true,
<add> // Trusted Types
<add> trustedTypes: true,
<ide>
<ide> // Scheduler profiling
<ide> SharedArrayBuffer: true,
<ide><path>scripts/rollup/validate/eslintrc.umd.js
<ide> module.exports = {
<ide> define: true,
<ide> require: true,
<ide> global: true,
<add> // Trusted Types
<add> trustedTypes: true,
<ide>
<ide> // Scheduler profiling
<ide> SharedArrayBuffer: true, | 18 |
Go | Go | delay network deletion until after lb cleanup | 6861aade580e13e039330dca2ca46a07bcf13026 | <ide><path>libnetwork/network.go
<ide> func (n *network) delete(force bool, rmLBEndpoint bool) error {
<ide> goto removeFromStore
<ide> }
<ide>
<del> if err = n.deleteNetwork(); err != nil {
<del> if !force {
<del> return err
<del> }
<del> logrus.Debugf("driver failed to delete stale network %s (%s): %v", n.Name(), n.ID(), err)
<del> }
<del>
<ide> n.ipamRelease()
<ide> if err = c.updateToStore(n); err != nil {
<ide> logrus.Warnf("Failed to update store after ipam release for network %s (%s): %v", n.Name(), n.ID(), err)
<ide> func (n *network) delete(force bool, rmLBEndpoint bool) error {
<ide> c.cleanupServiceDiscovery(n.ID())
<ide>
<ide> // Cleanup the load balancer. On Windows this call is required
<del> // to remove remote loadbalancers in VFP.
<add> // to remove remote loadbalancers in VFP, and must be performed before
<add> // dataplane network deletion.
<ide> c.cleanupServiceBindings(n.ID())
<ide>
<add> // Delete the network from the dataplane
<add> if err = n.deleteNetwork(); err != nil {
<add> if !force {
<add> return err
<add> }
<add> logrus.Debugf("driver failed to delete stale network %s (%s): %v", n.Name(), n.ID(), err)
<add> }
<add>
<ide> removeFromStore:
<ide> // deleteFromStore performs an atomic delete operation and the
<ide> // network.epCnt will help prevent any possible
<ide><path>libnetwork/service_common.go
<ide> func (c *controller) rmServiceBinding(svcName, svcID, nID, eID, containerName st
<ide> // Remove loadbalancer service(if needed) and backend in all
<ide> // sandboxes in the network only if the vip is valid.
<ide> if entries == 0 {
<del> // The network may well have been deleted before the last
<del> // of the service bindings. That's ok on Linux because
<del> // removing the network sandbox implicitly removes the
<del> // backend service bindings. Windows VFP cleanup requires
<del> // calling cleanupServiceBindings on the network prior to
<del> // deleting the network, performed by network.delete.
<add> // The network may well have been deleted from the store (and
<add> // dataplane) before the last of the service bindings. On Linux that's
<add> // ok because removing the network sandbox from the dataplane
<add> // implicitly cleans up all related dataplane state.
<add> // On the Windows dataplane, VFP policylists must be removed
<add> // independently of the network, and they must be removed before the HNS
<add> // network. Otherwise, policylist removal fails with "network not
<add> // found." On Windows cleanupServiceBindings must be called prior to
<add> // removing the network from the store or dataplane.
<ide> n, err := c.NetworkByID(nID)
<ide> if err == nil {
<ide> n.(*network).rmLBBackend(ip, lb, rmService, fullRemove) | 2 |
Javascript | Javascript | remove logic for multiple error recovery attempts | 5318971f50da06fd42763689826acecdb14b4c5e | <ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.new.js
<ide> function recoverFromConcurrentError(root, errorRetryLanes) {
<ide> }
<ide> }
<ide>
<del> let exitStatus;
<del>
<del> const MAX_ERROR_RETRY_ATTEMPTS = 50;
<del> for (let i = 0; i < MAX_ERROR_RETRY_ATTEMPTS; i++) {
<del> exitStatus = renderRootSync(root, errorRetryLanes);
<del> if (
<del> exitStatus === RootErrored &&
<del> workInProgressRootRenderPhaseUpdatedLanes !== NoLanes
<del> ) {
<del> // There was a render phase update during this render. Some internal React
<del> // implementation details may use this as a trick to schedule another
<del> // render pass. To protect against an inifinite loop, eventually
<del> // we'll give up.
<del> continue;
<del> }
<del> break;
<del> }
<add> const exitStatus = renderRootSync(root, errorRetryLanes);
<ide>
<ide> executionContext = prevExecutionContext;
<ide>
<ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.old.js
<ide> function recoverFromConcurrentError(root, errorRetryLanes) {
<ide> }
<ide> }
<ide>
<del> let exitStatus;
<del>
<del> const MAX_ERROR_RETRY_ATTEMPTS = 50;
<del> for (let i = 0; i < MAX_ERROR_RETRY_ATTEMPTS; i++) {
<del> exitStatus = renderRootSync(root, errorRetryLanes);
<del> if (
<del> exitStatus === RootErrored &&
<del> workInProgressRootRenderPhaseUpdatedLanes !== NoLanes
<del> ) {
<del> // There was a render phase update during this render. Some internal React
<del> // implementation details may use this as a trick to schedule another
<del> // render pass. To protect against an inifinite loop, eventually
<del> // we'll give up.
<del> continue;
<del> }
<del> break;
<del> }
<add> const exitStatus = renderRootSync(root, errorRetryLanes);
<ide>
<ide> executionContext = prevExecutionContext;
<ide> | 2 |
Ruby | Ruby | extend curl warning | 821dbab5f88e9dcbb9f08ba5ef3d8e1bf5dece3b | <ide><path>Library/Homebrew/cmd/doctor.rb
<ide> def check_user_path_3
<ide> end
<ide>
<ide> def check_for_bad_curl
<del> if MacOS.version <= "10.6" && !Formula["curl"].installed? then <<-EOS.undent
<del> The system curl on 10.6 and below is often incapable of supporting
<add> if MacOS.version <= "10.8" && !Formula["curl"].installed? then <<-EOS.undent
<add> The system curl on 10.8 and below is often incapable of supporting
<ide> modern secure connections & will fail on fetching formulae.
<add>
<ide> We recommend you:
<ide> brew install curl
<ide> EOS | 1 |
Ruby | Ruby | distinguish indirect deps from undeclared deps | c946da88ab07772ac4becd45ef0c82a60bbfc515 | <ide><path>Library/Homebrew/os/mac/linkage_checker.rb
<ide> def initialize(keg, formula = nil)
<ide> @system_dylibs = Set.new
<ide> @broken_dylibs = Set.new
<ide> @variable_dylibs = Set.new
<add> @indirect_deps = []
<ide> @undeclared_deps = []
<ide> @reverse_links = Hash.new { |h, k| h[k] = Set.new }
<ide> @unnecessary_deps = []
<ide> def check_dylibs
<ide> end
<ide> end
<ide>
<del> @undeclared_deps, @unnecessary_deps = check_undeclared_deps if formula
<add> @indirect_deps, @undeclared_deps, @unnecessary_deps = check_undeclared_deps if formula
<ide> end
<ide>
<ide> def check_undeclared_deps
<ide> def check_undeclared_deps
<ide> formula.build.without?(dep)
<ide> end
<ide> declared_deps = formula.deps.reject { |dep| filter_out.call(dep) }.map(&:name)
<add> recursive_deps = keg.to_formula.runtime_dependencies.map { |dep| dep.to_formula.full_name }
<ide> declared_dep_names = declared_deps.map { |dep| dep.split("/").last }
<del> undeclared_deps = @brewed_dylibs.keys.reject do |full_name|
<add> indirect_deps = []
<add> undeclared_deps = []
<add> @brewed_dylibs.each_key do |full_name|
<ide> name = full_name.split("/").last
<del> next true if name == formula.name
<del> declared_dep_names.include?(name)
<add> next if name == formula.name
<add> if recursive_deps.include?(name)
<add> indirect_deps << full_name unless declared_dep_names.include?(name)
<add> else
<add> undeclared_deps << full_name
<add> end
<add> end
<add> sort_by_formula_full_name!(indirect_deps)
<add> sort_by_formula_full_name!(undeclared_deps)
<add> unnecessary_deps = declared_dep_names.reject do |full_name|
<add> name = full_name.split("/").last
<add> next true if Formula[name].bin.directory?
<add> @brewed_dylibs.keys.map { |x| x.split("/").last }.include?(name)
<ide> end
<del> undeclared_deps.sort do |a, b|
<add> [indirect_deps, undeclared_deps, unnecessary_deps]
<add> end
<add>
<add> def sort_by_formula_full_name!(arr)
<add> arr.sort! do |a, b|
<ide> if a.include?("/") && !b.include?("/")
<ide> 1
<ide> elsif !a.include?("/") && b.include?("/")
<ide> def check_undeclared_deps
<ide> a <=> b
<ide> end
<ide> end
<del> unnecessary_deps = declared_dep_names.reject do |full_name|
<del> name = full_name.split("/").last
<del> next true if Formula[name].bin.directory?
<del> @brewed_dylibs.keys.map { |x| x.split("/").last }.include?(name)
<del> end
<del> [undeclared_deps, unnecessary_deps]
<ide> end
<ide>
<ide> def display_normal_output
<ide> display_items "System libraries", @system_dylibs
<ide> display_items "Homebrew libraries", @brewed_dylibs
<add> display_items "Indirect dependencies with linkage", @indirect_deps
<ide> display_items "Variable-referenced libraries", @variable_dylibs
<ide> display_items "Missing libraries", @broken_dylibs
<ide> display_items "Undeclared dependencies with linkage", @undeclared_deps | 1 |
Text | Text | fix stylistic issues in api/net.md | d3418b13190d142112270dcacf33d5542170729d | <ide><path>doc/api/net.md
<ide> double-backslashes, such as:
<ide>
<ide> ```js
<ide> net.createServer().listen(
<del> path.join('\\\\?\\pipe', process.cwd(), 'myctl'))
<add> path.join('\\\\?\\pipe', process.cwd(), 'myctl'));
<ide> ```
<ide>
<ide> ## Class: net.Server
<ide> Returns an object with `port`, `family`, and `address` properties:
<ide> Example:
<ide>
<ide> ```js
<del>var server = net.createServer((socket) => {
<add>const server = net.createServer((socket) => {
<ide> socket.end('goodbye\n');
<ide> }).on('error', (err) => {
<ide> // handle errors here
<ide> socket.setTimeout(3000);
<ide> socket.on('timeout', () => {
<ide> console.log('socket timeout');
<ide> socket.end();
<del>})
<add>});
<ide> ```
<ide>
<ide> If `timeout` is 0, then the existing idle timeout is disabled.
<ide> server.listen(8124, () => {
<ide>
<ide> Test this by using `telnet`:
<ide>
<del>```sh
<del>telnet localhost 8124
<add>```console
<add>$ telnet localhost 8124
<ide> ```
<ide>
<ide> To listen on the socket `/tmp/echo.sock` the third line from the last would
<ide> server.listen('/tmp/echo.sock', () => {
<ide>
<ide> Use `nc` to connect to a UNIX domain socket server:
<ide>
<del>```js
<del>nc -U /tmp/echo.sock
<add>```console
<add>$ nc -U /tmp/echo.sock
<ide> ```
<ide>
<ide> ## net.isIP(input) | 1 |
PHP | PHP | add typehint for mailer/ | 1e16656a25e769328f7bd88d911feea688137da6 | <ide><path>src/Mailer/Email.php
<ide> public function __clone()
<ide> * @param array $args Method arguments
<ide> * @return $this|mixed
<ide> */
<del> public function __call($method, $args)
<add> public function __call(string $method, array $args)
<ide> {
<ide> $result = $this->message->$method(...$args);
<ide>
<ide> public function getTransport(): ?AbstractTransport
<ide> * @param string|null $type Use MESSAGE_* constants or null to return the full message as array
<ide> * @return string|array String if type is given, array if type is null
<ide> */
<del> public function message($type = null)
<add> public function message(?string $type = null)
<ide> {
<ide> if ($type) {
<ide> return $this->message->getBody($type);
<ide> public function setRenderer(Renderer $renderer)
<ide> * @param array $contents The content with 'headers' and 'message' keys.
<ide> * @return void
<ide> */
<del> protected function _logDelivery($contents): void
<add> protected function _logDelivery(array $contents): void
<ide> {
<ide> if (empty($this->_profile['log'])) {
<ide> return;
<ide> public function createFromArray(array $config)
<ide> public function serialize(): string
<ide> {
<ide> $array = $this->jsonSerialize();
<del> array_walk_recursive($array, function (&$item, $key) {
<add> array_walk_recursive($array, function (&$item, $key): void {
<ide> if ($item instanceof SimpleXMLElement) {
<ide> $item = json_decode(json_encode((array)$item), true);
<ide> }
<ide><path>src/Mailer/Mailer.php
<ide> public function viewBuilder(): ViewBuilder
<ide> * @param array $args Method arguments
<ide> * @return $this|mixed
<ide> */
<del> public function __call($method, $args)
<add> public function __call(string $method, array $args)
<ide> {
<ide> $result = $this->_email->$method(...$args);
<ide> if (strpos($method, 'get') === 0) {
<ide><path>src/Mailer/Message.php
<ide> public function jsonSerialize(): array
<ide> $array[$property] = $this->{$property};
<ide> }
<ide>
<del> array_walk($array['attachments'], function (&$item, $key) {
<add> array_walk($array['attachments'], function (&$item, $key): void {
<ide> if (!empty($item['file'])) {
<ide> $item['data'] = $this->readFile($item['file']);
<ide> unset($item['file']);
<ide> public function createFromArray(array $config)
<ide> public function serialize(): string
<ide> {
<ide> $array = $this->jsonSerialize();
<del> array_walk_recursive($array, function (&$item, $key) {
<add> array_walk_recursive($array, function (&$item, $key): void {
<ide> if ($item instanceof SimpleXMLElement) {
<ide> $item = json_decode(json_encode((array)$item), true);
<ide> }
<ide> public function serialize(): string
<ide> * Unserializes the Email object.
<ide> *
<ide> * @param string $data Serialized string.
<del> * @return static Configured message instance.
<add> * @return void
<ide> */
<ide> public function unserialize($data)
<ide> {
<ide> public function unserialize($data)
<ide> throw new Exception('Unable to unserialize message.');
<ide> }
<ide>
<del> return $this->createFromArray($array);
<add> $this->createFromArray($array);
<ide> }
<ide> }
<ide><path>src/Mailer/Transport/SmtpTransport.php
<ide> public function __destruct()
<ide> *
<ide> * @return void
<ide> */
<del> public function __wakeup()
<add> public function __wakeup(): void
<ide> {
<ide> $this->_socket = null;
<ide> }
<ide> protected function _smtpSend(?string $data, $checkCode = '250'): ?string
<ide> }
<ide>
<ide> /**
<add> * Get socket instance.
<add> *
<ide> * @return \Cake\Network\Socket
<ide> * @throws \RuntimeException If socket is not set.
<ide> */
<ide><path>src/Mailer/TransportFactory.php
<ide> public static function getRegistry(): TransportRegistry
<ide> * @param \Cake\Mailer\TransportRegistry $registry Injectable registry object.
<ide> * @return void
<ide> */
<del> public static function setRegistry(TransportRegistry $registry)
<add> public static function setRegistry(TransportRegistry $registry): void
<ide> {
<ide> static::$_registry = $registry;
<ide> }
<ide> public static function setRegistry(TransportRegistry $registry)
<ide> * @return void
<ide> * @throws \InvalidArgumentException When a tranport cannot be created.
<ide> */
<del> protected static function _buildTransport($name): void
<add> protected static function _buildTransport(string $name): void
<ide> {
<ide> if (!isset(static::$_config[$name])) {
<ide> throw new InvalidArgumentException(
<ide> protected static function _buildTransport($name): void
<ide> * @param string $name Config name.
<ide> * @return \Cake\Mailer\AbstractTransport
<ide> */
<del> public static function get($name): AbstractTransport
<add> public static function get(string $name): AbstractTransport
<ide> {
<ide> $registry = static::getRegistry();
<ide> | 5 |
Javascript | Javascript | remove unused parameters | e67220ec81ce55833559e9d40f44a08b80756a07 | <ide><path>benchmark/cluster/echo.js
<ide> if (cluster.isMaster) {
<ide> for (var i = 0; i < workers; ++i)
<ide> cluster.fork().on('online', onOnline).on('message', onMessage);
<ide>
<del> function onOnline(msg) {
<add> function onOnline() {
<ide> if (++readies === workers) {
<ide> bench.start();
<ide> broadcast();
<ide> if (cluster.isMaster) {
<ide> }
<ide> }
<ide>
<del> function onMessage(msg) {
<add> function onMessage() {
<ide> if (++msgCount === expectedPerBroadcast) {
<ide> msgCount = 0;
<ide> broadcast(); | 1 |
Javascript | Javascript | remove the `gulp extension` build target | 32de419a88ec6e22fffec5999bd126472740e447 | <ide><path>gulpfile.js
<ide> gulp.task('default', function() {
<ide> });
<ide> });
<ide>
<del>gulp.task('extension', ['chromium']);
<del>
<ide> gulp.task('buildnumber', function (done) {
<ide> console.log();
<ide> console.log('### Getting extension build number');
<ide> gulp.task('lib', ['buildnumber'], function () {
<ide> ]);
<ide> });
<ide>
<del>gulp.task('web-pre', ['generic', 'extension', 'jsdoc']);
<add>gulp.task('web-pre', ['generic', 'jsdoc']);
<ide>
<ide> gulp.task('publish', ['generic'], function (done) {
<ide> var version = JSON.parse( | 1 |
Javascript | Javascript | add hooks to containerview | a2cd03b0f219142c52f291de7a439931d83ffaa3 | <ide><path>packages/ember-views/lib/views/container_view.js
<ide> var childViewsProperty = Ember.computed(function() {
<ide> {{view Ember.ContainerView currentViewBinding="App.appController.view"}}
<ide> ```
<ide>
<add> ## Use lifecycle hooks
<add>
<add>  This is an example of how you could implement a reusable currentView view.
<add>
<add> ``` javascript
<add> App.ContainerView = Ember.ContainerView.extend({
<add> appendCurrentView: function(currentView, callback) {
<add> currentView.set('isVisible', true);
<add>
<add> if (!this.get('childViews').contains(currentView)) {
<add> this._super(currentView, callback);
<add> } else {
<add> callback();
<add> }
<add> },
<add> removeCurrentView: function(currentView, callback) {
<add> if (currentView.get('isShared')) {
<add> currentView.set('isVisible', false);
<add> callback();
<add> } else {
<add> this._super(currentView, callback);
<add> }
<add> }
<add> });
<add>  ```
<add>
<add> This is an example of how you could implement animations.
<add>
<add>  ``` javascript
<add> App.ContainerView = Ember.ContainerView.extend({
<add> presentCurrentView: function(currentView, callback) {
<add> currentView.$().animate({top: '0px'}, callback);
<add> },
<add> dismissCurrentView: function(currentView, callback) {
<add> currentView.$().animate({top: '-100px'}, callback);
<add> }
<add> });
<add>  ```
<add>
<ide> @class ContainerView
<ide> @namespace Ember
<ide> @extends Ember.View
<ide> Ember.ContainerView = Ember.View.extend({
<ide> _childViews[idx] = view;
<ide> }, this);
<ide>
<del> var currentView = get(this, 'currentView');
<del> if (currentView) _childViews.push(this.createChildView(currentView));
<del>
<ide> // Make the _childViews array observable
<ide> Ember.A(_childViews);
<ide>
<ide> Ember.ContainerView = Ember.View.extend({
<ide> willChange: 'childViewsWillChange',
<ide> didChange: 'childViewsDidChange'
<ide> });
<add>
<add> // Make sure we initialize with currentView if it is present
<add> var currentView = get(this, 'currentView');
<add> if (currentView) { this._currentViewDidChange(); }
<ide> },
<ide>
<ide> /**
<ide> Ember.ContainerView = Ember.View.extend({
<ide>
<ide> currentView: null,
<ide>
<add> /**
<add> This method is responsible for presenting a new view.
<add>    The default implementation simply calls the callback.
<add>    You can override this method if you want to add an animation, for example.
<add>
<add> @param {Ember.View} currentView a view to present
<add> @param {Function} callback the callback called once operation is terminated
<add> */
<add> presentCurrentView: function(currentView, callback) {
<add> callback();
<add> },
<add>
<add> /**
<add>    This method is responsible for adding a view to the containerView.
<add>
<add>    @param {Ember.View} currentView the view to append
<add>    @param {Function} callback the callback called once the view is appended
<add> */
<add> appendCurrentView: function(currentView, callback) {
<add> var childViews = get(this, 'childViews');
<add>
<add> currentView.one('didInsertElement', callback);
<add>
<add> childViews.pushObject(currentView);
<add> },
<add>
<add> /**
<add> This method is responsible for dismissing a view.
<add>    The default implementation simply calls the callback.
<add>    You can override this method if you want to add an animation, for example.
<add>
<add> @param {Ember.View} currentView a view to dismiss
<add> @param {Function} callback the callback called once operation is terminated
<add> */
<add> dismissCurrentView: function(currentView, callback) {
<add> callback();
<add> },
<add>
<add> /**
<add>    This method is responsible for removing a view from the containerView.
<add>    You may want to override it if you are implementing view sharing, for example.
<add>
<add>    @param {Ember.View} currentView the view to remove
<add>    @param {Function} callback the callback called once the view is removed
<add> */
<add> removeCurrentView: function(currentView, callback) {
<add> var childViews = get(this, 'childViews');
<add>
<add> currentView.one('didDisappear', function() {
<add> currentView.destroy();
<add> });
<add>
<add> childViews.removeObject(currentView);
<add>
<add> callback();
<add> },
<add>
<ide> _currentViewWillChange: Ember.beforeObserver(function() {
<del> var childViews = get(this, 'childViews'),
<del> currentView = get(this, 'currentView');
<add> var currentView = get(this, 'currentView'),
<add> containerView = this;
<ide>
<ide> if (currentView) {
<del> childViews.removeObject(currentView);
<del> currentView.destroy();
<add> set(currentView, 'isBeingDismissed', true);
<add> currentView.trigger('willDisappear', currentView);
<add>
<add> this.dismissCurrentView(currentView, function() {
<add> containerView.removeCurrentView(currentView, function() {
<add> set(currentView, 'isBeingDismissed', false);
<add> currentView.trigger('didDisappear', currentView);
<add> });
<add> });
<ide> }
<ide> }, 'currentView'),
<ide>
<ide> _currentViewDidChange: Ember.observer(function() {
<del> var childViews = get(this, 'childViews'),
<del> currentView = get(this, 'currentView');
<add> var currentView = get(this, 'currentView'),
<add> containerView = this;
<ide>
<ide> if (currentView) {
<del> childViews.pushObject(currentView);
<add> set(currentView, 'isBeingPresented', true);
<add> currentView.trigger('willAppear', currentView);
<add>
<add> this.appendCurrentView(currentView, function() {
<add> containerView.presentCurrentView(currentView, function() {
<add> set(currentView, 'isBeingPresented', false);
<add> currentView.trigger('didAppear', currentView);
<add> });
<add> });
<ide> }
<ide> }, 'currentView'),
<ide>
<ide><path>packages/ember-views/tests/views/container_view_test.js
<ide> test("should invalidate `element` on itself and childViews when being rendered b
<ide> ok(!!container.get('element'), "Parent's element should have been recomputed after being rendered");
<ide> ok(!!view.get('element'), "Child's element should have been recomputed after being rendered");
<ide> });
<add>
<add>test("should execut all the hooks when removing or adding a currentView", function() {
<add> expect(9);
<add> var viewsCount = 0;
<add> var container = Ember.ContainerView.create({
<add> presentCurrentView: function(currentView, callback) {
<add> if (viewsCount === 1) {
<add> equal(currentView, child1, 'will present child1');
<add> equal(child1.get('isBeingPresented'), true);
<add> } else {
<add> equal(currentView, child2, 'will present child2');
<add> equal(child2.get('isBeingPresented'), true);
<add> }
<add> callback();
<add> },
<add> appendCurrentView: function(currentView, callback) {
<add> viewsCount++;
<add> if (viewsCount === 1) {
<add> equal(currentView, child1, 'will append child1');
<add> } else {
<add> equal(currentView, child2, 'will append child2');
<add> }
<add> this._super(currentView, callback);
<add> },
<add> dismissCurrentView: function(currentView, callback) {
<add> equal(child1.get('isBeingDismissed'), true);
<add> equal(currentView, child1, 'will dismiss child1');
<add> callback();
<add> },
<add> removeCurrentView: function(currentView, callback) {
<add> equal(currentView, child1, 'will remove child1');
<add> this._super(currentView, callback);
<add> }
<add> });
<add>
<add> var child1 = Ember.View.create();
<add> var child2 = Ember.View.create();
<add>
<add> Ember.run(function() {
<add> container.appendTo('#qunit-fixture');
<add> });
<add>
<add> Ember.run(function() {
<add> set(container, 'currentView', child1);
<add> });
<add>
<add> Ember.run(function() {
<add> set(container, 'currentView', child2);
<add> });
<add>}); | 2 |
Text | Text | add deprecation message for od api | 304583012bab34a3f27949745324abe16b0fa529 | <ide><path>research/object_detection/README.md
<ide> [](https://github.com/tensorflow/tensorflow/releases/tag/v1.15.0)
<ide> [](https://www.python.org/downloads/release/python-360/)
<ide>
<add>## Deprecation
<add>
<add>*Note to our users*: the Tensorflow Object Detection API is no longer being
<add>maintained to be compatible with new versions of external dependencies
<add>(from pip, apt-get etc.). Any changes that follow are meant for internal
<add>maintenance. We may use the OD API to release projects in the future,
<add>in which case we will provide full install instructions or Docker images.
<add>We encourage users seeking an actively maintained detection / segmentation
<add>codebase to consider [TF-Vision](https://github.com/tensorflow/models/tree/master/official/vision)
<add>or [scenic](https://github.com/google-research/scenic). We have preserved
<add>the original install instructions below in case anyone wants to try out old
<add>models or scripts.
<add>
<ide> Creating accurate machine learning models capable of localizing and identifying
<ide> multiple objects in a single image remains a core challenge in computer vision.
<ide> The TensorFlow Object Detection API is an open source framework built on top of
<ide> TensorFlow that makes it easy to construct, train and deploy object detection
<ide> models. At Google we’ve certainly found this codebase to be useful for our
<ide> computer vision needs, and we hope that you will as well. <p align="center">
<ide> <img src="g3doc/img/kites_detections_output.jpg" width=676 height=450> </p>
<del>Contributions to the codebase are welcome and we would love to hear back from
<del>you if you find this API useful. Finally if you use the TensorFlow Object
<add>If you use the TensorFlow Object
<ide> Detection API for a research publication, please consider citing:
<ide>
<ide> ``` | 1 |
Javascript | Javascript | remove incorrect comment about lambert material | f4fb49e3d5b6d738baf367246f5fc83f86a9828d | <ide><path>examples/js/loaders/LDrawLoader.js
<ide> THREE.LDrawLoader = ( function () {
<ide>
<ide> case LDrawLoader.FINISH_TYPE_RUBBER:
<ide>
<del> // Rubber is best simulated with Lambert
<add> // Rubber finish
<ide> material = new THREE.MeshStandardMaterial( { color: colour, roughness: 0.9, metalness: 0 } );
<ide> canHaveEnvMap = false;
<ide> break; | 1 |
Python | Python | display hours too | e446d380a1c0b84577f5a2d20f8e9ee6dc371274 | <ide><path>glances/plugins/glances_processlist.py
<ide> from glances.plugins.glances_plugin import GlancesPlugin
<ide>
<ide>
<add>def convert_timedelta(delta):
<add> """Convert timedelta to human-readable time."""
<add> # Python 2.7+:
<add> # total_seconds = delta.total_seconds()
<add> # hours = total_seconds // 3600
<add> days, total_seconds = delta.days, delta.seconds
<add> hours = days * 24 + total_seconds // 3600
<add> minutes = (total_seconds % 3600) // 60
<add> seconds = str(total_seconds % 60).zfill(2)
<add> microseconds = str(delta.microseconds)[:2].zfill(2)
<add>
<add> return hours, minutes, seconds, microseconds
<add>
<add>
<ide> class Plugin(GlancesPlugin):
<ide>
<ide> """Glances' processes plugin.
<ide> def get_process_curses_data(self, p, first, args):
<ide> # TIME+
<ide> if self.tag_proc_time:
<ide> try:
<del> dtime = timedelta(seconds=sum(p['cpu_times']))
<add> delta = timedelta(seconds=sum(p['cpu_times']))
<ide> except Exception:
<ide> # Catched on some Amazon EC2 server
<ide> # See https://github.com/nicolargo/glances/issues/87
<ide> self.tag_proc_time = False
<ide> else:
<del> msg = '{0}:{1}.{2}'.format(str(dtime.seconds // 60 % 60),
<del> str(dtime.seconds % 60).zfill(2),
<del> str(dtime.microseconds)[:2].zfill(2))
<add> hours, minutes, seconds, microseconds = convert_timedelta(delta)
<add> if hours:
<add> msg = '{0}h{1}:{2}'.format(hours, minutes, seconds)
<add> else:
<add> msg = '{0}:{1}.{2}'.format(minutes, seconds, microseconds)
<ide> else:
<ide> msg = ' '
<del> msg = '{0:>9}'.format(msg)
<add> msg = '{0:>10}'.format(msg)
<ide> ret.append(self.curse_add_line(msg, optional=True))
<ide> # IO read/write
<ide> if 'io_counters' in p:
<ide> def msg_curse(self, args=None):
<ide> ret.append(self.curse_add_line(msg))
<ide> msg = '{0:>2}'.format(_("S"))
<ide> ret.append(self.curse_add_line(msg))
<del> msg = '{0:>9}'.format(_("TIME+"))
<add> msg = '{0:>10}'.format(_("TIME+"))
<ide> ret.append(self.curse_add_line(msg, sort_style if process_sort_key == 'cpu_times' else 'DEFAULT', optional=True))
<ide> msg = '{0:>6}'.format(_("IOR/s"))
<ide> ret.append(self.curse_add_line(msg, sort_style if process_sort_key == 'io_counters' else 'DEFAULT', optional=True, additional=True)) | 1 |
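A minimal standalone sketch of the CPU-time formatting introduced by the patch above (the `convert_timedelta` helper plus the new hours branch). This is an independent reconstruction for illustration only, not Glances code; the example values are made up.

```python
from datetime import timedelta

def convert_timedelta(delta):
    # Mirrors the helper above: split a timedelta into display parts.
    days, total_seconds = delta.days, delta.seconds
    hours = days * 24 + total_seconds // 3600
    minutes = (total_seconds % 3600) // 60
    seconds = str(total_seconds % 60).zfill(2)
    microseconds = str(delta.microseconds)[:2].zfill(2)
    return hours, minutes, seconds, microseconds

def format_cpu_time(delta):
    hours, minutes, seconds, microseconds = convert_timedelta(delta)
    if hours:
        # Long-running process: show hours and drop the fractional part.
        return '{0}h{1}:{2}'.format(hours, minutes, seconds)
    # Short-running process: keep hundredths of a second, as before the patch.
    return '{0}:{1}.{2}'.format(minutes, seconds, microseconds)

print(format_cpu_time(timedelta(seconds=75, microseconds=340000)))  # 1:15.34
print(format_cpu_time(timedelta(hours=3, minutes=2, seconds=5)))    # 3h2:05
```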
Text | Text | add examples for implementing esm | 47804933012841f2dc90626bdcc161adf34569a5 | <ide><path>doc/api/esm.md
<ide> ECMAScript modules are [the official standard format][] to package JavaScript
<ide> code for reuse. Modules are defined using a variety of [`import`][] and
<ide> [`export`][] statements.
<ide>
<add>The following example of an ES module exports a function:
<add>
<add>```js
<add>// addTwo.js
<add>function addTwo(num) {
<add> return num + 2;
<add>}
<add>
<add>export { addTwo };
<add>```
<add>
<add>The following example of an ES module imports the function from `addTwo.js`:
<add>
<add>```js
<add>// app.js
<add>import { addTwo } from './addTwo.js';
<add>
<add>// Prints: 6
<add>console.log(addTwo(4));
<add>```
<add>
<ide> Node.js fully supports ECMAScript modules as they are currently specified and
<ide> provides limited interoperability between them and the existing module format,
<ide> [CommonJS][]. | 1 |
Ruby | Ruby | remove superfluous require | 1304b664924bfea54fd6dc0dc924ae3d126ff92d | <ide><path>activerecord/lib/active_record/test_case.rb
<ide> require "active_support/test_case"
<del>require "active_record/fixtures"
<ide>
<ide> module ActiveRecord
<ide> class TestCase < ActiveSupport::TestCase #:nodoc: | 1 |
Java | Java | relax javabean rules for spel property access | b25e91a550beaf428a6e696959b717341a04f27d | <ide><path>spring-expression/src/main/java/org/springframework/expression/spel/support/ReflectivePropertyAccessor.java
<ide> private Field findField(String name, Class<?> clazz, Object target) {
<ide> * Find a getter method for the specified property.
<ide> */
<ide> protected Method findGetterForProperty(String propertyName, Class<?> clazz, boolean mustBeStatic) {
<del> Method[] ms = getSortedClassMethods(clazz);
<del> String propertyMethodSuffix = getPropertyMethodSuffix(propertyName);
<del>
<del> // Try "get*" method...
<del> String getterName = "get" + propertyMethodSuffix;
<del> for (Method method : ms) {
<del> if (method.getName().equals(getterName) && method.getParameterTypes().length == 0 &&
<del> (!mustBeStatic || Modifier.isStatic(method.getModifiers()))) {
<del> return method;
<del> }
<del> }
<del> // Try "is*" method...
<del> getterName = "is" + propertyMethodSuffix;
<del> for (Method method : ms) {
<del> if (method.getName().equals(getterName) && method.getParameterTypes().length == 0 &&
<del> (boolean.class.equals(method.getReturnType()) || Boolean.class.equals(method.getReturnType())) &&
<del> (!mustBeStatic || Modifier.isStatic(method.getModifiers()))) {
<del> return method;
<del> }
<del> }
<del> return null;
<add> return findMethodForProperty(getPropertyMethodSuffixes(propertyName),
<add> new String[] { "get", "is" }, clazz, mustBeStatic, 0);
<ide> }
<ide>
<ide> /**
<ide> * Find a setter method for the specified property.
<ide> */
<ide> protected Method findSetterForProperty(String propertyName, Class<?> clazz, boolean mustBeStatic) {
<add> return findMethodForProperty(getPropertyMethodSuffixes(propertyName),
<add> new String[] { "set" }, clazz, mustBeStatic, 1);
<add> }
<add>
<add> private Method findMethodForProperty(String[] methodSuffixes, String[] prefixes, Class<?> clazz,
<add> boolean mustBeStatic, int numberOfParams) {
<ide> Method[] methods = getSortedClassMethods(clazz);
<del> String setterName = "set" + getPropertyMethodSuffix(propertyName);
<del> for (Method method : methods) {
<del> if (method.getName().equals(setterName) && method.getParameterTypes().length == 1 &&
<del> (!mustBeStatic || Modifier.isStatic(method.getModifiers()))) {
<del> return method;
<add> for (String methodSuffix : methodSuffixes) {
<add> for (String prefix : prefixes) {
<add> for (Method method : methods) {
<add> if (method.getName().equals(prefix + methodSuffix)
<add> && method.getParameterTypes().length == numberOfParams
<add> && (!mustBeStatic || Modifier.isStatic(method.getModifiers()))) {
<add> return method;
<add> }
<add> }
<ide> }
<ide> }
<ide> return null;
<add>
<ide> }
<ide>
<ide> /**
<ide> public int compare(Method o1, Method o2) {
<ide> return methods;
<ide> }
<ide>
<add> /**
<add> * Return the method suffixes for a given property name. The default implementation
<add> * uses JavaBean conventions with additional support for properties of the form 'xY'
<add> * where the method 'getXY()' is used in preference to the JavaBean convention of
<add> * 'getxY()'.
<add> */
<add> protected String[] getPropertyMethodSuffixes(String propertyName) {
<add> String suffix = getPropertyMethodSuffix(propertyName);
<add> if (suffix.length() > 0 && Character.isUpperCase(suffix.charAt(0))) {
<add> return new String[] { suffix };
<add> }
<add> return new String[] { suffix, StringUtils.capitalize(suffix) };
<add> }
<add>
<add> /**
<add> * Return the method suffix for a given property name. The default implementation
<add> * uses JavaBean conventions.
<add> */
<ide> protected String getPropertyMethodSuffix(String propertyName) {
<ide> if (propertyName.length() > 1 && Character.isUpperCase(propertyName.charAt(1))) {
<ide> return propertyName;
<ide> }
<del> else {
<del> return StringUtils.capitalize(propertyName);
<del> }
<add> return StringUtils.capitalize(propertyName);
<ide> }
<ide>
<ide> /**
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/support/ReflectionHelperTests.java
<ide> public void testReflectivePropertyResolver() throws Exception {
<ide> assertEquals("id",rpr.read(ctx,t,"Id").getValue());
<ide> assertTrue(rpr.canRead(ctx,t,"Id"));
<ide>
<add> // repro SPR-10994
<add> assertEquals("xyZ",rpr.read(ctx,t,"xyZ").getValue());
<add> assertTrue(rpr.canRead(ctx,t,"xyZ"));
<add> assertEquals("xY",rpr.read(ctx,t,"xY").getValue());
<add> assertTrue(rpr.canRead(ctx,t,"xY"));
<add>
<ide> // SPR-10122, ReflectivePropertyAccessor JavaBean property names compliance tests - setters
<ide> rpr.write(ctx, t, "pEBS","Test String");
<ide> assertEquals("Test String",rpr.read(ctx,t,"pEBS").getValue());
<ide> static class Tester {
<ide> String id = "id";
<ide> String ID = "ID";
<ide> String pEBS = "pEBS";
<add> String xY = "xY";
<add> String xyZ = "xyZ";
<ide>
<ide> public String getProperty() { return property; }
<ide> public void setProperty(String value) { property = value; }
<ide> static class Tester {
<ide>
<ide> public String getID() { return ID; }
<ide>
<add> public String getXY() { return xY; }
<add>
<add> public String getXyZ() { return xyZ; }
<add>
<ide> public String getpEBS() {
<ide> return pEBS;
<ide> } | 2 |
Python | Python | optimize long list of if-statements | b91dd5eaaae638d21b42f05bc7020ade26759e65 | <ide><path>celery/events/state.py
<ide> from celery.utils.functional import LRUCache, memoize
<ide> from celery.utils.log import get_logger
<ide>
<add>__all__ = ['Worker', 'Task', 'State', 'heartbeat_expires']
<add>
<ide> PYPY = hasattr(sys, 'pypy_version_info')
<ide>
<ide> # The window (in percentage) is added to the workers heartbeat
<ide> R_WORKER = '<Worker: {0.hostname} ({0.status_string} clock:{0.clock})'
<ide> R_TASK = '<Task: {0.name}({0.uuid}) {0.state} clock:{0.clock}>'
<ide>
<del>__all__ = ['Worker', 'Task', 'State', 'heartbeat_expires']
<add>#: Mapping of task event names to task state.
<add>TASK_EVENT_TO_STATE = {
<add> 'sent': states.PENDING,
<add> 'received': states.RECEIVED,
<add> 'started': states.STARTED,
<add> 'failed': states.FAILURE,
<add> 'retried': states.RETRY,
<add> 'succeeded': states.SUCCESS,
<add> 'revoked': states.REVOKED,
<add> 'rejected': states.REJECTED,
<add>}
<ide>
<ide>
<ide> @memoize(maxsize=1000, keyfun=lambda a, _: a[0])
<ide> def __init__(self, uuid=None, cluster_state=None, **kwargs):
<ide> self.__dict__.update(kwargs)
<ide>
<ide> def event(self, type_, timestamp=None, local_received=None, fields=None,
<del> precedence=states.precedence, items=items, dict=dict,
<del> PENDING=states.PENDING, RECEIVED=states.RECEIVED,
<del> STARTED=states.STARTED, FAILURE=states.FAILURE,
<del> RETRY=states.RETRY, SUCCESS=states.SUCCESS,
<del> REVOKED=states.REVOKED, REJECTED=states.REJECTED):
<add> precedence=states.precedence, items=items,
<add> setattr=setattr, task_event_to_state=TASK_EVENT_TO_STATE.get,
<add> RETRY=states.RETRY):
<ide> fields = fields or {}
<del> if type_ == 'sent':
<del> state, self.sent = PENDING, timestamp
<del> elif type_ == 'received':
<del> state, self.received = RECEIVED, timestamp
<del> elif type_ == 'started':
<del> state, self.started = STARTED, timestamp
<del> elif type_ == 'failed':
<del> state, self.failed = FAILURE, timestamp
<del> elif type_ == 'retried':
<del> state, self.retried = RETRY, timestamp
<del> elif type_ == 'succeeded':
<del> state, self.succeeded = SUCCESS, timestamp
<del> elif type_ == 'revoked':
<del> state, self.revoked = REVOKED, timestamp
<del> elif type_ == 'rejected':
<del> state, self.rejected = REJECTED, timestamp
<add>
<add> # using .get is faster than catching KeyError in this case.
<add> state = task_event_to_state(type_)
<add> if state is not None:
<add> # sets e.g. self.succeeded to the timestamp.
<add> setattr(self, type_, timestamp)
<ide> else:
<del> state = type_.upper()
<add> state = type_.upper() # custom state
<ide>
<ide> # note that precedence here is reversed
<ide> # see implementation in celery.states.state.__lt__
<ide> def event(self, type_, timestamp=None, local_received=None, fields=None,
<ide> fields = {
<ide> k: v for k, v in items(fields) if k in keep
<ide> }
<del> self.__dict__.update(fields)
<ide> else:
<del> self.state = state
<del> self.timestamp = timestamp
<del> self.__dict__.update(fields)
<add> fields.update(state=state, timestamp=timestamp)
<add>
<add> # update current state with info from this event.
<add> self.__dict__.update(fields)
<ide>
<ide> def info(self, fields=None, extra=[]):
<ide> """Information about this task suitable for on-screen display.""" | 1 |
Javascript | Javascript | add these changes to the non scheduled code path | b7138a041079fc85fa13b4c1218f0a7c197957dc | <ide><path>src/text-editor-component.js
<ide> class TextEditorComponent {
<ide> })
<ide> })
<ide> } else {
<del> this.measureContentDuringUpdateSync()
<del> this.updateSyncAfterMeasuringContent()
<add> const restartFrame = this.measureContentDuringUpdateSync()
<add> if (restartFrame) {
<add> this.updateSync(false)
<add> } else {
<add> this.updateSyncAfterMeasuringContent()
<add> }
<ide> }
<ide>
<ide> this.updateScheduled = false | 1 |
PHP | PHP | accept empty strings as empty sessions | 011562184cf0c0a412ff40c3bb093725a02f5812 | <ide><path>tests/TestCase/Network/Session/DatabaseSessionTest.php
<ide> public function testRead()
<ide> $this->assertEquals($expected, $result);
<ide>
<ide> $result = $this->storage->read('made up value');
<del> $this->assertFalse($result);
<add> $this->assertEmpty($result);
<ide> }
<ide>
<ide> /**
<ide> public function testDestroy()
<ide> $this->storage->write('foo', 'Some value');
<ide>
<ide> $this->assertTrue($this->storage->destroy('foo'), 'Destroy failed');
<del> $this->assertFalse($this->storage->read('foo'), 'Value still present.');
<add> $this->assertEmpty($this->storage->read('foo'), 'Value still present.');
<ide> }
<ide>
<ide> /**
<ide> public function testGc()
<ide>
<ide> sleep(1);
<ide> $storage->gc(0);
<del> $this->assertFalse($storage->read('foo'));
<add> $this->assertEmpty($storage->read('foo'));
<ide> }
<ide>
<ide> /** | 1 |
Text | Text | add invitation to add new content | 4871dbdc73632be4fb00befbc816bc670bb609cc | <ide><path>docs/topics/third-party-resources.md
<ide> Django REST Framework has a growing community of developers, packages, and resou
<ide>
<ide> Check out a grid detailing all the packages and ecosystem around Django REST Framework at [Django Packages](https://www.djangopackages.com/grids/g/django-rest-framework/).
<ide>
<add>To submit new content, [open an issue](https://github.com/tomchristie/django-rest-framework/issues/new) or [create a pull request](https://github.com/tomchristie/django-rest-framework/). Pull requests will be given higher priority since they are easier to include.
<add>
<ide> ## Libraries and Extensions
<ide>
<ide> ### Authentication | 1 |
Python | Python | add test for issue #589 | 6977a2b8cdef4325eca8de1c44aed923bf8f2908 | <ide><path>spacy/tests/regression/test_issue589.py
<add>import pytest
<add>
<add>from ...vocab import Vocab
<add>from ...tokens import Doc
<add>
<add>
<add>def test_issue589():
<add> vocab = Vocab()
<add> vocab.strings.set_frozen(True)
<add> doc = Doc(vocab, words=[u'whata']) | 1 |
Python | Python | fix unit tests. prevent nomask from being copied | 11c95e35e85166b808ea24d321f86a7bc4a0dcab | <ide><path>numpy/ma/core.py
<ide> from numpy import array as narray
<ide> import warnings
<ide>
<add>class NoMask(ndarray):
<add> def __new__(subtype):
<add> narray(False)
<add> return narray(False).view(subtype)
<add>
<add> def no_op(self,*args,**kwargs):
<add> return self
<add>
<add> def __array_finalize__(self,obj):
<add> obj.flags['WRITEABLE'] = False
<add>
<add> def copy(self):
<add> return self
<ide>
<ide> MaskType = bool_
<del>nomask = narray(False)
<add>nomask = NoMask()
<ide>
<ide> divide_tolerance = 1.e-35
<ide> numpy.seterr(all='ignore')
<ide>
<ide>
<del>
<ide> #####--------------------------------------------------------------------------
<ide> #---- --- Exceptions ---
<ide> #####--------------------------------------------------------------------------
<ide><path>numpy/ma/tests/test_core.py
<ide>
<ide> from test_old_ma import *
<ide>
<add>class TestNoMask(NumpyTestCase):
<add> def test_no_inplace(self):
<add> x = nomask
<add> def iadd(x):
<add> x += 1
<add> self.failUnlessRaises(ValueError,iadd,x)
<add>
<add> def test_no_copy(self):
<add> x = nomask
<add> y = x.copy()
<add> assert x is y
<add>
<ide> #..............................................................................
<ide> class TestMA(NumpyTestCase):
<ide> "Base test class for MaskedArrays."
<ide> def check_squeeze(self):
<ide> assert(data.squeeze() is masked)
<ide>
<ide> def check_putmask(self):
<del> x = numpy.arange(6)+1
<add> x = arange(6)+1
<ide> mx = array(x, mask=[0,0,0,1,1,1])
<ide> mask = [0,0,1,0,0,1]
<del>
<ide> # w/o mask, w/o masked values
<ide> xx = x.copy()
<ide> putmask(xx, mask, 99)
<ide><path>numpy/ma/tests/test_old_ma.py
<ide> def check_testMaPut(self):
<ide> (x, y, a10, m1, m2, xm, ym, z, zm, xf, s) = self.d
<ide> m = [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]
<ide> i = numpy.nonzero(m)[0]
<del> putmask(xm, m, z)
<del> assert all(take(xm, i, axis=0) == z)
<ide> put(ym, i, zm)
<del> assert take(ym, i, axis=0) == zm
<add> assert all(take(ym, i, axis=0) == zm)
<ide>
<ide> def check_testOddFeatures(self):
<ide> "Test of other odd features"
<ide> def check_testToPython(self):
<ide> self.assertEqual(1, int(array([[[1]]])))
<ide> self.assertEqual(1.0, float(array([[1]])))
<ide> self.failUnlessRaises(ValueError, float, array([1,1]))
<del> self.failUnlessRaises(MAError, float, array([1],mask=[1]))
<del> self.failUnless(bool(array([0,1])))
<del> self.failUnless(bool(array([0,0],mask=[0,1])))
<del> self.failIf(bool(array([0,0])))
<del> self.failIf(bool(array([0,0],mask=[0,0])))
<add> self.failUnlessRaises(ValueError, bool, array([0,1]))
<add> self.failUnlessRaises(ValueError, bool, array([0,0],mask=[0,1]))
<ide>
<ide> def check_testScalarArithmetic(self):
<ide> xm = array(0, mask=1) | 3 |
Python | Python | fix ticket #322 | be917888687f967df612629d3b52b8c488ad3755 | <ide><path>numpy/core/records.py
<ide> def fromarrays(arrayList, dtype=None, shape=None, formats=None,
<ide> d0 = descr[0].shape
<ide> nn = len(d0)
<ide> if nn > 0:
<del> shape = shape[nn:]
<add> shape = shape[:-nn]
<ide>
<ide> for k, obj in enumerate(arrayList):
<ide> nn = len(descr[k].shape)
<del> if obj.shape[nn:] != shape:
<add> testshape = obj.shape[:len(obj.shape)-nn]
<add> if testshape != shape:
<ide> raise ValueError, "array-shape mismatch in array %d" % k
<ide>
<ide> _array = recarray(shape, descr) | 1 |
Java | Java | add debug information in mounting manager | b3a07685f25c6d40a7177a1e40f899d407db4a4b | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/MountingManager.java
<ide> package com.facebook.react.fabric.mounting;
<ide>
<ide> import android.content.Context;
<del>import androidx.annotation.AnyThread;
<del>import androidx.annotation.Nullable;
<del>import androidx.annotation.UiThread;
<ide> import android.view.View;
<ide> import android.view.ViewGroup;
<ide> import android.view.ViewParent;
<add>import androidx.annotation.AnyThread;
<add>import androidx.annotation.Nullable;
<add>import androidx.annotation.UiThread;
<ide> import com.facebook.infer.annotation.Assertions;
<del>import com.facebook.react.bridge.ReactContext;
<ide> import com.facebook.react.bridge.ReadableArray;
<ide> import com.facebook.react.bridge.ReadableMap;
<ide> import com.facebook.react.bridge.ReadableNativeMap;
<ide> import com.facebook.react.uimanager.ViewManager;
<ide> import com.facebook.react.uimanager.ViewManagerRegistry;
<ide> import com.facebook.yoga.YogaMeasureMode;
<del>import android.util.Log;
<ide> import java.util.concurrent.ConcurrentHashMap;
<ide>
<ide> /**
<ide> private void dropView(View view) {
<ide> mTagToViewState.remove(reactTag);
<ide> Context context = view.getContext();
<ide> if (context instanceof ThemedReactContext) {
<del> // We only recycle views that were created by RN (its context is instance of ThemedReactContext)
<add>      // We only recycle views that were created by RN (its context is an instance of
<add> // ThemedReactContext)
<ide> mViewFactory.recycle(
<ide> (ThemedReactContext) context, Assertions.assertNotNull(viewManager).getName(), view);
<ide> }
<ide> public void addViewAt(int parentTag, int tag, int index) {
<ide> ViewState viewState = getViewState(tag);
<ide> final View view = viewState.mView;
<ide> if (view == null) {
<del> throw new IllegalStateException("Unable to find view for view " + viewState);
<add> throw new IllegalStateException("Unable to find view for viewState " + viewState);
<ide> }
<ide> getViewGroupManager(parentViewState).addView(parentView, view, index);
<ide> }
<ide> public void updateState(final int reactTag, StateWrapper stateWrapper) {
<ide> if (viewManager == null) {
<ide> throw new IllegalStateException("Unable to find ViewManager for tag: " + reactTag);
<ide> }
<del> viewManager.updateState(
<del> viewState.mView,
<del> stateWrapper);
<add> viewManager.updateState(viewState.mView, stateWrapper);
<ide> }
<ide>
<ide> @UiThread
<ide> public void preallocateView(
<del> ThemedReactContext reactContext,
<del> String componentName,
<del> int reactTag,
<del> ReadableMap props,
<del> boolean isLayoutable) {
<add> ThemedReactContext reactContext,
<add> String componentName,
<add> int reactTag,
<add> ReadableMap props,
<add> boolean isLayoutable) {
<ide>
<ide> if (mTagToViewState.get(reactTag) != null) {
<del> throw new IllegalStateException("View for component " + componentName + " with tag " + reactTag + " already exists.");
<add> throw new IllegalStateException(
<add> "View for component " + componentName + " with tag " + reactTag + " already exists.");
<ide> }
<ide>
<ide> createViewWithProps(reactContext, componentName, reactTag, props, isLayoutable);
<ide> private ViewState(int reactTag, @Nullable View view, ViewManager viewManager, bo
<ide>
<ide> @Override
<ide> public String toString() {
<del> return "ViewState [" + mReactTag + "] - isRoot: " + mIsRoot + " - props: " + mCurrentProps + " - localData: " + mCurrentLocalData + " - viewManager: " + mViewManager;
<add> boolean isLayoutOnly = mViewManager == null;
<add> return "ViewState ["
<add> + mReactTag
<add> + "] - isRoot: "
<add> + mIsRoot
<add> + " - props: "
<add> + mCurrentProps
<add> + " - localData: "
<add> + mCurrentLocalData
<add> + " - viewManager: "
<add> + mViewManager
<add> + " - isLayoutOnly: "
<add> + isLayoutOnly;
<ide> }
<ide> }
<ide> } | 1 |
PHP | PHP | fix styleci errors | b39a01d2962edf2d114cf81c38f4b441479d4a4e | <ide><path>src/Illuminate/Database/Eloquent/Relations/MorphTo.php
<ide> class MorphTo extends BelongsTo
<ide> protected $macroBuffer = [];
<ide>
<ide> /**
<del> * A map of relations to load for each individual morph type
<add> * A map of relations to load for each individual morph type.
<ide> *
<ide> * @var array
<ide> */
<ide> public function getDictionary()
<ide> }
<ide>
<ide> /**
<del> * Specify which relations to load for a given morph type
<add> * Specify which relations to load for a given morph type.
<ide> *
<ide> * @param string $modelClass
<ide> * @param array $with | 1 |
Python | Python | reduce `operator_name` dupe in serialised json | 0267a47e5abd104891e0ec6c741b5bed208eef1e | <ide><path>airflow/serialization/serialized_objects.py
<ide> def serialize_to_json(
<ide> if cls._is_excluded(value, key, object_to_serialize):
<ide> continue
<ide>
<del> if key in decorated_fields:
<add> if key == '_operator_name':
<add> # when operator_name matches task_type, we can remove
<add> # it to reduce the JSON payload
<add> task_type = getattr(object_to_serialize, '_task_type', None)
<add> if value != task_type:
<add> serialized_object[key] = cls._serialize(value)
<add> elif key in decorated_fields:
<ide> serialized_object[key] = cls._serialize(value)
<ide> elif key == "timetable" and value is not None:
<ide> serialized_object[key] = _encode_timetable(value)
<ide> def _serialize_node(cls, op: Union[BaseOperator, MappedOperator], include_deps:
<ide> serialize_op = cls.serialize_to_json(op, cls._decorated_fields)
<ide> serialize_op['_task_type'] = getattr(op, "_task_type", type(op).__name__)
<ide> serialize_op['_task_module'] = getattr(op, "_task_module", type(op).__module__)
<del> serialize_op['_operator_name'] = op.operator_name
<add> if op.operator_name != serialize_op['_task_type']:
<add> serialize_op['_operator_name'] = op.operator_name
<ide>
<ide> # Used to determine if an Operator is inherited from EmptyOperator
<ide> serialize_op['_is_empty'] = op.inherits_from_empty_operator
<ide> def populate_operator(cls, op: Operator, encoded_op: Dict[str, Any]) -> None:
<ide> # Extra Operator Links defined in Plugins
<ide> op_extra_links_from_plugin = {}
<ide>
<add> if "_operator_name" not in encoded_op:
<add> encoded_op["_operator_name"] = encoded_op["_task_type"]
<add>
<ide> # We don't want to load Extra Operator links in Scheduler
<ide> if cls._load_operator_extra_links:
<ide> from airflow import plugins_manager
<ide> def deserialize_operator(cls, encoded_op: Dict[str, Any]) -> Operator:
<ide> if encoded_op.get("_is_mapped", False):
<ide> # Most of these will be loaded later, these are just some stand-ins.
<ide> op_data = {k: v for k, v in encoded_op.items() if k in BaseOperator.get_serialized_fields()}
<add> try:
<add> operator_name = encoded_op["_operator_name"]
<add> except KeyError:
<add> operator_name = encoded_op["_task_type"]
<ide> op = MappedOperator(
<ide> operator_class=op_data,
<ide> expand_input=EXPAND_INPUT_EMPTY,
<ide> def deserialize_operator(cls, encoded_op: Dict[str, Any]) -> Operator:
<ide> is_empty=False,
<ide> task_module=encoded_op["_task_module"],
<ide> task_type=encoded_op["_task_type"],
<del> operator_name=encoded_op["_operator_name"],
<add> operator_name=operator_name,
<ide> dag=None,
<ide> task_group=None,
<ide> start_date=None,
<ide><path>tests/serialization/test_dag_serialization.py
<ide> def detect_task_dependencies(task: Operator) -> Optional[DagDependency]: # type
<ide> "template_fields_renderers": {'bash_command': 'bash', 'env': 'json'},
<ide> "bash_command": "echo {{ task.task_id }}",
<ide> "_task_type": "BashOperator",
<del> "_operator_name": "BashOperator",
<ide> "_task_module": "airflow.operators.bash",
<ide> "pool": "default_pool",
<ide> "executor_config": {
<ide> def test_operator_expand_serde():
<ide> '_is_mapped': True,
<ide> '_task_module': 'airflow.operators.bash',
<ide> '_task_type': 'BashOperator',
<del> '_operator_name': 'BashOperator',
<ide> 'downstream_task_ids': [],
<ide> 'expand_input': {
<ide> "type": "dict-of-lists",
<ide> def test_operator_expand_serde():
<ide>
<ide> assert op.operator_class == {
<ide> '_task_type': 'BashOperator',
<del> '_operator_name': 'BashOperator',
<ide> 'downstream_task_ids': [],
<ide> 'task_id': 'a',
<ide> 'template_ext': ['.sh', '.bash'],
<ide> def test_operator_expand_xcomarg_serde():
<ide> '_is_mapped': True,
<ide> '_task_module': 'tests.test_utils.mock_operators',
<ide> '_task_type': 'MockOperator',
<del> '_operator_name': 'MockOperator',
<ide> 'downstream_task_ids': [],
<ide> 'expand_input': {
<ide> "type": "dict-of-lists",
<ide> def test_operator_expand_kwargs_serde(strict):
<ide> '_is_mapped': True,
<ide> '_task_module': 'tests.test_utils.mock_operators',
<ide> '_task_type': 'MockOperator',
<del> '_operator_name': 'MockOperator',
<ide> 'downstream_task_ids': [],
<ide> 'expand_input': {
<ide> "type": "list-of-dicts",
<ide> class MyDummyOperator(DummyOperator):
<ide> '_is_empty': is_inherit,
<ide> '_task_module': 'tests.serialization.test_dag_serialization',
<ide> '_task_type': 'MyDummyOperator',
<del> '_operator_name': 'MyDummyOperator',
<ide> 'downstream_task_ids': [],
<ide> "pool": "default_pool",
<ide> 'task_id': 'my_task', | 2 |
Text | Text | change 'ancho' to width in css only | e8ff81210cbbd869f7d3214cbf44bce81d017ac9 | <ide><path>curriculum/challenges/spanish/01-responsive-web-design/basic-css/size-your-images.spanish.md
<ide> localeTitle: Tamaño de sus imágenes
<ide> ---
<ide>
<ide> ## Descripción
<del><section id="description"> CSS tiene una propiedad llamada <code>width</code> que controla el ancho de un elemento. Al igual que con las fuentes, usaremos <code>px</code> (píxeles) para especificar el ancho de la imagen. Por ejemplo, si quisiéramos crear una clase CSS llamada <code>larger-image</code> que diera a los elementos HTML un ancho de 500 píxeles, usaríamos: <blockquote> <estilo> <br> .larger-image { <br> ancho: 500px; <br> } <br> </style> </blockquote></section>
<add><section id="description"> CSS tiene una propiedad llamada <code>width</code> que controla el ancho de un elemento. Al igual que con las fuentes, usaremos <code>px</code> (píxeles) para especificar el ancho de la imagen. Por ejemplo, si quisiéramos crear una clase CSS llamada <code>larger-image</code> que diera a los elementos HTML un ancho de 500 píxeles, usaríamos: <blockquote> <style> <br> .larger-image { <br> width: 500px; <br> } <br> </style> </blockquote></section>
<ide>
<ide> ## Instrucciones
<ide> <section id="instructions"> Cree una clase llamada <code>smaller-image</code> y utilícela para cambiar el tamaño de la imagen de modo que tenga solo 100 píxeles de ancho. <strong>Nota</strong> <br> Debido a las diferencias de implementación del navegador, es posible que tenga que estar al 100% del zoom para pasar las pruebas en este desafío. </section> | 1 |
Text | Text | add the text "### nested html lists" to | f365204cc86c724b66d3dce68f9239c9595955fe | <ide><path>guide/english/html/lists/index.md
<ide> which would end up looking like:
<ide> </dl>
<ide>
<ide>
<del>## More Information:
<add>## Nested HTML Lists
<add>
<add>List can be nested (lists inside lists):
<add>
<add>##### Code:
<add>```html
<add><ul>
<add> <li>Coffee</li>
<add> <li>Tea
<add> <ul>
<add> <li>Black tea</li>
<add> <li>Green tea</li>
<add> </ul>
<add> </li>
<add> <li>Milk</li>
<add></ul>
<add>```
<add>
<add>##### Output:
<add>
<add><ul>
<add> <li>Coffee</li>
<add> <li>Tea
<add> <ul>
<add> <li>Black tea</li>
<add> <li>Green tea</li>
<add> </ul>
<add> </li>
<add> <li>Milk</li>
<add></ul>
<add>
<add>#### More Information:
<ide>
<del><!-- Please add any articles you think might be helpful to read before writing the article -->
<ide> * [HTML lists on w3schools](https://www.w3schools.com/html/html_lists.asp)
<ide> * [HTML lists on WebPlatform](https://webplatform.github.io/docs/guides/html_lists/)
<add> | 1 |
Python | Python | add test for multilevel_native_crop_and_resize | 8ce327f82d0b8e075c25513388bffb12135aacb4 | <ide><path>research/object_detection/utils/spatial_transform_ops_test.py
<ide> def graph_fn(image, boxes):
<ide>
<ide> class NativeCropAndResizeTest(test_case.TestCase):
<ide>
<add> # def testBatchCropAndResize3x3To2x2_2Channels(self):
<add>
<add> # def graph_fn(image, boxes):
<add> # return spatial_ops.native_crop_and_resize(image, boxes, crop_size=[2, 2])
<add>
<add> # image = np.array([[[[1, 0], [2, 1], [3, 2]],
<add> # [[4, 3], [5, 4], [6, 5]],
<add> # [[7, 6], [8, 7], [9, 8]]],
<add> # [[[1, 0], [2, 1], [3, 2]],
<add> # [[4, 3], [5, 4], [6, 5]],
<add> # [[7, 6], [8, 7], [9, 8]]]], dtype=np.float32)
<add> # boxes = np.array([[[0, 0, 1, 1],
<add> # [0, 0, .5, .5]],
<add> # [[1, 1, 0, 0],
<add> # [.5, .5, 0, 0]]], dtype=np.float32)
<add> # expected_output = [[[[[1, 0], [3, 2]], [[7, 6], [9, 8]]],
<add> # [[[1, 0], [2, 1]], [[4, 3], [5, 4]]]],
<add> # [[[[9, 8], [7, 6]], [[3, 2], [1, 0]]],
<add> # [[[5, 4], [4, 3]], [[2, 1], [1, 0]]]]]
<add> # crop_output = self.execute_cpu(graph_fn, [image, boxes])
<add> # self.assertAllClose(crop_output, expected_output)
<add>
<ide> def testBatchCropAndResize3x3To2x2_2Channels(self):
<ide>
<ide> def graph_fn(image, boxes):
<ide> def graph_fn(image, boxes):
<ide> [[[5, 4], [4, 3]], [[2, 1], [1, 0]]]]]
<ide> crop_output = self.execute_cpu(graph_fn, [image, boxes])
<ide> self.assertAllClose(crop_output, expected_output)
<add>
<add> def testMultilevelBatchCropAndResize3x3To2x2_2Channels(self):
<add>
<add> def graph_fn(image1, image2, boxes, box_levels):
<add> return spatial_ops.multilevel_native_crop_and_resize([image1, image2],
<add> boxes,
<add> box_levels,
<add> crop_size=[2, 2])
<add> image = [np.array([[[[1, 0], [2, 1], [3, 2]],
<add> [[4, 3], [5, 4], [6, 5]],
<add> [[7, 6], [8, 7], [9, 8]]],
<add> [[[1, 0], [2, 1], [3, 2]],
<add> [[4, 3], [5, 4], [6, 5]],
<add> [[7, 6], [8, 7], [9, 8]]]], dtype=np.float32),
<add> np.array([[[[1, 0], [2, 1]],
<add> [[4, 3], [5, 4]]],
<add> [[[1, 0], [2, 1]],
<add> [[4, 3], [5, 4]]]], dtype=np.float32)]
<add> boxes = np.array([[[0, 0, 1, 1],
<add> [0, 0, .5, .5]],
<add> [[1, 1, 0, 0],
<add> [.5, .5, 0, 0]]], dtype=np.float32)
<add> box_levels = np.array([[0, 1], [0, 0]], dtype=np.float32)
<add> expected_output = [[[[[1, 0], [3, 2]], [[7, 6], [9, 8]]],
<add> [[[1, 0], [1.5, 0.5]], [[2.5, 1.5], [3, 2]]]],
<add> [[[[9, 8], [7, 6]], [[3, 2], [1, 0]]],
<add> [[[5, 4], [4, 3]], [[2, 1], [1, 0]]]]]
<add> crop_output = self.execute_cpu(graph_fn, [*image, boxes, box_levels])
<add> self.assertAllClose(crop_output, expected_output)
<ide>
<ide>
<ide> if __name__ == '__main__': | 1 |
Javascript | Javascript | fix ng-prop-* with undefined values | 8b973e04cad06b839f8d6641131e2b206afb284c | <ide><path>src/ng/compile.js
<ide> function $CompileProvider($provide, $$sanitizeUriProvider) {
<ide> pre: function ngPropPreLinkFn(scope, $element) {
<ide> function applyPropValue() {
<ide> var propValue = ngPropGetter(scope);
<del> $element.prop(propName, sanitizer(propValue));
<add> $element[0][propName] = sanitizer(propValue);
<ide> }
<ide>
<ide> applyPropValue();
<ide><path>test/ng/ngPropSpec.js
<ide> describe('ngProp*', function() {
<ide> expect(element.prop('asdf')).toBe(true);
<ide> }));
<ide>
<add> // https://github.com/angular/angular.js/issues/16797
<add> it('should support falsy property values', inject(function($rootScope, $compile) {
<add> var element = $compile('<span ng-prop-text="myText" />')($rootScope);
<add> // Initialize to truthy value
<add> $rootScope.myText = 'abc';
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBe('abc');
<add>
<add> // Assert various falsey values get assigned to the property
<add> $rootScope.myText = '';
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBe('');
<add> $rootScope.myText = 0;
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBe(0);
<add> $rootScope.myText = false;
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBe(false);
<add> $rootScope.myText = undefined;
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBeUndefined();
<add> $rootScope.myText = null;
<add> $rootScope.$digest();
<add> expect(element.prop('text')).toBe(null);
<add> }));
<add>
<add> it('should directly map special properties (class)', inject(function($rootScope, $compile) {
<add> var element = $compile('<span ng-prop-class="myText" />')($rootScope);
<add> $rootScope.myText = 'abc';
<add> $rootScope.$digest();
<add> expect(element[0].class).toBe('abc');
<add> expect(element).not.toHaveClass('abc');
<add> }));
<add>
<add> it('should not use jQuery .prop() to avoid jQuery propFix/hooks', inject(function($rootScope, $compile) {
<add> var element = $compile('<span ng-prop-class="myText" />')($rootScope);
<add> spyOn(jqLite.prototype, 'prop');
<add> $rootScope.myText = 'abc';
<add> $rootScope.$digest();
<add> expect(jqLite.prototype.prop).not.toHaveBeenCalled();
<add> }));
<add>
<ide> it('should support mixed case using underscore-separated names', inject(function($rootScope, $compile) {
<ide> var element = $compile('<span ng-prop-a_bcd_e="value" />')($rootScope);
<ide> $rootScope.value = 123; | 2 |
Javascript | Javascript | prevent code duplication when using help button | d67a617e472c597a6d7532426a77f51fdce62546 | <ide><path>client/src/templates/Challenges/redux/create-question-epic.js
<ide> function filesToMarkdown(challengeFiles = {}) {
<ide> return fileString;
<ide> }
<ide> const fileName = moreThanOneFile
<del> ? `\\ file: ${challengeFile.contents}`
<add> ? `/* file: ${challengeFile.name}.${challengeFile.ext} */\n`
<ide> : '';
<ide> const fileType = challengeFile.ext;
<del> return `${fileString}\`\`\`${fileType}\n${fileName}\n${challengeFile.contents}\n\`\`\`\n\n`;
<add> return `${fileString}\`\`\`${fileType}\n${fileName}${challengeFile.contents}\n\`\`\`\n\n`;
<ide> }, '\n');
<ide> }
<ide> | 1 |
PHP | PHP | add identifier quoting to table aliases | 0a90481969b8cce14c2092c68f94ae7f98cb8c6b | <ide><path>Cake/Database/Query.php
<ide> public function from($tables = [], $overwrite = false) {
<ide> protected function _buildFromPart($parts, $generator) {
<ide> $select = ' FROM %s';
<ide> $normalized = [];
<add> $driver = $this->connection()->driver();
<ide> $parts = $this->_stringifyExpressions($parts, $generator);
<ide> foreach ($parts as $k => $p) {
<ide> if (!is_numeric($k)) {
<del> $p = $p . ' ' . $k;
<add> $p = $p . ' AS ' . $driver->quoteIdentifier($k);
<ide> }
<ide> $normalized[] = $p;
<ide> }
<ide><path>Cake/Test/TestCase/Database/QueryTest.php
<ide> public function testSelectFieldsFromTable() {
<ide> $this->assertEquals(array('body' => 'Second Article Body', 'author_id' => 3, 'name' => 'nate'), $result->fetch('assoc'));
<ide> $this->assertEquals(array('body' => 'Third Article Body', 'author_id' => 1, 'name' => 'nate'), $result->fetch('assoc'));
<ide>
<del> //Overwrite tables and only fetch from authors
<add> // Overwrite tables and only fetch from authors
<ide> $result = $query->select('name', true)->from('authors', true)->order(['name' => 'desc'], true)->execute();
<ide> $this->assertEquals(array('nate'), $result->fetch());
<ide> $this->assertEquals(array('mariano'), $result->fetch());
<ide> public function testSelectAliasedFieldsFromTable() {
<ide> $this->assertEquals(array('text' => 'Second Article Body', 'two' => 2, 'three' => 5), $result->fetch('assoc'));
<ide> }
<ide>
<add>/**
<add> * Test that table aliases are quoted.
<add> *
<add> * @return void
<add> */
<add> public function testSelectAliasTablesAreQuoted() {
<add> $query = new Query($this->connection);
<add> $query = $query->select(['text' => 'a.body', 'a.author_id'])
<add> ->from(['a' => 'articles']);
<add>
<add> $sql = $query->sql();
<add> $this->assertRegExp('/articles AS [`"]a[`"]/', $sql);
<add> }
<add>
<ide> /**
<ide> * Tests that tables can also be aliased and referenced in the select clause using such alias
<ide> *
<ide> public function testSelectAliasedTables() {
<ide> $query = new Query($this->connection);
<ide> $result = $query->select(['text' => 'a.body', 'a.author_id'])
<ide> ->from(['a' => 'articles'])->execute();
<add>
<ide> $this->assertEquals(['text' => 'First Article Body', 'author_id' => 1], $result->fetch('assoc'));
<ide> $this->assertEquals(['text' => 'Second Article Body', 'author_id' => 3], $result->fetch('assoc'));
<ide> | 2 |
Javascript | Javascript | report native warnings to yellowbox | e697ed75d14289c6d8e2ada448f7f24adbd1d29a | <ide><path>Libraries/ReactNative/YellowBox.js
<ide> const EventEmitter = require('EventEmitter');
<ide> const Platform = require('Platform');
<ide> const React = require('React');
<ide> const StyleSheet = require('StyleSheet');
<add>const RCTLog = require('RCTLog');
<ide>
<ide> const infoLog = require('infoLog');
<ide> const openFileInEditor = require('openFileInEditor');
<ide> const parseErrorStack = require('parseErrorStack');
<add>const stringifySafe = require('stringifySafe');
<ide> const symbolicateStackTrace = require('symbolicateStackTrace');
<ide>
<ide> import type EmitterSubscription from 'EmitterSubscription';
<ide> if (__DEV__) {
<ide>
<ide> (console: any).warn = function() {
<ide> warn.apply(console, arguments);
<del>
<del> if (typeof arguments[0] === 'string' &&
<del> arguments[0].startsWith('(ADVICE)')) {
<del> return;
<del> }
<del>
<ide> updateWarningMap.apply(null, arguments);
<ide> };
<ide>
<ide> if (Platform.isTesting) {
<ide> (console: any).disableYellowBox = true;
<ide> }
<add>
<add> RCTLog.setWarningHandler((...args) => {
<add> updateWarningMap.apply(null, args);
<add> });
<ide> }
<ide>
<ide> /**
<ide> function updateWarningMap(format, ...args): void {
<ide> if (console.disableYellowBox) {
<ide> return;
<ide> }
<del> const stringifySafe = require('stringifySafe');
<ide>
<ide> format = String(format);
<ide> const argCount = (format.match(/%s/g) || []).length;
<ide> function updateWarningMap(format, ...args): void {
<ide> ...args.slice(argCount).map(stringifySafe),
<ide> ].join(' ');
<ide>
<add> if (warning.startsWith('(ADVICE)')) {
<add> return;
<add> }
<add>
<ide> const warningInfo = _warningMap.get(warning);
<ide> if (warningInfo) {
<ide> warningInfo.count += 1;
<ide><path>Libraries/Utilities/RCTLog.js
<ide> const levelsMap = {
<ide> fatal: 'error',
<ide> };
<ide>
<del>class RCTLog {
<add>let warningHandler: ?(Array<any> => void) = null;
<add>
<add>const RCTLog = {
<ide> // level one of log, info, warn, error, mustfix
<del> static logIfNoNativeHook(...args) {
<add> logIfNoNativeHook(level: string, ...args: Array<any>): void {
<add> // We already printed in the native console, so only log here if using a js debugger
<ide> if (typeof global.nativeLoggingHook === 'undefined') {
<del> // We already printed in xcode, so only log here if using a js debugger
<del> RCTLog.logToConsole(...args);
<add> RCTLog.logToConsole(level, ...args);
<add> } else {
<add> // Report native warnings to YellowBox
<add> if (warningHandler && level === 'warn') {
<add> warningHandler(...args);
<add> }
<ide> }
<del>
<del> return true;
<del> }
<add> },
<ide>
<ide> // Log to console regardless of nativeLoggingHook
<del> static logToConsole(level, ...args) {
<add> logToConsole(level: string, ...args: Array<any>): void {
<ide> const logFn = levelsMap[level];
<ide> invariant(
<ide> logFn,
<del> 'Level "' + level + '" not one of ' + Object.keys(levelsMap)
<add> 'Level "' + level + '" not one of ' + Object.keys(levelsMap).toString()
<ide> );
<ide>
<ide> console[logFn](...args);
<add> },
<ide>
<del> return true;
<add> setWarningHandler(handler: typeof warningHandler): void {
<add> warningHandler = handler;
<ide> }
<ide> }
<ide> | 2 |
Python | Python | fix xlnet tokenizer and python2 | c946bb51a61f67b0c9eaae1c9cf6f164a7748e37 | <ide><path>pytorch_pretrained_bert/tokenization_xlnet.py
<ide> def convert_tokens_to_ids(self, tokens, sample=False):
<ide> )
<ide> return ids
<ide>
<del> def convert_ids_to_tokens(self, ids, skip_special_tokens=False):
<add> def convert_ids_to_tokens(self, ids, return_unicode=True, skip_special_tokens=False):
<ide> """Converts a sequence of ids in tokens."""
<ide> tokens = []
<ide> for i in ids:
<ide> def convert_ids_to_tokens(self, ids, skip_special_tokens=False):
<ide> tokens.append(self.special_tokens_decoder[i])
<ide> else:
<ide> tokens.append(self.sp_model.IdToPiece(i))
<add>
<add> if six.PY2 and return_unicode:
<add> ret_pieces = []
<add> for piece in tokens:
<add> if isinstance(piece, str):
<add> piece = piece.decode('utf-8')
<add> ret_pieces.append(piece)
<add> tokens = ret_pieces
<ide> return tokens
<ide>
<ide> def encode(self, text, sample=False):
<ide><path>tests/tokenization_xlnet_test.py
<ide> class XLNetTokenizationTest(unittest.TestCase):
<ide> def test_full_tokenizer(self):
<ide> tokenizer = XLNetTokenizer(SAMPLE_VOCAB)
<ide>
<del> tokens = tokenizer.tokenize('This is a test')
<del> self.assertListEqual(tokens, ['▁This', '▁is', '▁a', '▁t', 'est'])
<add> tokens = tokenizer.tokenize(u'This is a test')
<add> self.assertListEqual(tokens, [u'▁This', u'▁is', u'▁a', u'▁t', u'est'])
<ide>
<ide> self.assertListEqual(
<ide> tokenizer.convert_tokens_to_ids(tokens), [285, 46, 10, 170, 382])
<ide>
<del> vocab_path = "/tmp/"
<add> vocab_path = u"/tmp/"
<ide> vocab_file, special_tokens_file = tokenizer.save_vocabulary(vocab_path)
<ide> tokenizer = tokenizer.from_pretrained(vocab_path,
<ide> keep_accents=True)
<ide> os.remove(vocab_file)
<ide> os.remove(special_tokens_file)
<ide>
<del> tokens = tokenizer.tokenize("I was born in 92000, and this is falsé.")
<del> self.assertListEqual(tokens, [SPIECE_UNDERLINE + 'I', SPIECE_UNDERLINE + 'was', SPIECE_UNDERLINE + 'b', 'or', 'n', SPIECE_UNDERLINE + 'in', SPIECE_UNDERLINE + '',
<del> '9', '2', '0', '0', '0', ',', SPIECE_UNDERLINE + 'and', SPIECE_UNDERLINE + 'this',
<del> SPIECE_UNDERLINE + 'is', SPIECE_UNDERLINE + 'f', 'al', 's', 'é', '.'])
<add> tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
<add> self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
<add> u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
<add> u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
<add> SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's', u'é', u'.'])
<ide> ids = tokenizer.convert_tokens_to_ids(tokens)
<ide> self.assertListEqual(
<ide> ids, [8, 21, 84, 55, 24, 19, 7, 0,
<ide> 602, 347, 347, 347, 3, 12, 66,
<ide> 46, 72, 80, 6, 0, 4])
<ide>
<ide> back_tokens = tokenizer.convert_ids_to_tokens(ids)
<del> self.assertListEqual(back_tokens, [SPIECE_UNDERLINE + 'I', SPIECE_UNDERLINE + 'was', SPIECE_UNDERLINE + 'b', 'or', 'n', SPIECE_UNDERLINE + 'in',
<del> SPIECE_UNDERLINE + '', '<unk>', '2', '0', '0', '0', ',',
<del> SPIECE_UNDERLINE + 'and', SPIECE_UNDERLINE + 'this', SPIECE_UNDERLINE + 'is', SPIECE_UNDERLINE + 'f', 'al', 's',
<del> '<unk>', '.'])
<add> self.assertListEqual(back_tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
<add> u'or', u'n', SPIECE_UNDERLINE + u'in',
<add> SPIECE_UNDERLINE + u'', u'<unk>', u'2', u'0', u'0', u'0', u',',
<add> SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
<add> SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u's',
<add> u'<unk>', u'.'])
<ide>
<ide> @pytest.mark.slow
<ide> def test_tokenizer_from_pretrained(self):
<ide> def test_tokenizer_from_pretrained(self):
<ide> def test_tokenizer_lower(self):
<ide> tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=True)
<ide> tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
<del> self.assertListEqual(tokens, [SPIECE_UNDERLINE + '', 'i', SPIECE_UNDERLINE + 'was', SPIECE_UNDERLINE + 'b', 'or', 'n', SPIECE_UNDERLINE + 'in', SPIECE_UNDERLINE + '',
<del> '9', '2', '0', '0', '0', ',', SPIECE_UNDERLINE + 'and', SPIECE_UNDERLINE + 'this',
<del> SPIECE_UNDERLINE + 'is', SPIECE_UNDERLINE + 'f', 'al', 'se', '.'])
<del> self.assertListEqual(tokenizer.tokenize(u"H\u00E9llo"), ["▁he", "ll", "o"])
<add> self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'', u'i', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b',
<add> u'or', u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
<add> u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
<add> SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
<add> self.assertListEqual(tokenizer.tokenize(u"H\u00E9llo"), [u"▁he", u"ll", u"o"])
<ide>
<ide> def test_tokenizer_no_lower(self):
<ide> tokenizer = XLNetTokenizer(SAMPLE_VOCAB, do_lower_case=False)
<ide> tokens = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
<del> self.assertListEqual(tokens, [SPIECE_UNDERLINE + 'I', SPIECE_UNDERLINE + 'was', SPIECE_UNDERLINE + 'b', 'or', 'n', SPIECE_UNDERLINE + 'in', SPIECE_UNDERLINE + '',
<del> '9', '2', '0', '0', '0', ',', SPIECE_UNDERLINE + 'and', SPIECE_UNDERLINE + 'this',
<del> SPIECE_UNDERLINE + 'is', SPIECE_UNDERLINE + 'f', 'al', 'se', '.'])
<add> self.assertListEqual(tokens, [SPIECE_UNDERLINE + u'I', SPIECE_UNDERLINE + u'was', SPIECE_UNDERLINE + u'b', u'or',
<add> u'n', SPIECE_UNDERLINE + u'in', SPIECE_UNDERLINE + u'',
<add> u'9', u'2', u'0', u'0', u'0', u',', SPIECE_UNDERLINE + u'and', SPIECE_UNDERLINE + u'this',
<add> SPIECE_UNDERLINE + u'is', SPIECE_UNDERLINE + u'f', u'al', u'se', u'.'])
<ide>
<ide>
<ide> if __name__ == '__main__': | 2 |
Python | Python | trim lines in resnet keras | c49b8b71be09b6492705be0d0fa6300c1c3e8941 | <ide><path>official/resnet/keras/keras_common.py
<ide> def __enter__(self):
<ide>
<ide> def __exit__(self, *args):
<ide> pass
<del>
<del> | 1 |
PHP | PHP | add test for find('list') with no fields | b4b1c22a5a3b018deaf9fbfa177844b3304f14de | <ide><path>Cake/Test/TestCase/ORM/TableTest.php
<ide> public function testFindListNoHydration() {
<ide> 'connection' => $this->connection,
<ide> ]);
<ide> $table->displayField('username');
<add> $query = $table->find('list')
<add> ->hydrate(false)
<add> ->order('id');
<add> $expected = [
<add> 1 => 'mariano',
<add> 2 => 'nate',
<add> 3 => 'larry',
<add> 4 => 'garrett'
<add> ];
<add> $this->assertSame($expected, $query->toArray());
<add>
<ide> $query = $table->find('list', ['fields' => ['id', 'username']])
<ide> ->hydrate(false)
<ide> ->order('id'); | 1 |
Mixed | Ruby | accept a block in button_to helper | ab7a80ea22c94a006788eddfa3b92123b4031cb6 | <ide><path>actionpack/CHANGELOG.md
<ide> ## Rails 4.0.0 (unreleased) ##
<ide>
<add>* Make possible to use a block in button_to helper if button text is hard
<add> to fit into the name parameter, e.g.:
<add>
<add> <%= button_to [:make_happy, @user] do %>
<add> Make happy <strong><%= @user.name %></strong>
<add> <% end %>
<add> # => "<form method="post" action="/users/1/make_happy" class="button_to">
<add> # <div>
<add> # <button type="submit">
<add> # Make happy <strong>Name</strong>
<add> # </button>
<add> # </div>
<add> # </form>"
<add>
<add> *Sergey Nartimov*
<add>
<ide> * change a way of ordering helpers from several directories. Previously,
<ide> when loading helpers from multiple paths, all of the helpers files were
<ide> gathered into one array an then they were sorted. Helpers from different
<ide><path>actionpack/lib/action_view/helpers/url_helper.rb
<ide> def link_to(*args, &block)
<ide> # # <div><input value="New" type="submit" /></div>
<ide> # # </form>"
<ide> #
<add> # <%= button_to [:make_happy, @user] do %>
<add> # Make happy <strong><%= @user.name %></strong>
<add> # <% end %>
<add> # # => "<form method="post" action="/users/1/make_happy" class="button_to">
<add> # # <div>
<add> # # <button type="submit">
<add> # # Make happy <strong><%= @user.name %></strong>
<add> # # </button>
<add> # # </div>
<add> # # </form>"
<ide> #
<ide> # <%= button_to "New", :action => "new", :form_class => "new-thing" %>
<ide> # # => "<form method="post" action="/controller/new" class="new-thing">
<ide> def link_to(*args, &block)
<ide> # # </div>
<ide> # # </form>"
<ide> # #
<del> def button_to(name, options = {}, html_options = {})
<add> def button_to(*args, &block)
<add> if block_given?
<add> options = args[0] || {}
<add> html_options = args[1] || {}
<add> else
<add> name = args[0]
<add> options = args[1] || {}
<add> html_options = args[2] || {}
<add> end
<add>
<ide> html_options = html_options.stringify_keys
<ide> convert_boolean_attributes!(html_options, %w(disabled))
<ide>
<ide> def button_to(name, options = {}, html_options = {})
<ide> request_token_tag = form_method == 'post' ? token_tag : ''
<ide>
<ide> html_options = convert_options_to_data_attributes(options, html_options)
<del> html_options.merge!("type" => "submit", "value" => name || url)
<add> html_options['type'] = 'submit'
<add>
<add> button = if block_given?
<add> content_tag('button', html_options, &block)
<add> else
<add> tag('input', html_options.merge('value' => name || url))
<add> end
<ide>
<del> inner_tags = method_tag.safe_concat tag('input', html_options).safe_concat request_token_tag
<add> inner_tags = method_tag.safe_concat(button).safe_concat(request_token_tag)
<ide> content_tag('form', content_tag('div', inner_tags), form_options)
<ide> end
<ide>
<ide><path>actionpack/test/template/url_helper_test.rb
<ide> def test_button_to_with_method_get
<ide> )
<ide> end
<ide>
<add> def test_button_to_with_block
<add> assert_dom_equal(
<add> "<form method=\"post\" action=\"http://www.example.com\" class=\"button_to\"><div><button type=\"submit\"><span>Hello</span></button></div></form>",
<add> button_to("http://www.example.com") { content_tag(:span, 'Hello') }
<add> )
<add> end
<add>
<ide> def test_link_tag_with_straight_url
<ide> assert_dom_equal "<a href=\"http://www.example.com\">Hello</a>", link_to("Hello", "http://www.example.com")
<ide> end | 3 |
Java | Java | add contextclass resolution to jacksonjsondecoder | 9fb8a2eb2e05b99179614a7d67a1a745e432819c | <ide><path>spring-web-reactive/src/test/java/org/springframework/web/reactive/result/method/annotation/MessageReaderArgumentResolverTests.java
<ide> package org.springframework.web.reactive.result.method.annotation;
<ide>
<ide> import java.io.Serializable;
<add>import java.lang.reflect.Method;
<ide> import java.net.URI;
<ide> import java.nio.ByteBuffer;
<ide> import java.nio.charset.StandardCharsets;
<ide> import javax.xml.bind.annotation.XmlRootElement;
<ide>
<ide> import org.junit.Before;
<del>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import reactor.core.publisher.Flux;
<ide> import reactor.core.publisher.Mono;
<ide> import org.springframework.validation.Errors;
<ide> import org.springframework.validation.Validator;
<ide> import org.springframework.validation.annotation.Validated;
<add>import org.springframework.web.method.HandlerMethod;
<ide> import org.springframework.web.reactive.result.ResolvableMethod;
<ide> import org.springframework.web.server.ServerWebExchange;
<ide> import org.springframework.web.server.ServerWebInputException;
<ide> public void validateFluxTestBean() throws Exception {
<ide> }
<ide>
<ide> @Test // SPR-9964
<del> @Ignore
<ide> public void parameterizedMethodArgument() throws Exception {
<del> Class<?> clazz = ConcreteParameterizedController.class;
<del> MethodParameter param = ResolvableMethod.onClass(clazz).name("handleDto").resolveParam();
<del> SimpleBean simpleBean = resolveValue(param, "{\"name\" : \"Jad\"}");
<add> Method method = AbstractParameterizedController.class.getMethod("handleDto", Identifiable.class);
<add> HandlerMethod handlerMethod = new HandlerMethod(new ConcreteParameterizedController(), method);
<add> MethodParameter methodParam = handlerMethod.getMethodParameters()[0];
<add> SimpleBean simpleBean = resolveValue(methodParam, "{\"name\" : \"Jad\"}");
<ide>
<ide> assertEquals("Jad", simpleBean.getName());
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/http/codec/json/JacksonJsonDecoder.java
<ide> private Flux<Object> decodeInternal(JsonObjectDecoder objectDecoder, Publisher<D
<ide>
<ide> MethodParameter methodParameter = (elementType.getSource() instanceof MethodParameter ?
<ide> (MethodParameter)elementType.getSource() : null);
<del> // TODO Find a way to pass the real concrete controller contextClass
<del> JavaType javaType = getJavaType(elementType.getType(), null);
<add> Class<?> contextClass = (methodParameter != null ? methodParameter.getContainingClass() : null);
<add> JavaType javaType = getJavaType(elementType.getType(), contextClass);
<ide> ObjectReader reader;
<ide>
<ide> if (methodParameter != null && methodParameter.getParameter().getAnnotation(JsonView.class) != null) { | 2 |
Javascript | Javascript | replace duplicate conditions by function | 523d44a66e5a4f9bbe335b7872919aa39d6ee4c4 | <ide><path>lib/fs.js
<ide> function isFd(path) {
<ide>
<ide> fs.Stats = Stats;
<ide>
<add>function isFileType(fileType) {
<add> // Use stats array directly to avoid creating an fs.Stats instance just for
<add> // our internal use.
<add> return (statValues[1/*mode*/] & S_IFMT) === fileType;
<add>}
<add>
<ide> // Don't allow mode to accidentally be overwritten.
<ide> Object.defineProperties(fs, {
<ide> F_OK: { enumerable: true, value: constants.F_OK || 0 },
<ide> function readFileAfterStat(err) {
<ide> if (err)
<ide> return context.close(err);
<ide>
<del> // Use stats array directly to avoid creating an fs.Stats instance just for
<del> // our internal use.
<ide> var size;
<del> if ((statValues[1/*mode*/] & S_IFMT) === S_IFREG)
<add> if (isFileType(S_IFREG))
<ide> size = context.size = statValues[8];
<ide> else
<ide> size = context.size = 0;
<ide> fs.readFileSync = function(path, options) {
<ide> var fd = isUserFd ? path : fs.openSync(path, options.flag || 'r', 0o666);
<ide>
<ide> tryStatSync(fd, isUserFd);
<del> // Use stats array directly to avoid creating an fs.Stats instance just for
<del> // our internal use.
<ide> var size;
<del> if ((statValues[1/*mode*/] & S_IFMT) === S_IFREG)
<add> if (isFileType(S_IFREG))
<ide> size = statValues[8];
<ide> else
<ide> size = 0;
<ide> fs.realpathSync = function realpathSync(p, options) {
<ide>
<ide> // continue if not a symlink, break if a pipe/socket
<ide> if (knownHard[base] || (cache && cache.get(base) === base)) {
<del> if ((statValues[1/*mode*/] & S_IFMT) === S_IFIFO ||
<del> (statValues[1/*mode*/] & S_IFMT) === S_IFSOCK) {
<add> if (isFileType(S_IFIFO) || isFileType(S_IFSOCK)) {
<ide> break;
<ide> }
<ide> continue;
<ide> fs.realpathSync = function realpathSync(p, options) {
<ide> binding.lstat(baseLong, undefined, ctx);
<ide> handleErrorFromBinding(ctx);
<ide>
<del> if ((statValues[1/*mode*/] & S_IFMT) !== S_IFLNK) {
<add> if (!isFileType(S_IFLNK)) {
<ide> knownHard[base] = true;
<ide> if (cache) cache.set(base, base);
<ide> continue;
<ide> fs.realpath = function realpath(p, options, callback) {
<ide>
<ide> // continue if not a symlink, break if a pipe/socket
<ide> if (knownHard[base]) {
<del> if ((statValues[1/*mode*/] & S_IFMT) === S_IFIFO ||
<del> (statValues[1/*mode*/] & S_IFMT) === S_IFSOCK) {
<add> if (isFileType(S_IFIFO) || isFileType(S_IFSOCK)) {
<ide> return callback(null, encodeRealpathResult(p, options));
<ide> }
<ide> return process.nextTick(LOOP);
<ide> fs.realpath = function realpath(p, options, callback) {
<ide> // our internal use.
<ide>
<ide> // if not a symlink, skip to the next path part
<del> if ((statValues[1/*mode*/] & S_IFMT) !== S_IFLNK) {
<add> if (!isFileType(S_IFLNK)) {
<ide> knownHard[base] = true;
<ide> return process.nextTick(LOOP);
<ide> } | 1 |
Python | Python | reduce number of return statements | f473fe4418a98e2e0edb357f23b74964b09d6a7a | <ide><path>numpy/core/fromnumeric.py
<ide> def sum(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> out[...] = res
<ide> return out
<ide> return res
<del> elif type(a) is not mu.ndarray:
<add> if type(a) is not mu.ndarray:
<ide> try:
<ide> sum = a.sum
<ide> except AttributeError:
<del> return _methods._sum(a, axis=axis, dtype=dtype,
<del> out=out, **kwargs)
<del> return sum(axis=axis, dtype=dtype, out=out, **kwargs)
<del> else:
<del> return _methods._sum(a, axis=axis, dtype=dtype,
<del> out=out, **kwargs)
<add> pass
<add> else:
<add> return sum(axis=axis, dtype=dtype, out=out, **kwargs)
<add> return _methods._sum(a, axis=axis, dtype=dtype,
<add> out=out, **kwargs)
<ide>
<ide>
<ide> def product(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> def amax(a, axis=None, out=None, keepdims=np._NoValue):
<ide> kwargs = {}
<ide> if keepdims is not np._NoValue:
<ide> kwargs['keepdims'] = keepdims
<add>
<ide> if type(a) is not mu.ndarray:
<ide> try:
<ide> amax = a.max
<ide> except AttributeError:
<del> return _methods._amax(a, axis=axis,
<del> out=out, **kwargs)
<del> return amax(axis=axis, out=out, **kwargs)
<del> else:
<del> return _methods._amax(a, axis=axis,
<del> out=out, **kwargs)
<add> pass
<add> else:
<add> return amax(axis=axis, out=out, **kwargs)
<add>
<add> return _methods._amax(a, axis=axis,
<add> out=out, **kwargs)
<ide>
<ide>
<ide> def amin(a, axis=None, out=None, keepdims=np._NoValue):
<ide> def amin(a, axis=None, out=None, keepdims=np._NoValue):
<ide> try:
<ide> amin = a.min
<ide> except AttributeError:
<del> return _methods._amin(a, axis=axis,
<del> out=out, **kwargs)
<del> return amin(axis=axis, out=out, **kwargs)
<del> else:
<del> return _methods._amin(a, axis=axis,
<del> out=out, **kwargs)
<add> pass
<add> else:
<add> return amin(axis=axis, out=out, **kwargs)
<add>
<add> return _methods._amin(a, axis=axis,
<add> out=out, **kwargs)
<ide>
<ide>
<ide> def alen(a):
<ide> def prod(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> try:
<ide> prod = a.prod
<ide> except AttributeError:
<del> return _methods._prod(a, axis=axis, dtype=dtype,
<del> out=out, **kwargs)
<del> return prod(axis=axis, dtype=dtype, out=out, **kwargs)
<del> else:
<del> return _methods._prod(a, axis=axis, dtype=dtype,
<del> out=out, **kwargs)
<add> pass
<add> else:
<add> return prod(axis=axis, dtype=dtype, out=out, **kwargs)
<add>
<add> return _methods._prod(a, axis=axis, dtype=dtype,
<add> out=out, **kwargs)
<ide>
<ide>
<ide> def cumprod(a, axis=None, dtype=None, out=None): | 1 |
Javascript | Javascript | remove unused export | 5a71cbe7a9e192c23b847be6e0c575b6705f6939 | <ide><path>packages/react-reconciler/src/ReactFiberReconciler.js
<ide> export function updateContainer(
<ide> }
<ide>
<ide> export {
<del> flushRoot,
<ide> batchedEventUpdates,
<ide> batchedUpdates,
<ide> unbatchedUpdates,
<ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.js
<ide> export function computeExpirationForFiber(
<ide> return expirationTime;
<ide> }
<ide>
<del>let lastUniqueAsyncExpiration = NoWork;
<del>export function computeUniqueAsyncExpiration(): ExpirationTime {
<del> const currentTime = requestCurrentTime();
<del> let result = computeAsyncExpiration(currentTime);
<del> if (result <= lastUniqueAsyncExpiration) {
<del> // Since we assume the current time monotonically increases, we only hit
<del> // this branch when computeUniqueAsyncExpiration is fired multiple times
<del> // within a 200ms window (or whatever the async bucket size is).
<del> result -= 1;
<del> }
<del> lastUniqueAsyncExpiration = result;
<del> return result;
<del>}
<del>
<ide> export function scheduleUpdateOnFiber(
<ide> fiber: Fiber,
<ide> expirationTime: ExpirationTime,
<ide> function finishSyncRender(root, exitStatus, expirationTime) {
<ide> }
<ide>
<ide> export function flushRoot(root: FiberRoot, expirationTime: ExpirationTime) {
<del> if ((executionContext & (RenderContext | CommitContext)) !== NoContext) {
<del> invariant(
<del> false,
<del> 'work.commit(): Cannot commit while already rendering. This likely ' +
<del> 'means you attempted to commit from inside a lifecycle method.',
<del> );
<del> }
<ide> markRootExpiredAtTime(root, expirationTime);
<ide> ensureRootIsScheduled(root);
<del> flushSyncCallbackQueue();
<add> if ((executionContext & (RenderContext | CommitContext)) === NoContext) {
<add> flushSyncCallbackQueue();
<add> }
<ide> }
<ide>
<ide> export function flushDiscreteUpdates() { | 2 |
Ruby | Ruby | add env.ldflags and use | 53cf7e843b05a1dc1247312c9358023234bb9411 | <ide><path>Library/Homebrew/extend/ENV.rb
<ide> def cxx; self['CXX'] or "g++"; end
<ide>
<ide> # CFLAGS are read quite a bit
<ide> def cflags; ENV['CFLAGS']; end
<add> def ldflags; ENV['LDFLAGS']; end
<ide>
<ide> def m64
<ide> append_to_cflags '-m64' | 1 |
Java | Java | update copyright header | 2fc2c29e9a729aed82f0e7ff23082960980f7841 | <ide><path>spring-context/src/main/java/org/springframework/scheduling/annotation/AsyncResult.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/FailureCallback.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/ListenableFuture.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/ListenableFutureAdapter.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/ListenableFutureCallback.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/ListenableFutureCallbackRegistry.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-core/src/main/java/org/springframework/util/concurrent/SuccessCallback.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/tcp/reactor/AbstractPromiseToListenableFutureAdapter.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-test/src/main/java/org/springframework/test/web/client/MockRestServiceServer.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-web/src/main/java/org/springframework/http/MediaType.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-web/src/main/java/org/springframework/web/bind/annotation/CrossOrigin.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-web/src/main/java/org/springframework/web/client/AsyncRestTemplate.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-web/src/main/java/org/springframework/web/client/RestTemplate.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * You may obtain a copy of the License at
<ide> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<add> * http://www.apache.org/licenses/LICENSE-2.0
<ide> *
<ide> * Unless required by applicable law or agreed to in writing, software
<ide> * distributed under the License is distributed on an "AS IS" BASIS,
<ide><path>spring-web/src/main/java/org/springframework/web/context/request/async/WebAsyncManager.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>spring-web/src/test/java/org/springframework/web/client/RestTemplateTests.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * You may obtain a copy of the License at
<ide> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<add> * http://www.apache.org/licenses/LICENSE-2.0
<ide> *
<ide> * Unless required by applicable law or agreed to in writing, software
<ide> * distributed under the License is distributed on an "AS IS" BASIS,
<ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/ResponseEntityExceptionHandler.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License. | 16 |
Text | Text | add v1.2.7 changes | e31560cf6b85ddbb340cb0af421c1d0f5586e142 | <ide><path>CHANGELOG.md
<add><a name="1.2.7"></a>
<add># 1.2.7 emoji-clairvoyance (2014-01-03)
<add>
<add>
<add>## Bug Fixes
<add>
<add>- **$animate:**
<add> - ensue class-based animations are always skipped before structural post-digest tasks are run
<add> ([bc492c0f](https://github.com/angular/angular.js/commit/bc492c0fc17257ddf2bc5964e205379aa766b3d8),
<add> [#5582](https://github.com/angular/angular.js/issues/5582))
<add> - remove trailing `s` from computed transition duration styles
<add> ([50bf0296](https://github.com/angular/angular.js/commit/50bf029625d603fc652f0f413e709f43803743db))
<add>- **$http:**
<add> ([3d38fff8](https://github.com/angular/angular.js/commit/3d38fff8b4ea2fd60fadef2028ea4dcddfccb1a4))
<add> - use ActiveX XHR when making PATCH requests on IE8
<add> ([6c17d02b](https://github.com/angular/angular.js/commit/6c17d02bc4cc02f478775d62e1f9f77da9da82ad),
<add> [#2518](https://github.com/angular/angular.js/issues/2518), [#5043](https://github.com/angular/angular.js/issues/5043))
<add> - fix 'type mismatch' error on IE8 after each request
<add> ([fd9a03e1](https://github.com/angular/angular.js/commit/fd9a03e147aac7e952c6dda1f381fd4662276ba2))
<add> - Ignore multiple calls to onreadystatechange with readyState=4
<add> ([4f572366](https://github.com/angular/angular.js/commit/4f57236614415eea919221ea5f99c4d8689b3267),
<add> [#5426](https://github.com/angular/angular.js/issues/5426))
<add>- **$injector:** remove the `INSTANTIATING` flag properly when done
<add> ([186a5912](https://github.com/angular/angular.js/commit/186a5912288acfff0ee59dae29af83c37c987921),
<add> [#4361](https://github.com/angular/angular.js/issues/4361), [#5577](https://github.com/angular/angular.js/issues/5577))
<add>- **$location:**
<add> - remove base href domain if the URL begins with '//'
<add> ([760f2fb7](https://github.com/angular/angular.js/commit/760f2fb73178e56c37397b3c5876f7dac96f0455),
<add> [#5606](https://github.com/angular/angular.js/issues/5606))
<add> - fix $location.path() behaviour when $locationChangeStart is triggered by the browser
<add> ([cf686285](https://github.com/angular/angular.js/commit/cf686285c22d528440e173fdb65ad1052d96df3c),
<add> [#4989](https://github.com/angular/angular.js/issues/4989), [#5089](https://github.com/angular/angular.js/issues/5089), [#5118](https://github.com/angular/angular.js/issues/5118), [#5580](https://github.com/angular/angular.js/issues/5580))
<add> - re-assign history after BFCache back on Android browser
<add> ([bddd46c8](https://github.com/angular/angular.js/commit/bddd46c8ecf49cfe6c999cd6b4a69b7d7e1f9a33),
<add> [#5425](https://github.com/angular/angular.js/issues/5425))
<add>- **$resource:** prevent URL template from collapsing into an empty string
<add> ([131e4014](https://github.com/angular/angular.js/commit/131e4014b831ac81b7979c4523da81ebc5861c70),
<add> [#5455](https://github.com/angular/angular.js/issues/5455), [#5493](https://github.com/angular/angular.js/issues/5493))
<add>- **$sanitize:** consider the `size` attribute as a valid/allowed attribute
<add> ([056c8493](https://github.com/angular/angular.js/commit/056c8493521988dbb330c6636135b505737da918),
<add> [#5522](https://github.com/angular/angular.js/issues/5522))
<add>- **Scope:** don't let watch deregistration mess up the dirty-checking digest loop
<add> ([884ef0db](https://github.com/angular/angular.js/commit/884ef0dbcdfe614cedc824d079361b53e675d033),
<add> [#5525](https://github.com/angular/angular.js/issues/5525))
<add>- **input:**
<add> - use apply on the change event only when one isn't already in progress
<add> ([a80049fd](https://github.com/angular/angular.js/commit/a80049fd0ac858eeeb645a4209cb2a661d0b4c33),
<add> [#5293](https://github.com/angular/angular.js/issues/5293))
<add> - prevent double $digest when using jQuery trigger.
<add> ([1147f219](https://github.com/angular/angular.js/commit/1147f21999edf9a434cd8d24865a6455e744d858),
<add> [#5293](https://github.com/angular/angular.js/issues/5293))
<add>- **ngRepeat:** allow for more flexible coding style in ngRepeat expression
<add> ([c9705b75](https://github.com/angular/angular.js/commit/c9705b755645a4bfe066243f2ba15a733c3787e1),
<add> [#5537](https://github.com/angular/angular.js/issues/5537), [#5598](https://github.com/angular/angular.js/issues/5598))
<add>- **ngRoute:** instantiate controller when template is empty
<add> ([498365f2](https://github.com/angular/angular.js/commit/498365f219f65d6c29bdf2f03610a4d3646009bb),
<add> [#5550](https://github.com/angular/angular.js/issues/5550))
<add>- **ngShow/ngHide, ngIf:** functions with zero args should be truthy
<add> ([01c5be46](https://github.com/angular/angular.js/commit/01c5be4681e34cdc5f5c461b7a618fefe8038919),
<add> [#5414](https://github.com/angular/angular.js/issues/5414))
<add>
<add>
<add>## Performance Improvements
<add>
<add>- **Scope:** limit propagation of $broadcast to scopes that have listeners for the event
<add> ([80e7a455](https://github.com/angular/angular.js/commit/80e7a4558490f7ffd33d142844b9153a5ed00e86),
<add> [#5341](https://github.com/angular/angular.js/issues/5341), [#5371](https://github.com/angular/angular.js/issues/5371))
<add>
<ide> <a name="1.2.6"></a>
<ide> # 1.2.6 taco-salsafication (2013-12-19)
<ide> | 1 |
Javascript | Javascript | remove tostring of dangerouslysetinnerhtml | edeea0720791f998b505f2ecdcf866c7e539e7a2 | <ide><path>packages/react-dom/src/client/ReactDOMComponent.js
<ide> import possibleStandardNames from '../shared/possibleStandardNames';
<ide> import {validateProperties as validateARIAProperties} from '../shared/ReactDOMInvalidARIAHook';
<ide> import {validateProperties as validateInputProperties} from '../shared/ReactDOMNullInputValuePropHook';
<ide> import {validateProperties as validateUnknownProperties} from '../shared/ReactDOMUnknownPropertyHook';
<del>import {toStringOrTrustedType} from './ToStringValue';
<ide>
<ide> import {
<ide> enableDeprecatedFlareAPI,
<ide> export function diffProperties(
<ide> const lastHtml = lastProp ? lastProp[HTML] : undefined;
<ide> if (nextHtml != null) {
<ide> if (lastHtml !== nextHtml) {
<del> (updatePayload = updatePayload || []).push(
<del> propKey,
<del> toStringOrTrustedType(nextHtml),
<del> );
<add> (updatePayload = updatePayload || []).push(propKey, nextHtml);
<ide> }
<ide> } else {
<ide> // TODO: It might be too late to clear this if we have children | 1 |
Python | Python | add fit_generator methods in models | 47d074fec3836206450facbbbc0965acd482f69c | <ide><path>keras/models.py
<ide> import pprint
<ide> from six.moves import range
<ide> import six
<add>import time
<add>import threading
<add>try:
<add> import queue
<add>except ImportError:
<add> import Queue as queue
<ide>
<ide> from . import backend as K
<ide> from . import optimizers
<ide> def load_weights(self, filepath):
<ide> self.layers[k].set_weights(weights)
<ide> f.close()
<ide>
<add> def fit_generator(self, generator, samples_per_epoch, nb_epoch,
<add> verbose=1, show_accuracy=False, callbacks=[],
<add> validation_data=None, class_weight=None, nb_worker=1):
<add> '''Fit a model on data generated batch-by-batch by a Python generator.
<add> The generator is run in parallel to the model, for efficiency,
<add> and can be run by multiple workers at the same time.
<add> For instance, this allows you to do real-time data augmentation
<add> on images on CPU in parallel to training your model on GPU.
<add>
<add> # Arguments
<add> generator: a Python generator,
<add> yielding either (X, y) or (X, y, sample_weight).
<add> The generator is expected to loop over its data
<add> indefinitely. An epoch finishes when `samples_per_epoch`
<add> samples have been seen by the model.
<add> The output of the generator must be a tuple of either 2 or 3
<add> numpy arrays.
<add> If the output tuple has two elements, they are assumed to be
<add> (input_data, target_data).
<add> If it has three elements, they are assumed to be
<add> (input_data, target_data, sample_weight).
<add> All arrays should contain the same number of samples.
<add> samples_per_epoch: integer, number of samples to process before
<add> starting a new epoch.
<add> nb_epoch: integer, total number of iterations on the data.
<add> verbose: verbosity mode, 0, 1, or 2.
<add> show_accuracy: boolean. Whether to display accuracy (only relevant
<add> for classification problems).
<add> callbacks: list of callbacks to be called during training.
<add> validation_data: tuple of 2 or 3 numpy arrays. If 2 elements,
<add> they are assumed to be (input_data, target_data);
<add> if 3 elements, they are assumed to be
<add>                 (input_data, target_data, sample_weight).
<add> class_weight: dictionary mapping class indices to a weight
<add> for the class.
<add> nb_worker: integer, number of workers to use for running
<add> the generator (in parallel to model training).
<add> If using multiple workers, the processing order of batches
<add>                 produced by the generator will be non-deterministic.
<add> If using multiple workers, make sure to protect
<add> any thread-unsafe operation done by the generator
<add> using a Python mutex.
<add>
<add> # Returns
<add>
<add> A `History` object.
<add>
<add> # Examples
<add>
<add> ```python
<add> def generate_arrays_from_file(path):
<add> while 1:
<add> f = open(path)
<add> for line in f:
<add> # create numpy arrays of input data
<add> # and labels, from each line in the file
<add> x, y = process_line(line)
<add> yield x, y
<add> f.close()
<add>
<add> model.fit_generator(generate_arrays_from_file('/my_file.txt'),
<add> samples_per_epoch=10000, nb_epoch=10)
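<add>
<add>         # A second sketch (illustrative only): when nb_worker > 1, wrap any
<add>         # thread-unsafe work (here, reading from a shared file handle) in a
<add>         # lock so several workers can share one generator. `process_line`
<add>         # is the same user-defined placeholder as in the example above.
<add>         import threading
<add>         lock = threading.Lock()
<add>
<add>         def generate_arrays_thread_safe(path):
<add>             f = open(path)
<add>             while 1:
<add>                 with lock:
<add>                     line = f.readline()
<add>                     if not line:  # reached end of file: start over
<add>                         f.seek(0)
<add>                         line = f.readline()
<add>                 x, y = process_line(line)
<add>                 yield x, y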
<add> ```
<add> '''
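<add>         # Implementation outline: the generator is run by one or more
<add>         # background worker threads that push batches into a queue (kept
<add>         # below max_queue_size); the main thread pops batches off the
<add>         # queue and trains on them.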
<add> max_queue_size = 10 # maximum number of batches in queue
<add> wait_time = 0.05 # in seconds
<add> epoch = 0
<add> do_validation = bool(validation_data)
<add> if show_accuracy:
<add> out_labels = ['loss', 'acc']
<add> else:
<add> out_labels = ['loss']
<add> metrics = ['loss', 'acc', 'val_loss', 'val_acc']
<add>
<add> # prepare callbacks
<add> history = cbks.History()
<add> if verbose:
<add> callbacks = [history, cbks.BaseLogger()] + callbacks
<add> else:
<add> callbacks = [history] + callbacks
<add> callbacks = cbks.CallbackList(callbacks)
<add>
<add> callbacks._set_model(self)
<add> callbacks._set_params({
<add> 'nb_epoch': nb_epoch,
<add> 'nb_sample': samples_per_epoch,
<add> 'verbose': verbose,
<add> 'do_validation': do_validation,
<add> 'metrics': metrics,
<add> })
<add> callbacks.on_train_begin()
<add>
<add> # util function to validate the batches produced
<add> # by the generator
<add> def input_validation(generator_output):
<add> if not hasattr(generator_output, '__len__'):
<add> _stop.set()
<add> raise Exception('The generator output must be a tuple.')
<add> if len(generator_output) == 2:
<add> X, y = generator_output
<add> sample_weight = None
<add> elif len(generator_output) == 3:
<add> X, y, sample_weight = generator_output
<add> else:
<add> _stop.set()
<add> raise Exception('The generator output tuple must have 2 or 3 elements.')
<add> return X, y, sample_weight
<add>
<add> # start generator thread storing batches into a queue
<add> generator_queue = queue.Queue()
<add> _stop = threading.Event()
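<add>         # _stop is a shared flag: it is set on interrupt, on malformed
<add>         # generator output, or when training ends, and tells the worker
<add>         # threads to exit their loop.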
<add>
<add> def generator_task():
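<add>             # Producer loop: fetch a batch from the generator whenever the
<add>             # queue has room, otherwise back off for `wait_time` seconds.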
<add> i = 0
<add> while not _stop.is_set():
<add> try:
<add> if generator_queue.qsize() < max_queue_size:
<add>                         generator_output = next(generator)
<add> generator_queue.put(generator_output)
<add> i += 1
<add> else:
<add> time.sleep(wait_time)
<add> except KeyboardInterrupt:
<add> _stop.set()
<add> return
<add>
<add> generator_threads = [threading.Thread(target=generator_task) for _ in range(nb_worker)]
<add> for thread in generator_threads:
<add> thread.start()
<add>
<add> self.stop_training = False
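<add>         # Consumer side: the main thread pulls batches from the queue and
<add>         # trains on them until `samples_per_epoch` samples have been seen.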
<add> while epoch < nb_epoch:
<add> callbacks.on_epoch_begin(epoch)
<add> samples_seen = 0
<add> batch_index = 0
<add> while samples_seen < samples_per_epoch:
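<add>                 # wait (by polling) until the next batch is available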
<add> while not _stop.is_set():
<add> if not generator_queue.empty():
<add> generator_output = generator_queue.get()
<add> break
<add> else:
<add> time.sleep(wait_time)
<add>
<add> X, y, sample_weight = input_validation(generator_output)
<add>
<add> batch_logs = {}
<add> batch_size = len(X[0])
<add> batch_logs['batch'] = batch_index
<add> batch_logs['size'] = batch_size
<add> callbacks.on_batch_begin(batch_index, batch_logs)
<add> outs = self.train_on_batch(X, y,
<add> accuracy=show_accuracy,
<add> sample_weight=sample_weight,
<add> class_weight=class_weight)
<add> if type(outs) != list:
<add> outs = [outs]
<add> for l, o in zip(out_labels, outs):
<add> batch_logs[l] = o
<add>
<add> callbacks.on_batch_end(batch_index, batch_logs)
<add>
<add> # construct epoch logs
<add> epoch_logs = {}
<add> batch_index += 1
<add> samples_seen += batch_size
<add> if samples_seen >= samples_per_epoch: # epoch finished
<add> if do_validation:
<add> if hasattr(validation_data, 'next'):
<add> # assumed to be generator
<add> # TODO: call self.evaluate_generator()
<add> _stop.set()
<add> raise NotImplementedError()
<add> else:
<add> # input validation
<add> X, y, sample_weight = input_validation(validation_data)
<add> val_outs = self.evaluate(X, y,
<add> show_accuracy=show_accuracy,
<add> sample_weight=sample_weight,
<add> verbose=0)
<add> if type(val_outs) != list:
<add> val_outs = [val_outs]
<add> # same labels assumed
<add> for l, o in zip(out_labels, val_outs):
<add> epoch_logs['val_' + l] = o
<add>
<add> callbacks.on_epoch_end(epoch, epoch_logs)
<add> epoch += 1
<add> if self.stop_training:
<add> break
<add> _stop.set()
<add> callbacks.on_train_end()
<add> return history
<add>
<ide>
<ide> class Graph(Model, containers.Graph):
<ide> '''Arbitrary connection graph.
<ide> def load_weights(self, filepath):
<ide> weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
<ide> self.set_weights(weights)
<ide> f.close()
<add>
<add> def fit_generator(self, generator, samples_per_epoch, nb_epoch,
<add> verbose=1, callbacks=[],
<add> validation_data=None, class_weight={}, nb_worker=1):
<add> '''Fit a model on data generated batch-by-batch by a Python generator.
<add> The generator is run in parallel to the model, for efficiency,
<add> and can be run by multiple workers at the same time.
<add> For instance, this allows you to do real-time data augmentation
<add> on images on CPU in parallel to training your model on GPU.
<add>
<add> # Arguments
<add> generator: a generator.
<add> The output of the generator must be either a dictionary
<add>                mapping input and output names to numpy arrays, or
<add> a tuple of dictionaries (input_data, sample_weight).
<add> All arrays should contain the same number of samples.
<add> The generator is expected to loop over its data
<add> indefinitely. An epoch finishes when `samples_per_epoch`
<add> samples have been seen by the model.
<add> samples_per_epoch: integer, number of samples to process before
<add> going to the next epoch.
<add> nb_epoch: integer, total number of iterations on the data.
<add> verbose: verbosity mode, 0, 1, or 2.
<add> callbacks: list of callbacks to be called during training.
<add>            validation_data: dictionary mapping input and output names
<add> to appropriate numpy arrays to be used as
<add> held-out validation data.
<add> All arrays should contain the same number of samples.
<add> class_weight: dictionary mapping class indices to a weight
<add> for the class.
<add> nb_worker: integer, number of workers to use for running
<add> the generator (in parallel to model training).
<add> If using multiple workers, the processing order of batches
<add>                generated by the generator will be non-deterministic.
<add> If using multiple workers, make sure to protect
<add> any thread-unsafe operation done by the generator
<add> using a Python mutex.
<add>
<add> # Returns
<add>
<add> A `History` object.
<add>
<add> # Examples
<add>
<add> ```python
<add> def generate_arrays_from_file(path):
<add> while 1:
<add> f = open(path)
<add> for line in f:
<add> # create numpy arrays of input data
<add> # and labels, from each line in the file
<add> x1, x2, y = process_line(line)
<add> yield {'input_1': x1, 'input_2': x2, 'output': y}
<add> f.close()
<add>
<add> graph.fit_generator(generate_arrays_from_file('/my_file.txt'),
<add> samples_per_epoch=10000, nb_epoch=10)
<add> ```
<add> '''
<add> max_queue_size = 10 # maximum number of batches in queue
<add> wait_time = 0.05 # in seconds
<add> epoch = 0
<add> do_validation = bool(validation_data)
<add> out_labels = ['loss']
<add> metrics = ['loss', 'val_loss']
<add> if not class_weight:
<add> class_weight = {}
<add>
<add> # prepare callbacks
<add> history = cbks.History()
<add> if verbose:
<add> callbacks = [history, cbks.BaseLogger()] + callbacks
<add> else:
<add> callbacks = [history] + callbacks
<add> callbacks = cbks.CallbackList(callbacks)
<add>
<add> callbacks._set_model(self)
<add> callbacks._set_params({
<add> 'nb_epoch': nb_epoch,
<add> 'nb_sample': samples_per_epoch,
<add> 'verbose': verbose,
<add> 'do_validation': do_validation,
<add> 'metrics': metrics,
<add> })
<add> callbacks.on_train_begin()
<add>
<add> # util function to validate the batches produced
<add> # by the generator
<add> def input_validation(generator_output):
<add> if type(generator_output) in [list, tuple]:
<add> if len(generator_output) == 2:
<add> data, sample_weight = generator_output
<add> else:
<add> _stop.set()
<add> raise Exception('The generator output tuple must have '
<add> '2 dictionary elements: '
<add> '(data, sample_weight).')
<add> elif type(generator_output) == dict:
<add> data = generator_output
<add> sample_weight = {}
<add> else:
<add> _stop.set()
<add> raise Exception('The generator output must be '
<add> 'a data dictionary or a tuple '
<add> '(data, sample_weight).')
<add> assert type(data) == dict
<add> assert type(sample_weight) == dict
<add> return data, sample_weight
<add>
<add> # start generator thread storing batches into a queue
<add> generator_queue = queue.Queue()
<add> _stop = threading.Event()
<add>
<add> def generator_task():
<add> i = 0
<add> while not _stop.is_set():
<add> try:
<add> if generator_queue.qsize() < max_queue_size:
<add> generator_output = generator.next()
<add> generator_queue.put(generator_output)
<add> i += 1
<add> else:
<add> time.sleep(wait_time)
<add> except KeyboardInterrupt:
<add> _stop.set()
<add> return
<add>
<add> generator_threads = [threading.Thread(target=generator_task) for _ in range(nb_worker)]
<add> for thread in generator_threads:
<add> thread.start()
<add>
<add> self.stop_training = False
<add> while epoch < nb_epoch:
<add> callbacks.on_epoch_begin(epoch)
<add> samples_seen = 0
<add> batch_index = 0
<add> while samples_seen < samples_per_epoch:
<add> while not _stop.is_set():
<add> if not generator_queue.empty():
<add> generator_output = generator_queue.get()
<add> break
<add> else:
<add> time.sleep(wait_time)
<add>
<add> data, sample_weight = input_validation(generator_output)
<add>
<add> batch_logs = {}
<add> batch_size = len(data[data.keys()[0]])
<add> batch_logs['batch'] = batch_index
<add> batch_logs['size'] = batch_size
<add> callbacks.on_batch_begin(batch_index, batch_logs)
<add> outs = self.train_on_batch(data,
<add> sample_weight=sample_weight,
<add> class_weight=class_weight)
<add> if type(outs) != list:
<add> outs = [outs]
<add> for l, o in zip(out_labels, outs):
<add> batch_logs[l] = o
<add>
<add> callbacks.on_batch_end(batch_index, batch_logs)
<add>
<add> # construct epoch logs
<add> epoch_logs = {}
<add> batch_index += 1
<add> samples_seen += batch_size
<add> if samples_seen >= samples_per_epoch: # epoch finished
<add> if do_validation:
<add> if hasattr(validation_data, 'next'):
<add> # assumed to be generator
<add> # TODO: call self.evaluate_generator()
<add> _stop.set()
<add> raise NotImplementedError()
<add> else:
<add> # input validation
<add> data, sample_weight = input_validation(validation_data)
<add> val_outs = self.evaluate(data,
<add> sample_weight=sample_weight,
<add> verbose=0)
<add> if type(val_outs) != list:
<add> val_outs = [val_outs]
<add> # same labels assumed
<add> for l, o in zip(out_labels, val_outs):
<add> epoch_logs['val_' + l] = o
<add>
<add> callbacks.on_epoch_end(epoch, epoch_logs)
<add> epoch += 1
<add> if self.stop_training:
<add> break
<add> _stop.set()
<add> callbacks.on_train_end()
<add> return history
<ide><path>tests/keras/test_models.py
<ide> # SEQUENTIAL TEST #
<ide> ####################
<ide>
<add>def test_sequential_fit_generator():
<add> def data_generator(train):
<add> if train:
<add> max_batch_index = len(X_train) // batch_size
<add> else:
<add> max_batch_index = len(X_test) // batch_size
<add> i = 0
<add> while 1:
<add> if train:
<add> yield (X_train[i * batch_size: (i + 1) * batch_size], y_train[i * batch_size: (i + 1) * batch_size])
<add> else:
<add> yield (X_test[i * batch_size: (i + 1) * batch_size], y_test[i * batch_size: (i + 1) * batch_size])
<add> i += 1
<add> i = i % max_batch_index
<add>
<add> model = Sequential()
<add> model.add(Dense(nb_hidden, input_shape=(input_dim,)))
<add> model.add(Activation('relu'))
<add> model.add(Dense(nb_class))
<add> model.add(Activation('softmax'))
<add> model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
<add>
<add> model.fit_generator(data_generator(True), len(X_train), nb_epoch, show_accuracy=False)
<add> model.fit_generator(data_generator(True), len(X_train), nb_epoch, show_accuracy=True)
<add> model.fit_generator(data_generator(True), len(X_train), nb_epoch, show_accuracy=False, validation_data=(X_test, y_test))
<add> model.fit_generator(data_generator(True), len(X_train), nb_epoch, show_accuracy=True, validation_data=(X_test, y_test))
<add>
<add> loss = model.evaluate(X_train, y_train, verbose=0)
<add> assert(loss < 0.8)
<add>
<ide>
<ide> def test_sequential():
<ide> model = Sequential()
<ide> def test_sequential():
<ide>
<ide> model.train_on_batch(X_train[:32], y_train[:32])
<ide>
<del> loss = model.evaluate(X_train, y_train, verbose=0)
<add> loss = model.evaluate(X_test, y_test, verbose=0)
<ide> assert(loss < 0.8)
<ide>
<ide> model.predict(X_test, verbose=0)
<ide> def test_sequential():
<ide> model.load_weights(fname)
<ide> os.remove(fname)
<ide>
<del> nloss = model.evaluate(X_train, y_train, verbose=0)
<add> nloss = model.evaluate(X_test, y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide> # test json serialization
<ide> def test_merge_sum():
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.7)
<ide>
<ide> model.predict([X_test, X_test], verbose=0)
<ide> def test_merge_sum():
<ide> os.remove(fname)
<ide> model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
<ide>
<del> nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_merge_concat():
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.7)
<ide>
<ide> model.predict([X_test, X_test], verbose=0)
<ide> def test_merge_concat():
<ide> model.load_weights(fname)
<ide> os.remove(fname)
<ide>
<del> nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_merge_recursivity():
<ide> model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.7)
<ide>
<ide> model.predict([X_test, X_test, X_test], verbose=0)
<ide> def test_merge_recursivity():
<ide> model.load_weights(fname)
<ide> os.remove(fname)
<ide>
<del> nloss = model.evaluate([X_train, X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_merge_overlap():
<ide>
<ide> model.train_on_batch(X_train[:32], y_train[:32])
<ide>
<del> loss = model.evaluate(X_train, y_train, verbose=0)
<add> loss = model.evaluate(X_test, y_test, verbose=0)
<ide> assert(loss < 0.9)
<ide> model.predict(X_test, verbose=0)
<ide> model.predict_classes(X_test, verbose=0)
<ide> def test_merge_overlap():
<ide> model.load_weights(fname)
<ide> os.remove(fname)
<ide>
<del> nloss = model.evaluate(X_train, y_train, verbose=0)
<add> nloss = model.evaluate(X_test, y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def output_shape(input_shapes):
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.7)
<ide>
<ide> model.predict([X_test, X_test], verbose=0)
<ide> def output_shape(input_shapes):
<ide> model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
<ide> os.remove(fname)
<ide>
<del> nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_siamese_1():
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.8)
<ide>
<ide> model.predict([X_test, X_test], verbose=0)
<ide> def test_siamese_1():
<ide> os.remove(fname)
<ide> model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
<ide>
<del> nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_siamese_2():
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
<ide> model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
<ide>
<del> loss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> loss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss < 0.8)
<ide>
<ide> model.predict([X_test, X_test], verbose=0)
<ide> def test_siamese_2():
<ide> os.remove(fname)
<ide> model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
<ide>
<del> nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
<add> nloss = model.evaluate([X_test, X_test], y_test, verbose=0)
<ide> assert(loss == nloss)
<ide>
<ide>
<ide> def test_siamese_2():
<ide> output_shape=(1,))
<ide>
<ide>
<add>def test_graph_fit_generator():
<add> def data_generator_graph(train):
<add> while 1:
<add> if train:
<add> yield {'input1': X_train_graph, 'output1': y_train_graph}
<add> else:
<add> yield {'input1': X_test_graph, 'output1': y_test_graph}
<add>
<add> graph = Graph()
<add> graph.add_input(name='input1', input_shape=(32,))
<add>
<add> graph.add_node(Dense(16), name='dense1', input='input1')
<add> graph.add_node(Dense(4), name='dense2', input='input1')
<add> graph.add_node(Dense(4), name='dense3', input='dense1')
<add>
<add> graph.add_output(name='output1',
<add> inputs=['dense2', 'dense3'],
<add> merge_mode='sum')
<add> graph.compile('rmsprop', {'output1': 'mse'})
<add>
<add> graph.fit_generator(data_generator_graph(True), 1000, nb_epoch=4)
<add> graph.fit_generator(data_generator_graph(True), 1000, nb_epoch=4)
<add> graph.fit_generator(data_generator_graph(True), 1000, nb_epoch=4, validation_data={'input1': X_test_graph, 'output1': y_test_graph})
<add> graph.fit_generator(data_generator_graph(True), 1000, nb_epoch=4, validation_data={'input1': X_test_graph, 'output1': y_test_graph})
<add>
<add> loss = graph.evaluate({'input1': X_test_graph, 'output1': y_test_graph}, verbose=0)
<add> assert(loss < 3.)
<add>
<add>
<ide> def test_1o_1i():
<ide> # test a non-sequential graph with 1 input and 1 output
<ide> np.random.seed(1337)
<ide> def test_1o_1i():
<ide> assert(len(out) == 1)
<ide> loss = graph.test_on_batch({'input1': X_test_graph, 'output1': y_test_graph})
<ide> loss = graph.train_on_batch({'input1': X_test_graph, 'output1': y_test_graph})
<del> loss = graph.evaluate({'input1': X_test_graph, 'output1': y_test_graph})
<add> loss = graph.evaluate({'input1': X_test_graph, 'output1': y_test_graph}, verbose=0)
<ide> assert(loss < 2.5)
<ide>
<ide> # test validation split | 2 |
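
The two `fit_generator` implementations added above accept different batch formats: the Sequential-style `input_validation` expects `(X, y)` or `(X, y, sample_weight)` tuples, while the Graph version expects a dict keyed by input/output names (optionally paired with a sample-weight dict). The sketch below only illustrates those two generator shapes; the array shapes, batch size and the names `input1`/`output1` are invented for the example and are not part of the commit.

```python
import numpy as np

def sequential_batches(X, y, batch_size=32):
    """Yield (X_batch, y_batch) tuples forever, the tuple format the
    Sequential-style validator accepts (a third element would be the
    per-sample weights)."""
    n_batches = len(X) // batch_size
    i = 0
    while True:
        sl = slice(i * batch_size, (i + 1) * batch_size)
        yield X[sl], y[sl]
        i = (i + 1) % n_batches

def graph_batches(x1, y, batch_size=32):
    """Yield dicts keyed by the Graph model's input/output names, the
    format the dict-based validator accepts."""
    n_batches = len(x1) // batch_size
    i = 0
    while True:
        sl = slice(i * batch_size, (i + 1) * batch_size)
        yield {'input1': x1[sl], 'output1': y[sl]}
        i = (i + 1) % n_batches

# Illustrative data only.
X = np.random.random((256, 20))
y = np.random.randint(0, 2, size=(256, 1))
print(next(sequential_batches(X, y))[0].shape)    # (32, 20)
print(next(graph_batches(X, y))['input1'].shape)  # (32, 20)
```

Either generator would then be handed to the corresponding model's `fit_generator`, which drains batches from the background queue shown in the patch; with `nb_worker > 1`, any shared state inside the generator needs the mutex the docstring warns about.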
Text | Text | fix doc typo in userviewset example | b0201bcfbf57cd3abf75746c149e1eb70ba19c55 | <ide><path>docs/api-guide/viewsets.md
<ide> Both of these come with a trade-off. Using regular views and URL confs is more
<ide>
<ide> The default routers included with REST framework will provide routes for a standard set of create/retrieve/update/destroy style operations, as shown below:
<ide>
<del> class UserViewSet(viewsets.VietSet):
<add> class UserViewSet(viewsets.ViewSet):
<ide> """
<ide> Example empty viewset demonstrating the standard
<ide> actions that will be handled by a router class. | 1 |
Python | Python | update spacy.load() helper functions | dd6dc4c1207d36295641d1d5660b72d19844aa53 | <ide><path>spacy/util.py
<ide> def load_model(name, **overrides):
<ide> if not data_path or not data_path.exists():
<ide> raise IOError("Can't find spaCy data path: %s" % path2str(data_path))
<ide> if isinstance(name, basestring_):
<del> if (data_path / name).exists(): # in data dir or shortcut
<del> spec = importlib.util.spec_from_file_location('model', data_path / name)
<del> cls = importlib.util.module_from_spec(spec)
<del> spec.loader.exec_module(cls)
<del> return cls.load(**overrides)
<add> if name in set([d.name for d in data_path.iterdir()]): # in data dir / shortcut
<add> return load_model_from_link(name, **overrides)
<ide> if is_package(name): # installed as package
<del> cls = importlib.import_module(name)
<del> return cls.load(**overrides)
<add> return load_model_from_package(name, **overrides)
<ide> if Path(name).exists(): # path to model data directory
<del> model_path = Path(name)
<del> meta = get_package_meta(model_path)
<del> cls = get_lang_class(meta['lang'])
<del> nlp = cls(pipeline=meta.get('pipeline', True), meta=meta)
<del> return nlp.from_disk(model_path, **overrides)
<add> return load_model_from_path(Path(name), **overrides)
<ide> elif hasattr(name, 'exists'): # Path or Path-like to model data
<del> meta = get_package_meta(name)
<del> cls = get_lang_class(meta['lang'])
<del> nlp = cls(pipeline=meta.get('pipeline', True), meta=meta)
<del> return nlp.from_disk(name, **overrides)
<add> return load_model_from_path(name, **overrides)
<ide> raise IOError("Can't find model '%s'" % name)
<ide>
<ide>
<add>def load_model_from_link(name, **overrides):
<add> """Load a model from a shortcut link, or directory in spaCy data path."""
<add> spec = importlib.util.spec_from_file_location('model', get_data_path() / name)
<add> try:
<add> cls = importlib.util.module_from_spec(spec)
<add> except AttributeError:
<add> raise IOError(
<add>            "Can't load '%s'. If you're using a shortcut link, make sure it "
<add> "points to a valid model package (not just a data directory)." % name)
<add> spec.loader.exec_module(cls)
<add> return cls.load(**overrides)
<add>
<add>
<add>def load_model_from_package(name, **overrides):
<add> """Load a model from an installed package."""
<add> cls = importlib.import_module(name)
<add> return cls.load(**overrides)
<add>
<add>
<add>def load_model_from_path(model_path, meta=False, **overrides):
<add> """Load a model from a data directory path. Creates Language class with
<add> pipeline from meta.json and then calls from_disk() with path."""
<add> if not meta:
<add> meta = get_model_meta(model_path)
<add> cls = get_lang_class(meta['lang'])
<add> nlp = cls(pipeline=meta.get('pipeline', True), meta=meta, **overrides)
<add> return nlp.from_disk(model_path)
<add>
<add>
<ide> def load_model_from_init_py(init_file, **overrides):
<ide> """Helper function to use in the `load()` method of a model package's
<ide> __init__.py.
<ide> def load_model_from_init_py(init_file, **overrides):
<ide> data_path = model_path / data_dir
<ide> if not model_path.exists():
<ide> raise ValueError("Can't find model directory: %s" % path2str(data_path))
<del> cls = get_lang_class(meta['lang'])
<del> nlp = cls(pipeline=meta.get('pipeline', True), meta=meta)
<del> return nlp.from_disk(data_path, **overrides)
<add> return load_model_from_path(data_path, meta, **overrides)
<ide>
<ide>
<ide> def get_model_meta(path): | 1 |
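
A short sketch of how the reorganised loader above dispatches, using only the helper names defined in this patch. The model names and the path are placeholders and assume that such a shortcut link, installed package or directory actually exists.

```python
from spacy import util

# Name matches a shortcut link (or directory) inside the spaCy data
# path -> handled by load_model_from_link().
nlp = util.load_model('en')

# Name is an installed model package -> load_model_from_package(),
# which imports the package and calls its load().
nlp = util.load_model('en_core_web_sm')

# Name is a filesystem path -> load_model_from_path(), which reads
# meta.json, builds the Language class and pipeline from it, and then
# calls from_disk() on the directory.
nlp = util.load_model('/tmp/my_model')
```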
Python | Python | prevent lemmatization of base nouns | 4f400fa486ebf4fa7ef5aa90607cca68acb301a8 | <ide><path>spacy/lemmatizer.py
<ide> def __call__(self, string, univ_pos, morphology=None):
<ide> def is_base_form(self, univ_pos, morphology=None):
<ide> '''Check whether we're dealing with an uninflected paradigm, so we can
<ide> avoid lemmatization entirely.'''
<del> print("Is base form?", univ_pos, morphology)
<ide> morphology = {} if morphology is None else morphology
<ide> others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]
<ide> true_morph_key = morphology.get('morph', 0)
<del> if univ_pos == 'noun' and morphology.get('number') == 'sing' and not others:
<add> if univ_pos == 'noun' and morphology.get('Number') == 'sing' and not others:
<ide> return True
<del> elif univ_pos == 'verb' and morphology.get('verbform') == 'inf' and not others:
<add> elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf' and not others:
<ide> return True
<ide> elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':
<ide> return True
<del> elif true_morph_key in \
<del> (VerbForm_inf, VerbForm_none, Number_sing, Degree_pos):
<add> elif true_morph_key in (VerbForm_inf, VerbForm_none, Number_sing, Degree_pos):
<ide> return True
<ide> else:
<ide> return False
<ide><path>spacy/tests/regression/test_issue903.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>import pytest
<add>from ...tokens import Doc
<add>
<add>
<add>@pytest.mark.parametrize('text,tag,lemma',
<add> [("anus", "NN", "anus"),
<add> ("princess", "NN", "princess")])
<add>def test_issue912(en_vocab, text, tag, lemma):
<add>    '''Test base-forms of nouns are preserved.'''
<add> doc = Doc(en_vocab, words=[text])
<add> doc[0].tag_ = tag
<add> assert doc[0].lemma_ == lemma
<add> | 2 |
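
The point of the fix above is that `is_base_form` now reads the capitalised feature keys (`Number`, `VerbForm`) that the tag map actually supplies, so singular nouns such as "princess" keep their surface form. A simplified, self-contained sketch of that check (not the exact spaCy code path; the feature dicts are made up):

```python
def looks_like_base_form(univ_pos, morphology=None):
    # Singular nouns and infinitive verbs carrying no other
    # morphological features are already base forms, so lemmatization
    # can be skipped entirely.
    morphology = morphology or {}
    others = [k for k in morphology
              if k not in ('Number', 'VerbForm', 'POS', 'pos')]
    if univ_pos == 'noun' and morphology.get('Number') == 'sing' and not others:
        return True
    if univ_pos == 'verb' and morphology.get('VerbForm') == 'inf' and not others:
        return True
    return False

print(looks_like_base_form('noun', {'Number': 'sing'}))  # True  -> keep "princess"
print(looks_like_base_form('noun', {'Number': 'plur'}))  # False -> lemmatize normally
```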
Javascript | Javascript | move utility functions to internal/fs | 2620358624b6c0f6c7d02dc2e4333eae9e73b3ea | <ide><path>lib/fs.js
<ide> const { Buffer } = require('buffer');
<ide> const errors = require('internal/errors');
<ide> const { Readable, Writable } = require('stream');
<ide> const EventEmitter = require('events');
<del>const { FSReqWrap } = binding;
<add>const { FSReqWrap, statValues } = binding;
<ide> const { FSEvent } = process.binding('fs_event_wrap');
<ide> const internalFS = require('internal/fs');
<ide> const { getPathFromURL } = require('internal/url');
<ide> const internalUtil = require('internal/util');
<ide> const {
<del> assertEncoding,
<del> stringToFlags
<add> copyObject,
<add> getOptions,
<add> isUint32,
<add> modeNum,
<add> nullCheck,
<add> preprocessSymlinkDestination,
<add> Stats,
<add> statsFromValues,
<add> stringToFlags,
<add> stringToSymlinkType,
<add> toUnixTimestamp,
<add> validateBuffer,
<add> validateLen,
<add> validateOffsetLengthRead,
<add> validateOffsetLengthWrite,
<add> validatePath,
<add> validateUint32
<ide> } = internalFS;
<ide> const {
<ide> CHAR_FORWARD_SLASH,
<ide> const errnoException = errors.errnoException;
<ide>
<ide> let truncateWarn = true;
<ide>
<del>function isInt32(n) { return n === (n | 0); }
<del>function isUint32(n) { return n === (n >>> 0); }
<del>
<ide> function showTruncateDeprecation() {
<ide> if (truncateWarn) {
<ide> process.emitWarning(
<ide> function showTruncateDeprecation() {
<ide> }
<ide> }
<ide>
<del>function getOptions(options, defaultOptions) {
<del> if (options === null || options === undefined ||
<del> typeof options === 'function') {
<del> return defaultOptions;
<del> }
<del>
<del> if (typeof options === 'string') {
<del> defaultOptions = util._extend({}, defaultOptions);
<del> defaultOptions.encoding = options;
<del> options = defaultOptions;
<del> } else if (typeof options !== 'object') {
<del> throw new errors.TypeError('ERR_INVALID_ARG_TYPE',
<del> 'options',
<del> ['string', 'Object'],
<del> options);
<del> }
<del>
<del> if (options.encoding !== 'buffer')
<del> assertEncoding(options.encoding);
<del> return options;
<del>}
<del>
<del>function copyObject(source) {
<del> var target = {};
<del> for (var key in source)
<del> target[key] = source[key];
<del> return target;
<del>}
<del>
<ide> function handleErrorFromBinding(ctx) {
<ide> if (ctx.errno !== undefined) { // libuv error numbers
<ide> const err = errors.uvException(ctx);
<ide> function makeCallback(cb) {
<ide> };
<ide> }
<ide>
<del>function validateBuffer(buffer) {
<del> if (!isUint8Array(buffer)) {
<del> const err = new errors.TypeError('ERR_INVALID_ARG_TYPE', 'buffer',
<del> ['Buffer', 'Uint8Array']);
<del> Error.captureStackTrace(err, validateBuffer);
<del> throw err;
<del> }
<del>}
<del>
<del>function validateLen(len) {
<del> let err;
<del>
<del> if (!isInt32(len))
<del> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', 'len', 'integer');
<del>
<del> if (err !== undefined) {
<del> Error.captureStackTrace(err, validateLen);
<del> throw err;
<del> }
<del>}
<del>
<del>function validateOffsetLengthRead(offset, length, bufferLength) {
<del> let err;
<del>
<del> if (offset < 0 || offset >= bufferLength) {
<del> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'offset');
<del> } else if (length < 0 || offset + length > bufferLength) {
<del> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'length');
<del> }
<del>
<del> if (err !== undefined) {
<del> Error.captureStackTrace(err, validateOffsetLengthRead);
<del> throw err;
<del> }
<del>}
<del>
<del>function validateOffsetLengthWrite(offset, length, byteLength) {
<del> let err;
<del>
<del> if (offset > byteLength) {
<del> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'offset');
<del> } else if (offset + length > byteLength || offset + length > kMaxLength) {
<del> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'length');
<del> }
<del>
<del> if (err !== undefined) {
<del> Error.captureStackTrace(err, validateOffsetLengthWrite);
<del> throw err;
<del> }
<del>}
<del>
<del>// Check if the path contains null types if it is a string nor Uint8Array,
<del>// otherwise return silently.
<del>function nullCheck(path, propName, throwError = true) {
<del> const pathIsString = typeof path === 'string';
<del> const pathIsUint8Array = isUint8Array(path);
<del>
<del> // We can only perform meaningful checks on strings and Uint8Arrays.
<del> if (!pathIsString && !pathIsUint8Array) {
<del> return;
<del> }
<del>
<del> if (pathIsString && path.indexOf('\u0000') === -1) {
<del> return;
<del> } else if (pathIsUint8Array && path.indexOf(0) === -1) {
<del> return;
<del> }
<del>
<del> const err = new errors.Error(
<del> 'ERR_INVALID_ARG_VALUE', propName, path,
<del> 'must be a string or Uint8Array without null bytes');
<del>
<del> if (throwError) {
<del> Error.captureStackTrace(err, nullCheck);
<del> throw err;
<del> }
<del> return err;
<del>}
<del>
<del>function validatePath(path, propName) {
<del> let err;
<del>
<del> if (propName === undefined) {
<del> propName = 'path';
<del> }
<del>
<del> if (typeof path !== 'string' && !isUint8Array(path)) {
<del> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', propName,
<del> ['string', 'Buffer', 'URL']);
<del> } else {
<del> err = nullCheck(path, propName, false);
<del> }
<del>
<del> if (err !== undefined) {
<del> Error.captureStackTrace(err, validatePath);
<del> throw err;
<del> }
<del>}
<del>
<del>function validateUint32(value, propName) {
<del> let err;
<del>
<del> if (!isUint32(value))
<del> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', propName, 'integer');
<del>
<del> if (err !== undefined) {
<del> Error.captureStackTrace(err, validateUint32);
<del> throw err;
<del> }
<del>}
<del>
<ide> // Special case of `makeCallback()` that is specific to async `*stat()` calls as
<ide> // an optimization, since the data passed back to the callback needs to be
<ide> // transformed anyway.
<ide> function isFd(path) {
<ide> return (path >>> 0) === path;
<ide> }
<ide>
<del>// Constructor for file stats.
<del>function Stats(
<del> dev,
<del> mode,
<del> nlink,
<del> uid,
<del> gid,
<del> rdev,
<del> blksize,
<del> ino,
<del> size,
<del> blocks,
<del> atim_msec,
<del> mtim_msec,
<del> ctim_msec,
<del> birthtim_msec
<del>) {
<del> this.dev = dev;
<del> this.mode = mode;
<del> this.nlink = nlink;
<del> this.uid = uid;
<del> this.gid = gid;
<del> this.rdev = rdev;
<del> this.blksize = blksize;
<del> this.ino = ino;
<del> this.size = size;
<del> this.blocks = blocks;
<del> this.atimeMs = atim_msec;
<del> this.mtimeMs = mtim_msec;
<del> this.ctimeMs = ctim_msec;
<del> this.birthtimeMs = birthtim_msec;
<del> this.atime = new Date(atim_msec + 0.5);
<del> this.mtime = new Date(mtim_msec + 0.5);
<del> this.ctime = new Date(ctim_msec + 0.5);
<del> this.birthtime = new Date(birthtim_msec + 0.5);
<del>}
<ide> fs.Stats = Stats;
<ide>
<del>Stats.prototype._checkModeProperty = function(property) {
<del> return ((this.mode & S_IFMT) === property);
<del>};
<del>
<del>Stats.prototype.isDirectory = function() {
<del> return this._checkModeProperty(constants.S_IFDIR);
<del>};
<del>
<del>Stats.prototype.isFile = function() {
<del> return this._checkModeProperty(S_IFREG);
<del>};
<del>
<del>Stats.prototype.isBlockDevice = function() {
<del> return this._checkModeProperty(constants.S_IFBLK);
<del>};
<del>
<del>Stats.prototype.isCharacterDevice = function() {
<del> return this._checkModeProperty(constants.S_IFCHR);
<del>};
<del>
<del>Stats.prototype.isSymbolicLink = function() {
<del> return this._checkModeProperty(S_IFLNK);
<del>};
<del>
<del>Stats.prototype.isFIFO = function() {
<del> return this._checkModeProperty(S_IFIFO);
<del>};
<del>
<del>Stats.prototype.isSocket = function() {
<del> return this._checkModeProperty(S_IFSOCK);
<del>};
<del>
<del>const statValues = binding.statValues;
<del>
<del>function statsFromValues(stats = statValues) {
<del> return new Stats(stats[0], stats[1], stats[2], stats[3], stats[4], stats[5],
<del> stats[6] < 0 ? undefined : stats[6], stats[7], stats[8],
<del> stats[9] < 0 ? undefined : stats[9], stats[10], stats[11],
<del> stats[12], stats[13]);
<del>}
<del>
<ide> // Don't allow mode to accidentally be overwritten.
<ide> Object.defineProperties(fs, {
<ide> F_OK: { enumerable: true, value: constants.F_OK || 0 },
<ide> fs.closeSync = function(fd) {
<ide> handleErrorFromBinding(ctx);
<ide> };
<ide>
<del>function modeNum(m, def) {
<del> if (typeof m === 'number')
<del> return m;
<del> if (typeof m === 'string')
<del> return parseInt(m, 8);
<del> if (def)
<del> return modeNum(def);
<del> return undefined;
<del>}
<del>
<ide> fs.open = function(path, flags, mode, callback_) {
<ide> var callback = makeCallback(arguments[arguments.length - 1]);
<ide> mode = modeNum(mode, 0o666);
<ide> fs.readlinkSync = function(path, options) {
<ide> return result;
<ide> };
<ide>
<del>function preprocessSymlinkDestination(path, type, linkPath) {
<del> if (!isWindows) {
<del> // No preprocessing is needed on Unix.
<del> return path;
<del> } else if (type === 'junction') {
<del> // Junctions paths need to be absolute and \\?\-prefixed.
<del> // A relative target is relative to the link's parent directory.
<del> path = pathModule.resolve(linkPath, '..', path);
<del> return pathModule.toNamespacedPath(path);
<del> } else {
<del> // Windows symlinks don't tolerate forward slashes.
<del> return ('' + path).replace(/\//g, '\\');
<del> }
<del>}
<del>
<del>function stringToSymlinkType(type) {
<del> let flags = 0;
<del> if (typeof type === 'string') {
<del> switch (type) {
<del> case 'dir':
<del> flags |= constants.UV_FS_SYMLINK_DIR;
<del> break;
<del> case 'junction':
<del> flags |= constants.UV_FS_SYMLINK_JUNCTION;
<del> break;
<del> case 'file':
<del> break;
<del> default:
<del> const err = new errors.Error('ERR_FS_INVALID_SYMLINK_TYPE', type);
<del> Error.captureStackTrace(err, stringToSymlinkType);
<del> throw err;
<del> }
<del> }
<del> return flags;
<del>}
<del>
<ide> fs.symlink = function(target, path, type_, callback_) {
<ide> var type = (typeof type_ === 'string' ? type_ : null);
<ide> var callback = makeCallback(arguments[arguments.length - 1]);
<ide> fs.chownSync = function(path, uid, gid) {
<ide> return binding.chown(pathModule.toNamespacedPath(path), uid, gid);
<ide> };
<ide>
<del>// converts Date or number to a fractional UNIX timestamp
<del>function toUnixTimestamp(time, name = 'time') {
<del> // eslint-disable-next-line eqeqeq
<del> if (typeof time === 'string' && +time == time) {
<del> return +time;
<del> }
<del> if (Number.isFinite(time)) {
<del> if (time < 0) {
<del> return Date.now() / 1000;
<del> }
<del> return time;
<del> }
<del> if (util.isDate(time)) {
<del> // convert to 123.456 UNIX timestamp
<del> return time.getTime() / 1000;
<del> }
<del> throw new errors.TypeError('ERR_INVALID_ARG_TYPE',
<del> name,
<del> ['Date', 'Time in seconds'],
<del> time);
<del>}
<del>
<ide> // exported for unit tests, not for public consumption
<ide> fs._toUnixTimestamp = toUnixTimestamp;
<ide>
<ide><path>lib/internal/fs.js
<ide> 'use strict';
<ide>
<del>const { Buffer } = require('buffer');
<add>const { Buffer, kMaxLength } = require('buffer');
<ide> const { Writable } = require('stream');
<ide> const errors = require('internal/errors');
<add>const { isUint8Array } = require('internal/util/types');
<ide> const fs = require('fs');
<add>const pathModule = require('path');
<ide> const util = require('util');
<ide>
<ide> const {
<ide> const {
<ide> O_RDWR,
<ide> O_SYNC,
<ide> O_TRUNC,
<del> O_WRONLY
<add> O_WRONLY,
<add> S_IFBLK,
<add> S_IFCHR,
<add> S_IFDIR,
<add> S_IFIFO,
<add> S_IFLNK,
<add> S_IFMT,
<add> S_IFREG,
<add> S_IFSOCK,
<add> UV_FS_SYMLINK_DIR,
<add> UV_FS_SYMLINK_JUNCTION
<ide> } = process.binding('constants').fs;
<add>const { statValues } = process.binding('fs');
<add>
<add>const isWindows = process.platform === 'win32';
<ide>
<ide> function assertEncoding(encoding) {
<ide> if (encoding && !Buffer.isEncoding(encoding)) {
<ide> throw new errors.TypeError('ERR_INVALID_OPT_VALUE_ENCODING', encoding);
<ide> }
<ide> }
<ide>
<add>function copyObject(source) {
<add> var target = {};
<add> for (var key in source)
<add> target[key] = source[key];
<add> return target;
<add>}
<add>
<add>function getOptions(options, defaultOptions) {
<add> if (options === null || options === undefined ||
<add> typeof options === 'function') {
<add> return defaultOptions;
<add> }
<add>
<add> if (typeof options === 'string') {
<add> defaultOptions = util._extend({}, defaultOptions);
<add> defaultOptions.encoding = options;
<add> options = defaultOptions;
<add> } else if (typeof options !== 'object') {
<add> throw new errors.TypeError('ERR_INVALID_ARG_TYPE',
<add> 'options',
<add> ['string', 'Object'],
<add> options);
<add> }
<add>
<add> if (options.encoding !== 'buffer')
<add> assertEncoding(options.encoding);
<add> return options;
<add>}
<add>
<add>function isInt32(n) { return n === (n | 0); }
<add>function isUint32(n) { return n === (n >>> 0); }
<add>
<add>function modeNum(m, def) {
<add> if (typeof m === 'number')
<add> return m;
<add> if (typeof m === 'string')
<add> return parseInt(m, 8);
<add> if (def)
<add> return modeNum(def);
<add> return undefined;
<add>}
<add>
<add>// Check if the path contains null types if it is a string or Uint8Array,
<add>// otherwise return silently.
<add>function nullCheck(path, propName, throwError = true) {
<add> const pathIsString = typeof path === 'string';
<add> const pathIsUint8Array = isUint8Array(path);
<add>
<add> // We can only perform meaningful checks on strings and Uint8Arrays.
<add> if (!pathIsString && !pathIsUint8Array) {
<add> return;
<add> }
<add>
<add> if (pathIsString && path.indexOf('\u0000') === -1) {
<add> return;
<add> } else if (pathIsUint8Array && path.indexOf(0) === -1) {
<add> return;
<add> }
<add>
<add> const err = new errors.Error(
<add> 'ERR_INVALID_ARG_VALUE', propName, path,
<add> 'must be a string or Uint8Array without null bytes');
<add>
<add> if (throwError) {
<add> Error.captureStackTrace(err, nullCheck);
<add> throw err;
<add> }
<add> return err;
<add>}
<add>
<add>function preprocessSymlinkDestination(path, type, linkPath) {
<add> if (!isWindows) {
<add> // No preprocessing is needed on Unix.
<add> return path;
<add> } else if (type === 'junction') {
<add> // Junctions paths need to be absolute and \\?\-prefixed.
<add> // A relative target is relative to the link's parent directory.
<add> path = pathModule.resolve(linkPath, '..', path);
<add> return pathModule.toNamespacedPath(path);
<add> } else {
<add> // Windows symlinks don't tolerate forward slashes.
<add> return ('' + path).replace(/\//g, '\\');
<add> }
<add>}
<add>
<add>// Constructor for file stats.
<add>function Stats(
<add> dev,
<add> mode,
<add> nlink,
<add> uid,
<add> gid,
<add> rdev,
<add> blksize,
<add> ino,
<add> size,
<add> blocks,
<add> atim_msec,
<add> mtim_msec,
<add> ctim_msec,
<add> birthtim_msec
<add>) {
<add> this.dev = dev;
<add> this.mode = mode;
<add> this.nlink = nlink;
<add> this.uid = uid;
<add> this.gid = gid;
<add> this.rdev = rdev;
<add> this.blksize = blksize;
<add> this.ino = ino;
<add> this.size = size;
<add> this.blocks = blocks;
<add> this.atimeMs = atim_msec;
<add> this.mtimeMs = mtim_msec;
<add> this.ctimeMs = ctim_msec;
<add> this.birthtimeMs = birthtim_msec;
<add> this.atime = new Date(atim_msec + 0.5);
<add> this.mtime = new Date(mtim_msec + 0.5);
<add> this.ctime = new Date(ctim_msec + 0.5);
<add> this.birthtime = new Date(birthtim_msec + 0.5);
<add>}
<add>
<add>Stats.prototype._checkModeProperty = function(property) {
<add> return ((this.mode & S_IFMT) === property);
<add>};
<add>
<add>Stats.prototype.isDirectory = function() {
<add> return this._checkModeProperty(S_IFDIR);
<add>};
<add>
<add>Stats.prototype.isFile = function() {
<add> return this._checkModeProperty(S_IFREG);
<add>};
<add>
<add>Stats.prototype.isBlockDevice = function() {
<add> return this._checkModeProperty(S_IFBLK);
<add>};
<add>
<add>Stats.prototype.isCharacterDevice = function() {
<add> return this._checkModeProperty(S_IFCHR);
<add>};
<add>
<add>Stats.prototype.isSymbolicLink = function() {
<add> return this._checkModeProperty(S_IFLNK);
<add>};
<add>
<add>Stats.prototype.isFIFO = function() {
<add> return this._checkModeProperty(S_IFIFO);
<add>};
<add>
<add>Stats.prototype.isSocket = function() {
<add> return this._checkModeProperty(S_IFSOCK);
<add>};
<add>
<add>function statsFromValues(stats = statValues) {
<add> return new Stats(stats[0], stats[1], stats[2], stats[3], stats[4], stats[5],
<add> stats[6] < 0 ? undefined : stats[6], stats[7], stats[8],
<add> stats[9] < 0 ? undefined : stats[9], stats[10], stats[11],
<add> stats[12], stats[13]);
<add>}
<add>
<ide> function stringToFlags(flags) {
<ide> if (typeof flags === 'number') {
<ide> return flags;
<ide> function stringToFlags(flags) {
<ide> throw new errors.TypeError('ERR_INVALID_OPT_VALUE', 'flags', flags);
<ide> }
<ide>
<add>function stringToSymlinkType(type) {
<add> let flags = 0;
<add> if (typeof type === 'string') {
<add> switch (type) {
<add> case 'dir':
<add> flags |= UV_FS_SYMLINK_DIR;
<add> break;
<add> case 'junction':
<add> flags |= UV_FS_SYMLINK_JUNCTION;
<add> break;
<add> case 'file':
<add> break;
<add> default:
<add> const err = new errors.Error('ERR_FS_INVALID_SYMLINK_TYPE', type);
<add> Error.captureStackTrace(err, stringToSymlinkType);
<add> throw err;
<add> }
<add> }
<add> return flags;
<add>}
<add>
<ide> // Temporary hack for process.stdout and process.stderr when piped to files.
<ide> function SyncWriteStream(fd, options) {
<ide> Writable.call(this);
<ide> SyncWriteStream.prototype.destroy = function() {
<ide> return true;
<ide> };
<ide>
<add>// converts Date or number to a fractional UNIX timestamp
<add>function toUnixTimestamp(time, name = 'time') {
<add> // eslint-disable-next-line eqeqeq
<add> if (typeof time === 'string' && +time == time) {
<add> return +time;
<add> }
<add> if (Number.isFinite(time)) {
<add> if (time < 0) {
<add> return Date.now() / 1000;
<add> }
<add> return time;
<add> }
<add> if (util.isDate(time)) {
<add> // convert to 123.456 UNIX timestamp
<add> return time.getTime() / 1000;
<add> }
<add> throw new errors.TypeError('ERR_INVALID_ARG_TYPE',
<add> name,
<add> ['Date', 'Time in seconds'],
<add> time);
<add>}
<add>
<add>function validateBuffer(buffer) {
<add> if (!isUint8Array(buffer)) {
<add> const err = new errors.TypeError('ERR_INVALID_ARG_TYPE', 'buffer',
<add> ['Buffer', 'Uint8Array']);
<add> Error.captureStackTrace(err, validateBuffer);
<add> throw err;
<add> }
<add>}
<add>
<add>function validateLen(len) {
<add> let err;
<add>
<add> if (!isInt32(len))
<add> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', 'len', 'integer');
<add>
<add> if (err !== undefined) {
<add> Error.captureStackTrace(err, validateLen);
<add> throw err;
<add> }
<add>}
<add>
<add>function validateOffsetLengthRead(offset, length, bufferLength) {
<add> let err;
<add>
<add> if (offset < 0 || offset >= bufferLength) {
<add> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'offset');
<add> } else if (length < 0 || offset + length > bufferLength) {
<add> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'length');
<add> }
<add>
<add> if (err !== undefined) {
<add> Error.captureStackTrace(err, validateOffsetLengthRead);
<add> throw err;
<add> }
<add>}
<add>
<add>function validateOffsetLengthWrite(offset, length, byteLength) {
<add> let err;
<add>
<add> if (offset > byteLength) {
<add> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'offset');
<add> } else if (offset + length > byteLength || offset + length > kMaxLength) {
<add> err = new errors.RangeError('ERR_OUT_OF_RANGE', 'length');
<add> }
<add>
<add> if (err !== undefined) {
<add> Error.captureStackTrace(err, validateOffsetLengthWrite);
<add> throw err;
<add> }
<add>}
<add>
<add>function validatePath(path, propName) {
<add> let err;
<add>
<add> if (propName === undefined) {
<add> propName = 'path';
<add> }
<add>
<add> if (typeof path !== 'string' && !isUint8Array(path)) {
<add> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', propName,
<add> ['string', 'Buffer', 'URL']);
<add> } else {
<add> err = nullCheck(path, propName, false);
<add> }
<add>
<add> if (err !== undefined) {
<add> Error.captureStackTrace(err, validatePath);
<add> throw err;
<add> }
<add>}
<add>
<add>function validateUint32(value, propName) {
<add> let err;
<add>
<add> if (!isUint32(value))
<add> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', propName, 'integer');
<add>
<add> if (err !== undefined) {
<add> Error.captureStackTrace(err, validateUint32);
<add> throw err;
<add> }
<add>}
<add>
<ide> module.exports = {
<ide> assertEncoding,
<add> copyObject,
<add> getOptions,
<add> isInt32,
<add> isUint32,
<add> modeNum,
<add> nullCheck,
<add> preprocessSymlinkDestination,
<add> realpathCacheKey: Symbol('realpathCacheKey'),
<add> statsFromValues,
<ide> stringToFlags,
<add> stringToSymlinkType,
<add> Stats,
<ide> SyncWriteStream,
<del> realpathCacheKey: Symbol('realpathCacheKey')
<add> toUnixTimestamp,
<add> validateBuffer,
<add> validateLen,
<add> validateOffsetLengthRead,
<add> validateOffsetLengthWrite,
<add> validatePath,
<add> validateUint32
<ide> }; | 2 |
Python | Python | fix has_app_context and has_request_context | 095651be9eec58ddb0c2eb6158318b1c703c67c5 | <ide><path>src/flask/ctx.py
<ide> def __init__(self, username, remote_addr=None):
<ide>
<ide> .. versionadded:: 0.7
<ide> """
<del> return _cv_app.get(None) is not None
<add> return _cv_request.get(None) is not None
<ide>
<ide>
<ide> def has_app_context() -> bool:
<ide> def has_app_context() -> bool:
<ide>
<ide> .. versionadded:: 0.9
<ide> """
<del> return _cv_request.get(None) is not None
<add> return _cv_app.get(None) is not None
<ide>
<ide>
<ide> class AppContext: | 1 |
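
With the two context variables swapped back as above, each helper reports what its name promises. A minimal illustration against a plain `Flask(__name__)` app:

```python
from flask import Flask, has_app_context, has_request_context

app = Flask(__name__)

with app.app_context():
    # Only an application context is pushed here.
    assert has_app_context() is True
    assert has_request_context() is False

with app.test_request_context('/'):
    # Pushing a request context pushes an application context too.
    assert has_app_context() is True
    assert has_request_context() is True
```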
Javascript | Javascript | resolve more conflicts | 02e0047e92de0f1575fb92c091806c4eb4b6ef26 | <ide><path>src/css.js
<ide> if ( !jQuery.support.opacity ) {
<ide>
<ide> style.filter = ralpha.test(filter) ?
<ide> filter.replace(ralpha, opacity) :
<del><<<<<<< HEAD
<del> style.filter + " " + opacity;
<del>=======
<ide> filter + " " + opacity;
<del>>>>>>>> 312df0441b16981dd697d74fcbc1e1f212b47b7e
<ide> }
<ide> };
<ide> } | 1 |
Python | Python | specify loader for yaml loading | 4a5770827edf1c3974274ba3e4169d0e5ba7478a | <ide><path>official/modeling/hyperparams/base_config.py
<ide> def replace(self, **kwargs):
<ide> def from_yaml(cls, file_path: str):
<ide> # Note: This only works if the Config has all default values.
<ide> with tf.io.gfile.GFile(file_path, 'r') as f:
<del> loaded = yaml.load(f)
<add> loaded = yaml.load(f, Loader=yaml.FullLoader)
<ide> config = cls()
<ide> config.override(loaded)
<ide> return config
<ide><path>official/modeling/hyperparams/params_dict.py
<ide> def _get_kvs(tokens, params_dict):
<ide> def read_yaml_to_params_dict(file_path):
<ide> """Reads a YAML file to a ParamsDict."""
<ide> with tf.io.gfile.GFile(file_path, 'r') as f:
<del> params_dict = yaml.load(f)
<add> params_dict = yaml.load(f, Loader=yaml.FullLoader)
<ide> return ParamsDict(params_dict)
<ide>
<ide>
<ide> def override_params_dict(params, dict_or_string_or_yaml_file, is_strict):
<ide> nested_csv_str_to_json_str(dict_or_string_or_yaml_file))
<ide> except ValueError:
<ide> pass
<del> params_dict = yaml.load(dict_or_string_or_yaml_file)
<add> params_dict = yaml.load(dict_or_string_or_yaml_file, Loader=yaml.FullLoader)
<ide> if isinstance(params_dict, dict):
<ide> params.override(params_dict, is_strict)
<ide> else:
<ide> with tf.io.gfile.GFile(dict_or_string_or_yaml_file) as f:
<del> params.override(yaml.load(f), is_strict)
<add> params.override(yaml.load(f, Loader=yaml.FullLoader), is_strict)
<ide> else:
<ide> raise ValueError('Unknown input type to parse.')
<ide> return params | 2 |
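
Passing `Loader=yaml.FullLoader` explicitly, as the patch does, silences PyYAML's "no Loader specified" warning and avoids the arbitrary object construction allowed by the legacy default loader. A small sketch with an inline document (the config keys are invented):

```python
import yaml

raw = """
train:
  batch_size: 64
  learning_rate: 0.001
"""

params = yaml.load(raw, Loader=yaml.FullLoader)
print(params["train"]["batch_size"])  # 64

# For plain scalar/mapping data like this, yaml.safe_load(raw)
# returns the same result.
```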
Ruby | Ruby | fix a test failure on linux | 434e8d8e2fc4dbd51e3193e8560ecad65f916e7e | <ide><path>Library/Homebrew/test/cmd/install_spec.rb
<ide> def install
<ide> it "succeeds when a non-fatal requirement isn't satisfied" do
<ide> setup_test_formula "testball1", <<~EOS
<ide> class NonFatalRequirement < Requirement
<del> satisfy { false }
<add> satisfy(build_env: false) { false }
<ide> end
<ide>
<ide> depends_on NonFatalRequirement | 1 |
Javascript | Javascript | remove a newline | 7ed0340488bc8d0d1791cebd564594764526f66f | <ide><path>src/ng/animate.js
<ide> var $AnimateProvider = ['$provide', /** @this */ function($provide) {
<ide> var reservedRegex = new RegExp('(\\s+|\\/)' + NG_ANIMATE_CLASSNAME + '(\\s+|\\/)');
<ide> if (reservedRegex.test(this.$$classNameFilter.toString())) {
<ide> throw $animateMinErr('nongcls','$animateProvider.classNameFilter(regex) prohibits accepting a regex value which matches/contains the "{0}" CSS class.', NG_ANIMATE_CLASSNAME);
<del>
<ide> }
<ide> }
<ide> } | 1 |
Text | Text | add travis badge | fe9e72ac163d7951d854dfba211269158664d65c | <ide><path>README.md
<ide> <img width="112" alt="screen shot 2016-10-25 at 2 37 27 pm" src="https://cloud.githubusercontent.com/assets/13041/19686250/971bf7f8-9ac0-11e6-975c-188defd82df1.png">
<ide>
<add>[](https://travis-ci.org/zeit/next.js)
<ide> [](https://zeit.chat)
<ide>
<ide> Next.js is a minimalistic framework for server-rendered React applications. | 1 |
Mixed | Python | add graham scan algorithm | a796ccf1ce2594bffdb938156987a0cbb16ee52e | <ide><path>DIRECTORY.md
<ide> * [Binary Count Trailing Zeros](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/binary_count_trailing_zeros.py)
<ide> * [Binary Or Operator](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/binary_or_operator.py)
<ide> * [Binary Xor Operator](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/binary_xor_operator.py)
<add> * [Count Number Of One Bits](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/count_number_of_one_bits.py)
<add> * [Reverse Bits](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/reverse_bits.py)
<ide> * [Single Bit Manipulation Operations](https://github.com/TheAlgorithms/Python/blob/master/bit_manipulation/single_bit_manipulation_operations.py)
<ide>
<ide> ## Blockchain
<ide> * [Fenwick Tree](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/fenwick_tree.py)
<ide> * [Lazy Segment Tree](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/lazy_segment_tree.py)
<ide> * [Lowest Common Ancestor](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/lowest_common_ancestor.py)
<add> * [Merge Two Binary Trees](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/merge_two_binary_trees.py)
<ide> * [Non Recursive Segment Tree](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/non_recursive_segment_tree.py)
<ide> * [Number Of Possible Binary Trees](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/number_of_possible_binary_trees.py)
<ide> * [Red Black Tree](https://github.com/TheAlgorithms/Python/blob/master/data_structures/binary_tree/red_black_tree.py)
<ide>
<ide> ## Graphics
<ide> * [Bezier Curve](https://github.com/TheAlgorithms/Python/blob/master/graphics/bezier_curve.py)
<add> * [Koch Snowflake](https://github.com/TheAlgorithms/Python/blob/master/graphics/koch_snowflake.py)
<ide> * [Vector3 For 2D Rendering](https://github.com/TheAlgorithms/Python/blob/master/graphics/vector3_for_2d_rendering.py)
<ide>
<ide> ## Graphs
<ide> * [Frequency Finder](https://github.com/TheAlgorithms/Python/blob/master/other/frequency_finder.py)
<ide> * [Game Of Life](https://github.com/TheAlgorithms/Python/blob/master/other/game_of_life.py)
<ide> * [Gauss Easter](https://github.com/TheAlgorithms/Python/blob/master/other/gauss_easter.py)
<add> * [Graham Scan](https://github.com/TheAlgorithms/Python/blob/master/other/graham_scan.py)
<ide> * [Greedy](https://github.com/TheAlgorithms/Python/blob/master/other/greedy.py)
<ide> * [Integeration By Simpson Approx](https://github.com/TheAlgorithms/Python/blob/master/other/integeration_by_simpson_approx.py)
<ide> * [Largest Subarray Sum](https://github.com/TheAlgorithms/Python/blob/master/other/largest_subarray_sum.py)
<ide> * [Merge Insertion Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/merge_insertion_sort.py)
<ide> * [Merge Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/merge_sort.py)
<ide> * [Natural Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/natural_sort.py)
<add> * [Odd Even Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/odd_even_sort.py)
<ide> * [Odd Even Transposition Parallel](https://github.com/TheAlgorithms/Python/blob/master/sorts/odd_even_transposition_parallel.py)
<ide> * [Odd Even Transposition Single Threaded](https://github.com/TheAlgorithms/Python/blob/master/sorts/odd_even_transposition_single_threaded.py)
<ide> * [Pancake Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/pancake_sort.py)
<ide> * [Recursive Quick Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/recursive_quick_sort.py)
<ide> * [Selection Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/selection_sort.py)
<ide> * [Shell Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/shell_sort.py)
<add> * [Slowsort](https://github.com/TheAlgorithms/Python/blob/master/sorts/slowsort.py)
<ide> * [Stooge Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/stooge_sort.py)
<ide> * [Strand Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/strand_sort.py)
<ide> * [Tim Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/tim_sort.py)
<ide><path>other/graham_scan.py
<add>"""
<add>This is a pure Python implementation of the Graham scan algorithm
<add>Source: https://en.wikipedia.org/wiki/Graham_scan
<add>
<add>For doctests run following command:
<add>python3 -m doctest -v graham_scan.py
<add>"""
<add>
<add>from __future__ import annotations
<add>
<add>from collections import deque
<add>from enum import Enum
<add>from math import atan2, degrees
<add>from sys import maxsize
<add>
<add>
<add>def graham_scan(points: list[list[int, int]]) -> list[list[int, int]]:
<add> """Pure implementation of graham scan algorithm in Python
<add>
<add> :param points: The unique points on coordinates.
<add>    :return: The points on the convex hull.
<add>
<add> Examples:
<add> >>> graham_scan([(9, 6), (3, 1), (0, 0), (5, 5), (5, 2), (7, 0), (3, 3), (1, 4)])
<add> [(0, 0), (7, 0), (9, 6), (5, 5), (1, 4)]
<add>
<add> >>> graham_scan([(0, 0), (1, 0), (1, 1), (0, 1)])
<add> [(0, 0), (1, 0), (1, 1), (0, 1)]
<add>
<add> >>> graham_scan([(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)])
<add> [(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)]
<add>
<add> >>> graham_scan([(-100, 20), (99, 3), (1, 10000001), (5133186, -25), (-66, -4)])
<add> [(5133186, -25), (1, 10000001), (-100, 20), (-66, -4)]
<add> """
<add>
<add> if len(points) <= 2:
<add> # There is no convex hull
<add>        raise ValueError("graham_scan: argument must contain at least 3 points.")
<add> if len(points) == 3:
<add> return points
<add>    # find the lowest, left-most point
<add> minidx = 0
<add> miny, minx = maxsize, maxsize
<add> for i, point in enumerate(points):
<add> x = point[0]
<add> y = point[1]
<add> if y < miny:
<add> miny = y
<add> minx = x
<add> minidx = i
<add> if y == miny:
<add> if x < minx:
<add> minx = x
<add> minidx = i
<add>
<add>    # remove the lowest, left-most point from points to prepare for sorting
<add> points.pop(minidx)
<add>
<add>        """Return the angle of the point, measured from (minx, miny)
<add> """Return the angle toward to point from (minx, miny)
<add>
<add> :param point: The target point
<add> minx: The starting point's x
<add> miny: The starting point's y
<add> :return: the angle
<add>
<add> Examples:
<add> >>> angle_comparer([1,1], 0, 0)
<add> 45.0
<add>
<add> >>> angle_comparer([100,1], 10, 10)
<add> -5.710593137499642
<add>
<add> >>> angle_comparer([5,5], 2, 3)
<add> 33.690067525979785
<add> """
<add>        # sort the points according to the angle from the lowest, left-most point
<add> x = point[0]
<add> y = point[1]
<add> angle = degrees(atan2(y - miny, x - minx))
<add> return angle
<add>
<add> sorted_points = sorted(points, key=lambda point: angle_comparer(point, minx, miny))
<add>    # This insert has linear cost; you could instead push (minx, miny)
<add>    # onto the stack later. It is done here for readability.
<add> sorted_points.insert(0, (minx, miny))
<add>
<add>    # traverse from the lowest, left-most point in anti-clockwise direction;
<add>    # whenever the turn goes right, the previous point is not on the convex hull.
<add> class Direction(Enum):
<add> left = 1
<add> straight = 2
<add> right = 3
<add>
<add> def check_direction(
<add> starting: list[int, int], via: list[int, int], target: list[int, int]
<add> ) -> Direction:
<add>        """Return the direction of target relative to the ray from starting through via
<add>
<add> :param starting: The starting point
<add> via: The via point
<add> target: The target point
<add> :return: the Direction
<add>
<add> Examples:
<add> >>> check_direction([1,1], [2,2], [3,3])
<add> Direction.straight
<add>
<add> >>> check_direction([60,1], [-50,199], [30,2])
<add> Direction.left
<add>
<add> >>> check_direction([0,0], [5,5], [10,0])
<add> Direction.right
<add> """
<add> x0, y0 = starting
<add> x1, y1 = via
<add> x2, y2 = target
<add> via_angle = degrees(atan2(y1 - y0, x1 - x0))
<add> if via_angle < 0:
<add> via_angle += 360
<add> target_angle = degrees(atan2(y2 - y0, x2 - x0))
<add> if target_angle < 0:
<add> target_angle += 360
<add> # t-
<add> # \ \
<add> # \ v
<add> # \|
<add> # s
<add> # via_angle is always lower than target_angle if the direction is left.
<add> # If they are equal, the points lie on the same line of the convex hull.
<add> if target_angle > via_angle:
<add> return Direction.left
<add> if target_angle == via_angle:
<add> return Direction.straight
<add> if target_angle < via_angle:
<add> return Direction.right
<add>
<add> stack = deque()
<add> stack.append(sorted_points[0])
<add> stack.append(sorted_points[1])
<add> stack.append(sorted_points[2])
<add> # The first 3 points always turn left,
<add> # because they were sorted by their angle from (minx, miny).
<add> current_direction = Direction.left
<add>
<add> for i in range(3, len(sorted_points)):
<add> while True:
<add> starting = stack[-2]
<add> via = stack[-1]
<add> target = sorted_points[i]
<add> next_direction = check_direction(starting, via, target)
<add>
<add> if next_direction == Direction.left:
<add> current_direction = Direction.left
<add> break
<add> if next_direction == Direction.straight:
<add> if current_direction == Direction.left:
<add> # Keep current_direction as left: if the line keeps going straight,
<add> # we still want to know whether that straight line is heading left.
<add> break
<add> elif current_direction == Direction.right:
<add> # If the straight line is heading right,
<add> # none of the previous points on that straight line belong to the convex hull.
<add> stack.pop()
<add> if next_direction == Direction.right:
<add> stack.pop()
<add> stack.append(sorted_points[i])
<add> return list(stack) | 2 |
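The check_direction helper in the Graham scan patch above classifies each turn by comparing atan2 angles. A common equivalent formulation uses the signed cross product of the two edge vectors instead; the sketch below is illustrative only and not part of the commit, and the orientation() helper name is an assumption.

```python
# Standalone sketch (illustrative, not part of the patch above): the same
# left/straight/right test expressed as a signed cross product, avoiding atan2.
def orientation(starting, via, target):
    """Positive for a left turn, zero for collinear points, negative for a right turn."""
    x0, y0 = starting
    x1, y1 = via
    x2, y2 = target
    return (x1 - x0) * (y2 - y0) - (y1 - y0) * (x2 - x0)


# These mirror the doctests of check_direction in the patch above.
assert orientation((1, 1), (2, 2), (3, 3)) == 0       # straight
assert orientation((60, 1), (-50, 199), (30, 2)) > 0  # left
assert orientation((0, 0), (5, 5), (10, 0)) < 0       # right
```

A positive value corresponds to Direction.left, zero to Direction.straight, and a negative value to Direction.right in the patch's terms.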
Text | Text | add missing deprecation number | 2cfdf28413fd9a7bfab65cb49cff6e50ab0c21ec | <ide><path>doc/api/deprecations.md
<ide> Type: Documentation-only
<ide> The [`crypto.Certificate()` constructor][] is deprecated. Use
<ide> [static methods of `crypto.Certificate()`][] instead.
<ide>
<del>### DEP0XXX: `fs.rmdir(path, { recursive: true })`
<add>### DEP0147: `fs.rmdir(path, { recursive: true })`
<ide> <!-- YAML
<ide> changes:
<ide> - version: REPLACEME | 1 |
Text | Text | change belongs_to example to has_one [ci skip] | c2e51b839491f8d440f7c75c22e9050a058f4467 | <ide><path>guides/source/active_record_validations.md
<ide> to map the association. This way, it is not only checked that the foreign key
<ide> is not empty but also that the referenced object exists.
<ide>
<ide> ```ruby
<del>class LineItem < ApplicationRecord
<del> belongs_to :order
<del> validates :order, presence: true
<add>class Supplier < ApplicationRecord
<add> has_one :account
<add> validates :account, presence: true
<ide> end
<ide> ```
<ide> | 1 |
Text | Text | adjust readme to match ie support | 0b4e70d2ba9ee8bd951adf9fe26593025e299298 | <ide><path>README.md
<ide> Promise based HTTP client for the browser and node.js
<ide>
<ide>  |  |  |  |  |  |
<ide> --- | --- | --- | --- | --- | --- |
<del>Latest ✔ | Latest ✔ | Latest ✔ | Latest ✔ | Latest ✔ | 8+ ✔ |
<add>Latest ✔ | Latest ✔ | Latest ✔ | Latest ✔ | Latest ✔ | 11 ✔ |
<ide>
<ide> [](https://saucelabs.com/u/axios)
<ide> | 1 |
Go | Go | add volume events | 9d12d093009d3c4bf3bd4ebad3f8327c36d2d584 | <ide><path>container/container_unix.go
<ide> func detachMounted(path string) error {
<ide> }
<ide>
<ide> // UnmountVolumes unmounts all volumes
<del>func (container *Container) UnmountVolumes(forceSyscall bool) error {
<add>func (container *Container) UnmountVolumes(forceSyscall bool, volumeEventLog func(name, action string, attributes map[string]string)) error {
<ide> var (
<ide> volumeMounts []volume.MountPoint
<ide> err error
<ide> func (container *Container) UnmountVolumes(forceSyscall bool) error {
<ide> if err := volumeMount.Volume.Unmount(); err != nil {
<ide> return err
<ide> }
<add>
<add> attributes := map[string]string{
<add> "driver": volumeMount.Volume.DriverName(),
<add> "container": container.ID,
<add> }
<add> volumeEventLog(volumeMount.Volume.Name(), "unmount", attributes)
<ide> }
<ide> }
<ide>
<ide><path>container/container_windows.go
<ide> func (container *Container) IpcMounts() []execdriver.Mount {
<ide> }
<ide>
<ide> // UnmountVolumes explicitly unmounts volumes from the container.
<del>func (container *Container) UnmountVolumes(forceSyscall bool) error {
<add>func (container *Container) UnmountVolumes(forceSyscall bool, volumeEventLog func(name, action string, attributes map[string]string)) error {
<ide> return nil
<ide> }
<ide>
<ide><path>daemon/archive.go
<ide> func (daemon *Daemon) containerStatPath(container *container.Container, path str
<ide> defer daemon.Unmount(container)
<ide>
<ide> err = daemon.mountVolumes(container)
<del> defer container.UnmountVolumes(true)
<add> defer container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> func (daemon *Daemon) containerArchivePath(container *container.Container, path
<ide> defer func() {
<ide> if err != nil {
<ide> // unmount any volumes
<del> container.UnmountVolumes(true)
<add> container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> // unmount the container's rootfs
<ide> daemon.Unmount(container)
<ide> }
<ide> func (daemon *Daemon) containerArchivePath(container *container.Container, path
<ide>
<ide> content = ioutils.NewReadCloserWrapper(data, func() error {
<ide> err := data.Close()
<del> container.UnmountVolumes(true)
<add> container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> daemon.Unmount(container)
<ide> container.Unlock()
<ide> return err
<ide> func (daemon *Daemon) containerExtractToDir(container *container.Container, path
<ide> defer daemon.Unmount(container)
<ide>
<ide> err = daemon.mountVolumes(container)
<del> defer container.UnmountVolumes(true)
<add> defer container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (daemon *Daemon) containerCopy(container *container.Container, resource str
<ide> defer func() {
<ide> if err != nil {
<ide> // unmount any volumes
<del> container.UnmountVolumes(true)
<add> container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> // unmount the container's rootfs
<ide> daemon.Unmount(container)
<ide> }
<ide> func (daemon *Daemon) containerCopy(container *container.Container, resource str
<ide>
<ide> reader := ioutils.NewReadCloserWrapper(archive, func() error {
<ide> err := archive.Close()
<del> container.UnmountVolumes(true)
<add> container.UnmountVolumes(true, daemon.LogVolumeEvent)
<ide> daemon.Unmount(container)
<ide> container.Unlock()
<ide> return err
<ide><path>daemon/container_operations_unix.go
<ide> func (daemon *Daemon) DisconnectFromNetwork(container *container.Container, n li
<ide> if err := container.ToDiskLocking(); err != nil {
<ide> return fmt.Errorf("Error saving container to disk: %v", err)
<ide> }
<add>
<add> attributes := map[string]string{
<add> "container": container.ID,
<add> }
<add> daemon.LogNetworkEventWithAttributes(n, "disconnect", attributes)
<ide> return nil
<ide> }
<ide>
<ide> func (daemon *Daemon) releaseNetwork(container *container.Container) {
<ide> }
<ide>
<ide> sid := container.NetworkSettings.SandboxID
<del> networks := container.NetworkSettings.Networks
<del> for n := range networks {
<del> networks[n] = &networktypes.EndpointSettings{}
<add> settings := container.NetworkSettings.Networks
<add> for n := range settings {
<add> settings[n] = &networktypes.EndpointSettings{}
<ide> }
<ide>
<ide> container.NetworkSettings = &network.Settings{Networks: networks}
<ide>
<del> if sid == "" || len(networks) == 0 {
<add> if sid == "" || len(settings) == 0 {
<ide> return
<ide> }
<ide>
<ide> func (daemon *Daemon) releaseNetwork(container *container.Container) {
<ide> if err := sb.Delete(); err != nil {
<ide> logrus.Errorf("Error deleting sandbox id %s for container %s: %v", sid, container.ID, err)
<ide> }
<add>
<add> attributes := map[string]string{
<add> "container": container.ID,
<add> }
<add> for nwID := range settings {
<add> daemon.logNetworkEventWithID(nwID, "disconnect", attributes)
<add> }
<ide> }
<ide>
<ide> func (daemon *Daemon) setupIpcDirs(c *container.Container) error {
<ide><path>daemon/create.go
<ide> func (daemon *Daemon) VolumeCreate(name, driverName string, opts map[string]stri
<ide> if (driverName != "" && v.DriverName() != driverName) || (driverName == "" && v.DriverName() != volume.DefaultDriverName) {
<ide> return nil, derr.ErrorVolumeNameTaken.WithArgs(name, v.DriverName())
<ide> }
<add>
<add> if driverName == "" {
<add> driverName = volume.DefaultDriverName
<add> }
<add> daemon.LogVolumeEvent(name, "create", map[string]string{"driver": driverName})
<ide> return volumeToAPIType(v), nil
<ide> }
<ide><path>daemon/delete.go
<ide> func (daemon *Daemon) VolumeRm(name string) error {
<ide> }
<ide> return derr.ErrorCodeRmVolume.WithArgs(name, err)
<ide> }
<add> daemon.LogVolumeEvent(v.Name(), "destroy", map[string]string{"driver": v.DriverName()})
<ide> return nil
<ide> }
<ide><path>daemon/events.go
<ide> func (daemon *Daemon) LogNetworkEvent(nw libnetwork.Network, action string) {
<ide> func (daemon *Daemon) LogNetworkEventWithAttributes(nw libnetwork.Network, action string, attributes map[string]string) {
<ide> attributes["name"] = nw.Name()
<ide> attributes["type"] = nw.Type()
<add> daemon.logNetworkEventWithID(nw.ID(), action, attributes)
<add>}
<ide>
<add>func (daemon *Daemon) logNetworkEventWithID(id, action string, attributes map[string]string) {
<ide> actor := events.Actor{
<del> ID: nw.ID(),
<add> ID: id,
<ide> Attributes: attributes,
<ide> }
<ide> daemon.EventsService.Log(action, events.NetworkEventType, actor)
<ide><path>daemon/start.go
<ide> func (daemon *Daemon) Cleanup(container *container.Container) {
<ide> daemon.unregisterExecCommand(container, eConfig)
<ide> }
<ide>
<del> if err := container.UnmountVolumes(false); err != nil {
<add> if err := container.UnmountVolumes(false, daemon.LogVolumeEvent); err != nil {
<ide> logrus.Warnf("%s cleanup: Failed to umount volumes: %v", container.ID, err)
<ide> }
<ide> }
<ide><path>daemon/volumes_unix.go
<ide> package daemon
<ide> import (
<ide> "os"
<ide> "sort"
<add> "strconv"
<ide>
<ide> "github.com/docker/docker/container"
<ide> "github.com/docker/docker/daemon/execdriver"
<ide> func (daemon *Daemon) setupMounts(container *container.Container) ([]execdriver.
<ide> Writable: m.RW,
<ide> Propagation: m.Propagation,
<ide> }
<add> if m.Volume != nil {
<add> attributes := map[string]string{
<add> "driver": m.Volume.DriverName(),
<add> "container": container.ID,
<add> "destination": m.Destination,
<add> "read/write": strconv.FormatBool(m.RW),
<add> "propagation": m.Propagation,
<add> }
<add> daemon.LogVolumeEvent(m.Volume.Name(), "mount", attributes)
<add> }
<ide> mounts = append(mounts, mnt)
<ide> }
<ide> }
<ide><path>integration-cli/docker_cli_events_test.go
<ide> func (s *DockerSuite) TestEventsFilterContainer(c *check.C) {
<ide> func (s *DockerSuite) TestEventsStreaming(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide>
<del> eventCreate := make(chan struct{})
<del> eventStart := make(chan struct{})
<del> eventDie := make(chan struct{})
<del> eventDestroy := make(chan struct{})
<del>
<ide> observer, err := newEventObserver(c)
<ide> c.Assert(err, checker.IsNil)
<ide> err = observer.Start()
<ide> func (s *DockerSuite) TestEventsStreaming(c *check.C) {
<ide>
<ide> out, _ := dockerCmd(c, "run", "-d", "busybox:latest", "true")
<ide> containerID := strings.TrimSpace(out)
<del> matchCreate := regexp.MustCompile(containerID + `: \(from busybox:latest\) create\z`)
<del> matchStart := regexp.MustCompile(containerID + `: \(from busybox:latest\) start\z`)
<del> matchDie := regexp.MustCompile(containerID + `: \(from busybox:latest\) die\z`)
<del> matchDestroy := regexp.MustCompile(containerID + `: \(from busybox:latest\) destroy\z`)
<del>
<del> matcher := func(text string) {
<del> switch {
<del> case matchCreate.MatchString(text):
<del> close(eventCreate)
<del> case matchStart.MatchString(text):
<del> close(eventStart)
<del> case matchDie.MatchString(text):
<del> close(eventDie)
<del> case matchDestroy.MatchString(text):
<del> close(eventDestroy)
<del> }
<del> }
<del> go observer.Match(matcher)
<ide>
<del> select {
<del> case <-time.After(5 * time.Second):
<del> c.Fatal(observer.TimeoutError(containerID, "create"))
<del> case <-testActions["create"]:
<del> // ignore, done
<add> testActions := map[string]chan bool{
<add> "create": make(chan bool),
<add> "start": make(chan bool),
<add> "die": make(chan bool),
<add> "destroy": make(chan bool),
<ide> }
<ide>
<del> select {
<del> case <-time.After(5 * time.Second):
<del> c.Fatal(observer.TimeoutError(containerID, "start"))
<del> case <-testActions["start"]:
<del> // ignore, done
<del> }
<add> go observer.Match(matchEventLine(containerID, "container", testActions))
<ide>
<ide> select {
<ide> case <-time.After(5 * time.Second):
<del> c.Fatal(observer.TimeoutError(containerID, "die"))
<add> c.Fatal(observer.TimeoutError(containerID, "create/start/die"))
<add> case <-testActions["create"]:
<add> case <-testActions["start"]:
<ide> case <-testActions["die"]:
<ide> // ignore, done
<ide> }
<ide> func (s *DockerSuite) TestEventsStreaming(c *check.C) {
<ide> select {
<ide> case <-time.After(5 * time.Second):
<ide> c.Fatal(observer.TimeoutError(containerID, "destroy"))
<del> case <-eventDestroy:
<add> case <-testActions["destroy"]:
<ide> // ignore, done
<ide> }
<ide> }
<ide><path>integration-cli/docker_cli_events_unix_test.go
<ide> func (s *DockerSuite) TestEventsContainerFilterBeforeCreate(c *check.C) {
<ide> <-ch
<ide> c.Assert(out, checker.Contains, cID, check.Commentf("Missing event of container (foo)"))
<ide> }
<add>
<add>func (s *DockerSuite) TestVolumeEvents(c *check.C) {
<add> testRequires(c, DaemonIsLinux)
<add>
<add> since := daemonTime(c).Unix()
<add>
<add> // Observe create/mount volume actions
<add> dockerCmd(c, "volume", "create", "--name", "test-event-volume-local")
<add> dockerCmd(c, "run", "--name", "test-volume-container", "--volume", "test-event-volume-local:/foo", "-d", "busybox", "true")
<add> waitRun("test-volume-container")
<add>
<add> // Observe unmount/destroy volume actions
<add> dockerCmd(c, "rm", "-f", "test-volume-container")
<add> dockerCmd(c, "volume", "rm", "test-event-volume-local")
<add>
<add> out, _ := dockerCmd(c, "events", fmt.Sprintf("--since=%d", since), fmt.Sprintf("--until=%d", daemonTime(c).Unix()))
<add> events := strings.Split(strings.TrimSpace(out), "\n")
<add> c.Assert(len(events), checker.GreaterThan, 4)
<add>
<add> volumeEvents := eventActionsByIDAndType(c, events, "test-event-volume-local", "volume")
<add> c.Assert(volumeEvents, checker.HasLen, 4)
<add> c.Assert(volumeEvents[0], checker.Equals, "create")
<add> c.Assert(volumeEvents[1], checker.Equals, "mount")
<add> c.Assert(volumeEvents[2], checker.Equals, "unmount")
<add> c.Assert(volumeEvents[3], checker.Equals, "destroy")
<add>}
<ide><path>integration-cli/events_utils.go
<ide> import (
<ide> "fmt"
<ide> "io"
<ide> "os/exec"
<add> "regexp"
<ide> "strconv"
<ide> "strings"
<ide> | 12 |
Javascript | Javascript | fix devtools inspector | 6c0f5d11785192c237937fe8cdd5514137a1f5ff | <ide><path>Libraries/Inspector/Inspector.js
<ide> class Inspector extends React.Component<
<ide> _onAgentShowNativeHighlight = node => {
<ide> clearTimeout(this._hideTimeoutID);
<ide>
<del> node.measure((x, y, width, height, left, top) => {
<add> // Shape of `node` is different in Fabric.
<add> const component = node.canonical ?? node;
<add>
<add> component.measure((x, y, width, height, left, top) => {
<ide> this.setState({
<ide> hierarchy: [],
<ide> inspected: { | 1 |
Javascript | Javascript | add missing test for $destroy event | c0d638a94b914edc76c5532c08a47ec4e60308d4 | <ide><path>test/jqLiteSpec.js
<ide> describe('jqLite', function() {
<ide> var scope, a, b, c;
<ide>
<add>
<add> beforeEach(module(provideLog));
<add>
<ide> beforeEach(function() {
<ide> a = jqLite('<div>A</div>')[0];
<ide> b = jqLite('<div>B</div>')[0];
<ide> describe('jqLite', function() {
<ide> expect(jqLite(c).data('prop')).toBeUndefined();
<ide> });
<ide>
<del> it('should call $destroy function if element removed', function() {
<add> it('should emit $destroy event if element removed via remove()', function() {
<ide> var log = '';
<ide> var element = jqLite(a);
<ide> element.bind('$destroy', function() {log+= 'destroy;';});
<ide> element.remove();
<ide> expect(log).toEqual('destroy;');
<ide> });
<ide>
<add>
<add> it('should emit $destroy event if an element is removed via html()', inject(function(log) {
<add> var element = jqLite('<div><span>x</span></div>');
<add> element.find('span').bind('$destroy', log.fn('destroyed'));
<add>
<add> element.html('');
<add>
<add> expect(element.html()).toBe('');
<add> expect(log).toEqual('destroyed');
<add> }));
<add>
<add>
<ide> it('should retrieve all data if called without params', function() {
<ide> var element = jqLite(a);
<ide> expect(element.data()).toEqual({}); | 1 |
Text | Text | update the readme to use jdk1.8 b88 | 23737a4516a2e582b2010ea58cf10d25620035b1 | <ide><path>README.md
<ide> a cross-platform, self-contained bootstrap mechanism for the build.
<ide>
<ide> ### prerequisites
<ide>
<del>[Git][] and the latest [Early Access build of OpenJDK 1.8][JDK18].
<add>[Git][] and [Early Access build of OpenJDK 1.8 build 88][JDK18 build 88].
<ide>
<ide> ### check out sources
<ide> `git clone git://github.com/SpringSource/spring-framework.git`
<ide> The Spring Framework is released under version 2.0 of the [Apache License][].
<ide> [Gradle]: http://gradle.org
<ide> [`./gradlew`]: http://vimeo.com/34436402
<ide> [Git]: http://help.github.com/set-up-git-redirect
<del>[JDK18]: http://jdk8.java.net/download.html
<add>[JDK18 build 88]: https://jdk8.java.net/archive/8-b88.html
<ide> [Gradle build and release FAQ]: https://github.com/SpringSource/spring-framework/wiki/Gradle-build-and-release-FAQ
<ide> [Pull requests]: http://help.github.com/send-pull-requests
<ide> [contributor guidelines]: https://github.com/SpringSource/spring-framework/blob/master/CONTRIBUTING.md | 1 |
Ruby | Ruby | allow accessing version in `livecheck` blocks | 3a4c7223df13161c834d85258ba8567c5e55a722 | <ide><path>Library/Homebrew/livecheck.rb
<ide> # This information is used by the `brew livecheck` command to control its
<ide> # behavior.
<ide> class Livecheck
<add> extend Forwardable
<add>
<ide> # A very brief description of why the formula/cask is skipped (e.g. `No longer
<ide> # developed or maintained`).
<ide> # @return [String, nil]
<ide> def url(val = nil)
<ide> end
<ide> end
<ide>
<add> delegate version: :@formula_or_cask
<add> private :version
<add>
<ide> # Returns a `Hash` of all instance variable values.
<ide> # @return [Hash]
<ide> def to_hash | 1 |
Python | Python | remove final_size parameter of resnet | 8ff6115343e37f6d62cea3f4ab26127799bf8775 | <ide><path>official/resnet/cifar10_main.py
<ide> def __init__(self, resnet_size, data_format=None, num_classes=_NUM_CLASSES,
<ide> first_pool_stride=None,
<ide> block_sizes=[num_blocks] * 3,
<ide> block_strides=[1, 2, 2],
<del> final_size=64,
<ide> resnet_version=resnet_version,
<ide> data_format=data_format,
<ide> dtype=dtype
<ide><path>official/resnet/imagenet_main.py
<ide> def __init__(self, resnet_size, data_format=None, num_classes=_NUM_CLASSES,
<ide> # For bigger models, we want to use "bottleneck" layers
<ide> if resnet_size < 50:
<ide> bottleneck = False
<del> final_size = 512
<ide> else:
<ide> bottleneck = True
<del> final_size = 2048
<ide>
<ide> super(ImagenetModel, self).__init__(
<ide> resnet_size=resnet_size,
<ide> def __init__(self, resnet_size, data_format=None, num_classes=_NUM_CLASSES,
<ide> first_pool_stride=2,
<ide> block_sizes=_get_block_sizes(resnet_size),
<ide> block_strides=[1, 2, 2, 2],
<del> final_size=final_size,
<ide> resnet_version=resnet_version,
<ide> data_format=data_format,
<ide> dtype=dtype
<ide><path>official/resnet/resnet_model.py
<ide> def __init__(self, resnet_size, bottleneck, num_classes, num_filters,
<ide> kernel_size,
<ide> conv_stride, first_pool_size, first_pool_stride,
<ide> block_sizes, block_strides,
<del> final_size, resnet_version=DEFAULT_VERSION, data_format=None,
<add> resnet_version=DEFAULT_VERSION, data_format=None,
<ide> dtype=DEFAULT_DTYPE):
<ide> """Creates a model for classifying an image.
<ide>
<ide> def __init__(self, resnet_size, bottleneck, num_classes, num_filters,
<ide> i-th set.
<ide> block_strides: List of integers representing the desired stride size for
<ide> each of the sets of block layers. Should be same length as block_sizes.
<del> final_size: The expected size of the model after the second pooling.
<ide> resnet_version: Integer representing which version of the ResNet network
<ide> to use. See README for details. Valid values: [1, 2]
<ide> data_format: Input format ('channels_last', 'channels_first', or None).
<ide> def __init__(self, resnet_size, bottleneck, num_classes, num_filters,
<ide> self.first_pool_stride = first_pool_stride
<ide> self.block_sizes = block_sizes
<ide> self.block_strides = block_strides
<del> self.final_size = final_size
<ide> self.dtype = dtype
<ide> self.pre_activation = resnet_version == 2
<ide>
<ide> def __call__(self, inputs, training):
<ide> inputs = tf.reduce_mean(inputs, axes, keepdims=True)
<ide> inputs = tf.identity(inputs, 'final_reduce_mean')
<ide>
<del> inputs = tf.reshape(inputs, [-1, self.final_size])
<add> inputs = tf.squeeze(inputs, axes)
<ide> inputs = tf.layers.dense(inputs=inputs, units=self.num_classes)
<ide> inputs = tf.identity(inputs, 'final_dense')
<ide> return inputs | 3 |
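The ResNet patch above removes the hard-coded final_size and replaces the post-pooling tf.reshape with a squeeze of the size-1 spatial axes. The NumPy stand-in below is illustrative only (the shapes and the batch and channels values are assumptions, not the model code); it shows why the two produce the same [batch, channels] result while the squeeze needs no channel count up front.

```python
# Illustrative NumPy stand-in for the TensorFlow change above (not model code).
import numpy as np

batch, channels = 8, 64
pooled = np.random.rand(batch, channels, 1, 1)   # e.g. channels_first pooled output

old_way = pooled.reshape(-1, channels)           # needed final_size (= channels) known in advance
new_way = np.squeeze(pooled, axis=(2, 3))        # just drops the size-1 spatial axes

assert old_way.shape == new_way.shape == (batch, channels)
assert np.array_equal(old_way, new_way)
```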
Ruby | Ruby | add test for env.fortran | 3fc6cc1a3a4b8f1b7ca42dc0a7dd7cf8fad91b18 | <ide><path>Library/Homebrew/rubocops/lines_cop.rb
<ide> def audit_formula(_node, _class_node, _parent_class_node, body_node)
<ide> problem "Use 'build.head?' instead of inspecting 'version'"
<ide> end
<ide>
<del> # find_instance_method_call(body_node, :ENV, :fortran) do
<del> # next if depends_on?(:fortran)
<del> # problem "Use `depends_on :fortran` instead of `ENV.fortran`"
<del> # end
<del> #
<add> find_instance_method_call(body_node, "ENV", :fortran) do
<add> next if depends_on?(:fortran)
<add> problem "Use `depends_on :fortran` instead of `ENV.fortran`"
<add> end
<add>
<ide> # find_instance_method_call(body_node, :ARGV, :include?) do |m|
<ide> # param = parameters(m).first
<ide> # next unless match = regex_match_group(param, %r{--(HEAD|devel)})
<ide><path>Library/Homebrew/test/rubocops/lines_cop_spec.rb
<ide> class Foo < Formula
<ide> end
<ide> end
<ide>
<add> it "with ENV.fortran" do
<add> source = <<-EOS.undent
<add> class Foo < Formula
<add> desc "foo"
<add> url 'http://example.com/foo-1.0.tgz'
<add> test do
<add> ENV.fortran
<add> end
<add> end
<add> EOS
<add>
<add> expected_offenses = [{ message: "Use `depends_on :fortran` instead of `ENV.fortran`",
<add> severity: :convention,
<add> line: 5,
<add> column: 4,
<add> source: source }]
<add>
<add> inspect_source(cop, source)
<add>
<add> expected_offenses.zip(cop.offenses).each do |expected, actual|
<add> expect_offense(expected, actual)
<add> end
<add> end
<add>
<ide> end
<ide> def expect_offense(expected, actual)
<ide> expect(actual.message).to eq(expected[:message]) | 2 |
Go | Go | normalize comment formatting | 5331e6ab2d4b3cbf1a33db65ce3e11cb047cef1f | <ide><path>pkg/tailfile/tailfile.go
<ide> var eol = []byte("\n")
<ide> // ErrNonPositiveLinesNumber is an error returned if the lines number was negative.
<ide> var ErrNonPositiveLinesNumber = errors.New("The number of lines to extract from the file must be positive")
<ide>
<del>//TailFile returns last n lines of the passed in file.
<add>// TailFile returns last n lines of the passed in file.
<ide> func TailFile(f *os.File, n int) ([][]byte, error) {
<ide> size, err := f.Seek(0, io.SeekEnd)
<ide> if err != nil { | 1 |
Java | Java | add permanent/temporary redirect to serverresponse | 56d669f849d4b4caaef97b93385c873ac810ecd9 | <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/ServerResponse.java
<ide> static BodyBuilder status(HttpStatus status) {
<ide> }
<ide>
<ide> /**
<del> * Create a builder with the status set to {@linkplain HttpStatus#OK OK}.
<add> * Create a builder with the status set to {@linkplain HttpStatus#OK 200 OK}.
<ide> * @return the created builder
<ide> */
<ide> static BodyBuilder ok() {
<ide> return status(HttpStatus.OK);
<ide> }
<ide>
<ide> /**
<del> * Create a new builder with a {@linkplain HttpStatus#CREATED CREATED} status
<add> * Create a new builder with a {@linkplain HttpStatus#CREATED 201 Created} status
<ide> * and a location header set to the given URI.
<ide> * @param location the location URI
<ide> * @return the created builder
<ide> static BodyBuilder created(URI location) {
<ide> }
<ide>
<ide> /**
<del> * Create a builder with an {@linkplain HttpStatus#ACCEPTED ACCEPTED} status.
<add> * Create a builder with an {@linkplain HttpStatus#ACCEPTED 202 Accepted} status.
<ide> * @return the created builder
<ide> */
<ide> static BodyBuilder accepted() {
<ide> return status(HttpStatus.ACCEPTED);
<ide> }
<ide>
<ide> /**
<del> * Create a builder with a {@linkplain HttpStatus#NO_CONTENT NO_CONTENT} status.
<add> * Create a builder with a {@linkplain HttpStatus#NO_CONTENT 204 No Content} status.
<ide> * @return the created builder
<ide> */
<ide> static HeadersBuilder<?> noContent() {
<ide> return status(HttpStatus.NO_CONTENT);
<ide> }
<ide>
<ide> /**
<del> * Create a builder with a {@linkplain HttpStatus#BAD_REQUEST BAD_REQUEST} status.
<add> * Create a builder with a {@linkplain HttpStatus#TEMPORARY_REDIRECT 307 Temporary Redirect}
<add> * status and a location header set to the given URI.
<add> * @param location the location URI
<add> * @return the created builder
<add> */
<add> static BodyBuilder temporaryRedirect(URI location) {
<add> BodyBuilder builder = status(HttpStatus.TEMPORARY_REDIRECT);
<add> return builder.location(location);
<add> }
<add>
<add> /**
<add> * Create a builder with a {@linkplain HttpStatus#PERMANENT_REDIRECT 308 Permanent Redirect}
<add> * status and a location header set to the given URI.
<add> * @param location the location URI
<add> * @return the created builder
<add> */
<add> static BodyBuilder permanentRedirect(URI location) {
<add> BodyBuilder builder = status(HttpStatus.PERMANENT_REDIRECT);
<add> return builder.location(location);
<add> }
<add>
<add> /**
<add> * Create a builder with a {@linkplain HttpStatus#BAD_REQUEST 400 Bad Request} status.
<ide> * @return the created builder
<ide> */
<ide> static BodyBuilder badRequest() {
<ide> return status(HttpStatus.BAD_REQUEST);
<ide> }
<ide>
<ide> /**
<del> * Create a builder with a {@linkplain HttpStatus#NOT_FOUND NOT_FOUND} status.
<add> * Create a builder with a {@linkplain HttpStatus#NOT_FOUND 404 Not Found} status.
<ide> *
<ide> * @return the created builder
<ide> */
<ide> static HeadersBuilder<?> notFound() {
<ide>
<ide> /**
<ide> * Create a builder with an
<del> * {@linkplain HttpStatus#UNPROCESSABLE_ENTITY UNPROCESSABLE_ENTITY} status.
<add> * {@linkplain HttpStatus#UNPROCESSABLE_ENTITY 422 Unprocessable Entity} status.
<ide> * @return the created builder
<ide> */
<ide> static BodyBuilder unprocessableEntity() {
<ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/server/DefaultServerResponseBuilderTests.java
<ide> import org.springframework.mock.http.server.reactive.test.MockServerHttpResponse;
<ide> import org.springframework.web.server.ServerWebExchange;
<ide>
<del>import static org.junit.Assert.*;
<del>import static org.mockito.Mockito.*;
<add>import static org.junit.Assert.assertEquals;
<add>import static org.mockito.Mockito.mock;
<add>import static org.mockito.Mockito.when;
<ide>
<ide> /**
<ide> * @author Arjen Poutsma
<ide> public void noContent() throws Exception {
<ide>
<ide> }
<ide>
<add> @Test
<add> public void temporaryRedirect() throws Exception {
<add> URI location = URI.create("http://example.com");
<add> Mono<ServerResponse> result = ServerResponse.temporaryRedirect(location).build();
<add> StepVerifier.create(result)
<add> .expectNextMatches(response -> HttpStatus.TEMPORARY_REDIRECT.equals(response.statusCode()) &&
<add> location.equals(response.headers().getLocation()))
<add> .expectComplete()
<add> .verify();
<add> }
<add>
<add> @Test
<add> public void permanentRedirect() throws Exception {
<add> URI location = URI.create("http://example.com");
<add> Mono<ServerResponse> result = ServerResponse.permanentRedirect(location).build();
<add> StepVerifier.create(result)
<add> .expectNextMatches(response -> HttpStatus.PERMANENT_REDIRECT.equals(response.statusCode()) &&
<add> location.equals(response.headers().getLocation()))
<add> .expectComplete()
<add> .verify();
<add> }
<add>
<ide> @Test
<ide> public void badRequest() throws Exception {
<ide> Mono<ServerResponse> result = ServerResponse.badRequest().build(); | 2 |
PHP | PHP | add missing docblocks | 9475cb4a99e88723f192f645cc4dbcd15b6796e1 | <ide><path>src/Illuminate/Container/BoundMethod.php
<ide> protected static function normalizeMethod($callback)
<ide> /**
<ide> * Get all dependencies for a given method.
<ide> *
<del> * @param \Illuminate\Container\Container
<add> * @param \Illuminate\Container\Container $container
<ide> * @param callable|string $callback
<ide> * @param array $parameters
<ide> * @return array
<ide><path>src/Illuminate/Database/Eloquent/Relations/BelongsToMany.php
<ide> class BelongsToMany extends Relation
<ide> * @param string $foreignPivotKey
<ide> * @param string $relatedPivotKey
<ide> * @param string $parentKey
<del> * @param string $localKey
<add> * @param string $relatedKey
<ide> * @param string $relationName
<ide> * @return void
<ide> */
<ide><path>src/Illuminate/Database/Events/QueryExecuted.php
<ide> class QueryExecuted
<ide> * @param string $sql
<ide> * @param array $bindings
<ide> * @param float $time
<del> * @param
<add> * @param string $connection
<add> * @return void
<ide> */
<ide> public function __construct($sql, $bindings, $time, $connection)
<ide> {
<ide><path>src/Illuminate/Notifications/Messages/SlackMessage.php
<ide> public function linkNames()
<ide> /**
<ide> * Find and link channel names and usernames.
<ide> *
<add> * @param string $unfurl
<ide> * @return $this
<ide> */
<ide> public function unfurlLinks($unfurl)
<ide> public function unfurlLinks($unfurl)
<ide> /**
<ide> * Find and link channel names and usernames.
<ide> *
<add> * @param string $unfurl
<ide> * @return $this
<ide> */
<ide> public function unfurlMedia($unfurl) | 4 |
Python | Python | replace getters/setters with properties - round 5 | 308a8b6b50e540711a02437e4a5105fcc49ef114 | <ide><path>glances/outputs/glances_bars.py
<ide> class Bar(object):
<ide> import time
<ide> b = Bar(10)
<ide> for p in range(0, 100):
<del> b.set_percent(p)
<add> b.percent = p
<ide> print("\r%s" % b),
<ide> time.sleep(0.1)
<ide> sys.stdout.flush()
<ide> def __init__(self, size,
<ide> self.__empty_char = empty_char
<ide> self.__with_text = with_text
<ide>
<del> def get_size(self, with_decoration=False):
<add> @property
<add> def size(self, with_decoration=False):
<ide> # Return the bar size, with or without decoration
<ide> if with_decoration:
<ide> return self.__size
<ide> if self.__with_text:
<ide> return self.__size - 6
<ide>
<del> def set_size(self, size):
<del> self.__size = size
<add> # @size.setter
<add> # def size(self, value):
<add> # self.__size = value
<ide>
<del> def get_percent(self):
<add> @property
<add> def percent(self):
<ide> return self.__percent
<ide>
<del> def set_percent(self, percent):
<del> assert percent >= 0
<del> assert percent <= 100
<del> self.__percent = percent
<add> @percent.setter
<add> def percent(self, value):
<add> assert value >= 0
<add> assert value <= 100
<add> self.__percent = value
<ide>
<ide> def __str__(self):
<del> """Return the bars"""
<del> frac, whole = modf(self.get_size() * self.get_percent() / 100.0)
<add> """Return the bars."""
<add> frac, whole = modf(self.size * self.percent / 100.0)
<ide> ret = curses_bars[8] * int(whole)
<ide> if frac > 0:
<ide> ret += curses_bars[int(frac * 8)]
<ide> whole += 1
<del> ret += self.__empty_char * int(self.get_size() - whole)
<add> ret += self.__empty_char * int(self.size - whole)
<ide> if self.__with_text:
<del> ret = '{0}{1:>5}%'.format(ret, self.get_percent())
<add> ret = '{0}{1:>5}%'.format(ret, self.percent)
<ide> return self.__pre_char + ret + self.__post_char
<ide><path>glances/plugins/glances_quicklook.py
<ide> def msg_curse(self, args=None, max_width=10):
<ide>
<ide> # Build the string message
<ide> for key in ['cpu', 'mem', 'swap']:
<del> bar.set_percent(self.stats[key])
<add> bar.percent = self.stats[key]
<ide> msg = '{0:>4} '.format(key.upper())
<ide> ret.append(self.curse_add_line(msg))
<ide> msg = '{0}'.format(bar) | 2 |
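The glances changes above convert get_percent/set_percent style accessors into Python properties while keeping the range check in the setter. The minimal sketch below uses a hypothetical Gauge class to show the pattern on its own; it is not the glances code.

```python
# Minimal sketch of the getter/setter -> property pattern used in the patch above.
class Gauge(object):
    def __init__(self):
        self.__percent = 0

    @property
    def percent(self):
        return self.__percent

    @percent.setter
    def percent(self, value):
        assert 0 <= value <= 100
        self.__percent = value


g = Gauge()
g.percent = 42      # goes through the setter, so the range check still runs
print(g.percent)    # 42
```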
Python | Python | show original autodoc signatures | d40288496c18d86d16f8aad144e8cf57c46dd3bf | <ide><path>docs/conf.py
<ide> pygments_style = 'tango'
<ide> html_theme = 'default'
<ide> html_theme_options = {}
<add>
<add>
<add># unwrap decorators
<add>def unwrap_decorators():
<add> import sphinx.util.inspect as inspect
<add> import functools
<add>
<add> old_getargspec = inspect.getargspec
<add> def getargspec(x):
<add> return old_getargspec(getattr(x, '_original_function', x))
<add> inspect.getargspec = getargspec
<add>
<add> old_update_wrapper = functools.update_wrapper
<add> def update_wrapper(wrapper, wrapped, *a, **kw):
<add> rv = old_update_wrapper(wrapper, wrapped, *a, **kw)
<add> rv._original_function = wrapped
<add> return rv
<add> functools.update_wrapper = update_wrapper
<add>
<add>unwrap_decorators()
<add>del unwrap_decorators | 1 |
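The conf.py hook above exists because a wrapper built with functools.update_wrapper (or functools.wraps) exposes the wrapper's generic (*args, **kwargs) signature, which is what autodoc ended up documenting. The sketch below illustrates the underlying issue on modern Python; the logged decorator and add function are made-up names, and the hook itself targeted the Sphinx internals of its time.

```python
# Illustrative only: why a decorated function's visible signature is generic.
import functools
import inspect


def logged(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


@logged
def add(x, y=1):
    return x + y


print(inspect.signature(add, follow_wrapped=False))  # (*args, **kwargs)
print(inspect.signature(add))                        # (x, y=1), recovered via __wrapped__
```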
Text | Text | return largest numbers in array - portuguese | 117e198b1e37f606cd9ea213801a36e3c2d43185 | <ide><path>curriculum/challenges/portuguese/02-javascript-algorithms-and-data-structures/basic-algorithm-scripting/return-largest-numbers-in-arrays.portuguese.md
<ide> largestOfFour([[4, 5, 1, 3], [13, 27, 18, 26], [32, 35, 37, 39], [1000, 1001, 85
<ide> <section id='solution'>
<ide>
<ide> ```js
<del>// solution required
<add>function largestOfFour(arr) {
<add> return arr.map(i => Math.max(...i));
<add>}
<add>
<add>largestOfFour([[4, 5, 1, 3], [13, 27, 18, 26], [32, 35, 37, 39], [1000, 1001, 857, 1]]);
<ide> ```
<ide> </section> | 1 |
Mixed | Javascript | improve createrequire() example | 58a59a8d6ba2de899dd6093e644ee3cf0d4b9a0a | <ide><path>.eslintrc.js
<ide> module.exports = {
<ide> {
<ide> files: [
<ide> 'doc/api/esm.md',
<add> 'doc/api/modules.md',
<ide> 'test/es-module/test-esm-type-flag.js',
<ide> 'test/es-module/test-esm-type-flag-alias.js',
<ide> '*.mjs',
<ide><path>doc/api/modules.md
<ide> added: v12.2.0
<ide> * Returns: {require} Require function
<ide>
<ide> ```js
<del>const { createRequire } = require('module');
<del>const requireUtil = createRequire(require.resolve('../src/utils/'));
<add>import { createRequire } from 'module';
<add>const require = createRequire(import.meta.url);
<ide>
<del>// Require `../src/utils/some-tool`
<del>requireUtil('./some-tool');
<add>// sibling-module.js is a CommonJS module.
<add>const siblingModule = require('./sibling-module');
<ide> ```
<ide>
<ide> ### module.createRequireFromPath(filename) | 2 |
Ruby | Ruby | improve method description | a3c4d035f05287b09f7216a594d333fd0d85650a | <ide><path>actioncable/lib/action_cable/channel/naming.rb
<ide> module Naming
<ide> #
<ide> # ChatChannel.channel_name # => 'chat'
<ide> # Chats::AppearancesChannel.channel_name # => 'chats:appearances'
<add> # FooChats::BarAppearancesChannel.channel_name # => 'foo_chats:bar_appearances'
<ide> def channel_name
<ide> @channel_name ||= name.sub(/Channel$/, '').gsub('::',':').underscore
<ide> end | 1 |
Ruby | Ruby | fix documentation comments for form_tag | e9c09c4b7305b6fa2961e0764f4ecc57cd6ae666 | <ide><path>actionview/lib/action_view/helpers/form_tag_helper.rb
<ide> module FormTagHelper
<ide> #
<ide> # <%= form_tag('/posts', remote: true) %>
<ide> # # => <form action="/posts" method="post" data-remote="true">
<del>
<add> #
<ide> # form_tag(false, method: :get)
<ide> # # => <form method="get">
<ide> # | 1 |
PHP | PHP | support php 7 throwables | 03221c9a0ac66d6b3f6f1b239d125ded467bf360 | <ide><path>src/Illuminate/Database/Connection.php
<ide> use Closure;
<ide> use DateTime;
<ide> use Exception;
<add>use Throwable;
<ide> use LogicException;
<ide> use RuntimeException;
<ide> use Illuminate\Support\Arr;
<ide> public function prepareBindings(array $bindings)
<ide> /**
<ide> * Execute a Closure within a transaction.
<ide> *
<del> * @param \Closure $callback
<add> * @param \Throwable $callback
<ide> * @return mixed
<ide> *
<ide> * @throws \Exception
<ide> public function transaction(Closure $callback)
<ide> catch (Exception $e) {
<ide> $this->rollBack();
<ide>
<add> throw $e;
<add> } catch (Throwable $e) {
<add> $this->rollBack();
<add>
<ide> throw $e;
<ide> }
<ide>
<ide><path>src/Illuminate/Database/ConnectionInterface.php
<ide> public function prepareBindings(array $bindings);
<ide> * @param \Closure $callback
<ide> * @return mixed
<ide> *
<del> * @throws \Exception
<add> * @throws \Throwable
<ide> */
<ide> public function transaction(Closure $callback);
<ide>
<ide><path>src/Illuminate/Database/SqlServerConnection.php
<ide>
<ide> use Closure;
<ide> use Exception;
<add>use Throwable;
<ide> use Doctrine\DBAL\Driver\PDOSqlsrv\Driver as DoctrineDriver;
<ide> use Illuminate\Database\Query\Processors\SqlServerProcessor;
<ide> use Illuminate\Database\Query\Grammars\SqlServerGrammar as QueryGrammar;
<ide> class SqlServerConnection extends Connection
<ide> * @param \Closure $callback
<ide> * @return mixed
<ide> *
<del> * @throws \Exception
<add> * @throws \Throwable
<ide> */
<ide> public function transaction(Closure $callback)
<ide> {
<ide> public function transaction(Closure $callback)
<ide> catch (Exception $e) {
<ide> $this->pdo->exec('ROLLBACK TRAN');
<ide>
<add> throw $e;
<add> } catch (Throwable $e) {
<add> $this->pdo->exec('ROLLBACK TRAN');
<add>
<ide> throw $e;
<ide> }
<ide>
<ide><path>src/Illuminate/Foundation/Bootstrap/HandleExceptions.php
<ide>
<ide> namespace Illuminate\Foundation\Bootstrap;
<ide>
<add>use Exception;
<ide> use ErrorException;
<ide> use Illuminate\Contracts\Foundation\Application;
<ide> use Symfony\Component\Console\Output\ConsoleOutput;
<ide> use Symfony\Component\Debug\Exception\FatalErrorException;
<add>use Symfony\Component\Debug\Exception\FatalThrowableError;
<ide>
<ide> class HandleExceptions
<ide> {
<ide> public function handleError($level, $message, $file = '', $line = 0, $context =
<ide> * the HTTP and Console kernels. But, fatal error exceptions must
<ide> * be handled differently since they are not normal exceptions.
<ide> *
<del> * @param \Exception $e
<add> * @param \Throwable $e
<ide> * @return void
<ide> */
<ide> public function handleException($e)
<ide> {
<add> if (!$e instanceof Exception) {
<add> $e = new FatalThrowableError($e);
<add> }
<add>
<ide> $this->getExceptionHandler()->report($e);
<ide>
<ide> if ($this->app->runningInConsole()) {
<ide><path>src/Illuminate/Foundation/Console/Kernel.php
<ide> namespace Illuminate\Foundation\Console;
<ide>
<ide> use Exception;
<add>use Throwable;
<ide> use Illuminate\Contracts\Events\Dispatcher;
<ide> use Illuminate\Console\Scheduling\Schedule;
<ide> use Illuminate\Console\Application as Artisan;
<ide> use Illuminate\Contracts\Foundation\Application;
<ide> use Illuminate\Contracts\Console\Kernel as KernelContract;
<add>use Symfony\Component\Debug\Exception\FatalThrowableError;
<ide>
<ide> class Kernel implements KernelContract
<ide> {
<ide> public function handle($input, $output = null)
<ide>
<ide> $this->renderException($output, $e);
<ide>
<add> return 1;
<add> } catch (Throwable $e) {
<add> $e = new FatalThrowableError($e);
<add>
<add> $this->reportException($e);
<add>
<add> $this->renderException($output, $e);
<add>
<ide> return 1;
<ide> }
<ide> }
<ide><path>src/Illuminate/Foundation/Http/Kernel.php
<ide> namespace Illuminate\Foundation\Http;
<ide>
<ide> use Exception;
<add>use Throwable;
<ide> use RuntimeException;
<ide> use Illuminate\Routing\Router;
<ide> use Illuminate\Pipeline\Pipeline;
<ide> use Illuminate\Support\Facades\Facade;
<ide> use Illuminate\Contracts\Foundation\Application;
<ide> use Illuminate\Contracts\Http\Kernel as KernelContract;
<add>use Symfony\Component\Debug\Exception\FatalThrowableError;
<ide>
<ide> class Kernel implements KernelContract
<ide> {
<ide> public function handle($request)
<ide> } catch (Exception $e) {
<ide> $this->reportException($e);
<ide>
<add> $response = $this->renderException($request, $e);
<add> } catch (Throwable $e) {
<add> $e = new FatalThrowableError($e);
<add>
<add> $this->reportException($e);
<add>
<ide> $response = $this->renderException($request, $e);
<ide> }
<ide>
<ide><path>src/Illuminate/Queue/SyncQueue.php
<ide> namespace Illuminate\Queue;
<ide>
<ide> use Exception;
<add>use Throwable;
<ide> use Illuminate\Queue\Jobs\SyncJob;
<ide> use Illuminate\Contracts\Queue\Job;
<ide> use Illuminate\Contracts\Queue\Queue as QueueContract;
<ide> class SyncQueue extends Queue implements QueueContract
<ide> * @param mixed $data
<ide> * @param string $queue
<ide> * @return mixed
<del> * @throws \Exception
<add> * @throws \Throwable
<ide> */
<ide> public function push($job, $data = '', $queue = null)
<ide> {
<ide> public function push($job, $data = '', $queue = null)
<ide> } catch (Exception $e) {
<ide> $this->handleFailedJob($queueJob);
<ide>
<add> throw $e;
<add> } catch (Throwable $e) {
<add> $this->handleFailedJob($queueJob);
<add>
<ide> throw $e;
<ide> }
<ide>
<ide><path>src/Illuminate/Queue/Worker.php
<ide> namespace Illuminate\Queue;
<ide>
<ide> use Exception;
<add>use Throwable;
<ide> use Illuminate\Contracts\Queue\Job;
<ide> use Illuminate\Contracts\Events\Dispatcher;
<add>use Illuminate\Contracts\Debug\ExceptionHandler;
<ide> use Illuminate\Queue\Failed\FailedJobProviderInterface;
<add>use Symfony\Component\Debug\Exception\FatalThrowableError;
<ide> use Illuminate\Contracts\Cache\Repository as CacheContract;
<del>use Illuminate\Contracts\Debug\ExceptionHandler;
<ide>
<ide> class Worker
<ide> {
<ide> protected function runNextJobForDaemon($connectionName, $queue, $delay, $sleep,
<ide> if ($this->exceptions) {
<ide> $this->exceptions->report($e);
<ide> }
<add> } catch (Throwable $e) {
<add> if ($this->exceptions) {
<add> $this->exceptions->report(new FatalThrowableError($e));
<add> }
<ide> }
<ide> }
<ide>
<ide> protected function getNextJob($connection, $queue)
<ide> * @param int $delay
<ide> * @return void
<ide> *
<del> * @throws \Exception
<add> * @throws \Throwable
<ide> */
<ide> public function process($connection, Job $job, $maxTries = 0, $delay = 0)
<ide> {
<ide> public function process($connection, Job $job, $maxTries = 0, $delay = 0)
<ide> $job->release($delay);
<ide> }
<ide>
<add> throw $e;
<add> } catch (Throwable $e) {
<add> if (!$job->isDeleted()) {
<add> $job->release($delay);
<add> }
<add>
<ide> throw $e;
<ide> }
<ide> }
<ide><path>src/Illuminate/View/Engines/PhpEngine.php
<ide> namespace Illuminate\View\Engines;
<ide>
<ide> use Exception;
<add>use Throwable;
<add>use Symfony\Component\Debug\Exception\FatalThrowableError;
<ide>
<ide> class PhpEngine implements EngineInterface
<ide> {
<ide> protected function evaluatePath($__path, $__data)
<ide> include $__path;
<ide> } catch (Exception $e) {
<ide> $this->handleViewException($e, $obLevel);
<add> } catch (Throwable $e) {
<add> $this->handleViewException(new FatalThrowableError($e), $obLevel);
<ide> }
<ide>
<ide> return ltrim(ob_get_clean()); | 9 |