content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40) | patch (stringlengths 52-962k) | file_count (int64, 1-300) |
---|---|---|---|---|---|
Javascript | Javascript | pass context to dom wrappers, use it in <option> | 2fafe3eb76e7cb9133edf113166eb7144ac81b7a | <ide><path>src/renderers/dom/client/wrappers/ReactDOMOption.js
<ide> var ReactChildren = require('ReactChildren');
<ide> var ReactClass = require('ReactClass');
<ide> var ReactDOMSelect = require('ReactDOMSelect');
<ide> var ReactElement = require('ReactElement');
<del>var ReactInstanceMap = require('ReactInstanceMap');
<ide> var ReactPropTypes = require('ReactPropTypes');
<ide>
<ide> var assign = require('Object.assign');
<ide> var ReactDOMOption = ReactClass.createClass({
<ide> }
<ide>
<ide> // Look up whether this option is 'selected' via parent-based context
<del> var context = ReactInstanceMap.get(this)._context;
<add> var context = this.context;
<ide> var selectValue = context[valueContextKey];
<ide>
<ide> // If context key is null (e.g., no specified value or after initial mount)
<ide><path>src/renderers/shared/reconciler/ReactCompositeComponent.js
<ide> var ReactCompositeComponentMixin = {
<ide> */
<ide> _maskContext: function(context) {
<ide> var maskedContext = null;
<del> // This really should be getting the component class for the element,
<del> // but we know that we're not going to need it for built-ins.
<del> if (typeof this._currentElement.type === 'string') {
<del> return emptyObject;
<del> }
<del> var contextTypes = this._currentElement.type.contextTypes;
<add> var Component = ReactNativeComponent.getComponentClassForElement(
<add> this._currentElement
<add> );
<add> var contextTypes = Component.contextTypes;
<ide> if (!contextTypes) {
<ide> return emptyObject;
<ide> } | 2 |
Python | Python | add `versionadded` directives to `numpy.typing` | 82d991c9fbdab578670c6fc66b254c97c4a8446f | <ide><path>numpy/typing/__init__.py
<ide> Typing (:mod:`numpy.typing`)
<ide> ============================
<ide>
<add>.. versionadded:: 1.20
<add>
<ide> .. warning::
<ide>
<ide> Some of the types in this module rely on features only present in
<ide> Mypy plugin
<ide> -----------
<ide>
<add>.. versionadded:: 1.21
<add>
<ide> .. automodule:: numpy.typing.mypy_plugin
<ide>
<ide> Differences from the runtime NumPy API
<ide> class NBitBase:
<ide> Each subsequent subclass is herein used for representing a lower level
<ide> of precision, *e.g.* ``64Bit > 32Bit > 16Bit``.
<ide>
<add> .. versionadded:: 1.20
<add>
<ide> Examples
<ide> --------
<ide> Below is a typical usage example: `NBitBase` is herein used for annotating a
<ide><path>numpy/typing/_add_docstring.py
<ide> def _parse_docstrings() -> str:
<ide> * (Nested) sequences.
<ide> * Objects implementing the `~class.__array__` protocol.
<ide>
<add> .. versionadded:: 1.20
<add>
<ide> See Also
<ide> --------
<ide> :term:`array_like`:
<ide> def _parse_docstrings() -> str:
<ide> * Character codes or the names of :class:`type` objects.
<ide> * Objects with the ``.dtype`` attribute.
<ide>
<add> .. versionadded:: 1.20
<add>
<ide> See Also
<ide> --------
<ide> :ref:`Specifying and constructing data types <arrays.dtypes.constructing>`
<ide> def _parse_docstrings() -> str:
<ide> Can be used during runtime for typing arrays with a given dtype
<ide> and unspecified shape.
<ide>
<add> .. versionadded:: 1.21
<add>
<ide> Examples
<ide> --------
<ide> .. code-block:: python
<ide><path>numpy/typing/mypy_plugin.py
<ide> likes of `~numpy.float128` and `~numpy.complex256`. Without the plugin *all*
<ide> extended-precision types will, as far as mypy is concerned, be available
<ide> to all platforms.
<del>* Assigning the (platform-dependent) precision of `~numpy.ctypeslib.c_intp`.
<del> Without the plugin aforementioned type will default to `ctypes.c_int64`.
<del>
<add>* .. versionadded:: 1.22
<add> Assigning the (platform-dependent) precision of `~numpy.ctypeslib.c_intp`.
<add> Without the plugin aforementioned type will default to `ctypes.c_int64`.
<ide>
<ide> Examples
<ide> -------- | 3 |
Javascript | Javascript | update chrome on sl to 34 | 28ef2637c111d2940d2062c3cc8be29e7984f35a | <ide><path>karma-shared.conf.js
<ide> module.exports = function(config, specificOptions) {
<ide> customLaunchers: {
<ide> 'SL_Chrome': {
<ide> base: 'SauceLabs',
<del> browserName: 'chrome'
<add> browserName: 'chrome',
<add> version: '34'
<ide> },
<ide> 'SL_Firefox': {
<ide> base: 'SauceLabs', | 1 |
Javascript | Javascript | fix object id counter | f8ed7bb31a821561aaaeee742b565c5c3b5c2e43 | <ide><path>src/evaluator.js
<ide> var PartialEvaluator = (function PartialEvaluatorClosure() {
<ide> font = xref.fetchIfRef(font) || fontRes.get(fontName);
<ide> assertWellFormed(isDict(font));
<ide>
<add> ++self.objIdCounter;
<ide> if (!font.loadedName) {
<del> ++self.objIdCounter;
<ide> font.translated = self.translateFont(font, xref, resources,
<ide> dependency);
<ide> if (font.translated) { | 1 |
Python | Python | remove trailing whitespace | 52e7d634dfd9cbc864291dc2d760f4f679ce5d20 | <ide><path>spacy/tests/tagger/test_lemmatizer.py
<ide> def test_noun_lemmas(lemmatizer):
<ide> def test_base_form_dive(lemmatizer):
<ide> if lemmatizer is None:
<ide> return None
<del>
<add>
<ide> do = lemmatizer.noun
<ide> assert do('dive', number='sing') == set(['dive'])
<ide> assert do('dive', number='plur') == set(['diva'])
<ide> def test_base_form_dive(lemmatizer):
<ide> def test_base_form_saw(lemmatizer):
<ide> if lemmatizer is None:
<ide> return None
<del>
<add>
<ide> do = lemmatizer.verb
<ide> assert do('saw', verbform='past') == set(['see'])
<ide>
<ide>
<ide> def test_smart_quotes(lemmatizer):
<ide> if lemmatizer is None:
<ide> return None
<del>
<add>
<ide> do = lemmatizer.punct
<ide> assert do('“') == set(['"'])
<ide> assert do('“') == set(['"'])
<ide> def test_smart_quotes(lemmatizer):
<ide> def test_pickle_lemmatizer(lemmatizer):
<ide> if lemmatizer is None:
<ide> return None
<del>
<add>
<ide> file_ = io.BytesIO()
<ide> pickle.dump(lemmatizer, file_)
<ide>
<ide> file_.seek(0)
<del>
<add>
<ide> loaded = pickle.load(file_) | 1 |
PHP | PHP | escape single quotes after helper serialization | a4aaaff53ac4b04477dd929b30f9986f222b1c79 | <ide><path>lib/Cake/View/Helper/CacheHelper.php
<ide> protected function _writeFile($content, $timestamp, $useCallbacks = false) {
<ide> $response = new CakeResponse(array("charset" => Configure::read("App.encoding")));
<ide> $controller = new ' . $this->_View->name . 'Controller($request, $response);
<ide> $controller->plugin = $this->plugin = \'' . $this->_View->plugin . '\';
<del> $controller->helpers = $this->helpers = unserialize(\'' . serialize($this->_View->helpers) . '\');
<add> $controller->helpers = $this->helpers = unserialize(\'' . str_replace("'", "\'", serialize($this->_View->helpers)) . '\');
<ide> $controller->layout = $this->layout = \'' . $this->_View->layout. '\';
<ide> $controller->theme = $this->theme = \'' . $this->_View->theme . '\';
<ide> $controller->viewVars = $this->viewVars = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->viewVars)) . '\')); | 1 |
Python | Python | add better types for app decorators | af34b8c9e7adbf0b060738fee12f04c1e46a98fd | <ide><path>src/flask/scaffold.py
<ide> def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]:
<ide>
<ide> return open(os.path.join(self.root_path, resource), mode)
<ide>
<del> def _method_route(self, method: str, rule: str, options: dict) -> t.Callable:
<add> def _method_route(
<add> self,
<add> method: str,
<add> rule: str,
<add> options: dict,
<add> ) -> t.Callable[[F], F]:
<ide> if "methods" in options:
<ide> raise TypeError("Use the 'route' decorator to use the 'methods' argument.")
<ide>
<ide> return self.route(rule, methods=[method], **options)
<ide>
<del> def get(self, rule: str, **options: t.Any) -> t.Callable:
<add> def get(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Shortcut for :meth:`route` with ``methods=["GET"]``.
<ide>
<ide> .. versionadded:: 2.0
<ide> """
<ide> return self._method_route("GET", rule, options)
<ide>
<del> def post(self, rule: str, **options: t.Any) -> t.Callable:
<add> def post(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Shortcut for :meth:`route` with ``methods=["POST"]``.
<ide>
<ide> .. versionadded:: 2.0
<ide> """
<ide> return self._method_route("POST", rule, options)
<ide>
<del> def put(self, rule: str, **options: t.Any) -> t.Callable:
<add> def put(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Shortcut for :meth:`route` with ``methods=["PUT"]``.
<ide>
<ide> .. versionadded:: 2.0
<ide> """
<ide> return self._method_route("PUT", rule, options)
<ide>
<del> def delete(self, rule: str, **options: t.Any) -> t.Callable:
<add> def delete(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Shortcut for :meth:`route` with ``methods=["DELETE"]``.
<ide>
<ide> .. versionadded:: 2.0
<ide> """
<ide> return self._method_route("DELETE", rule, options)
<ide>
<del> def patch(self, rule: str, **options: t.Any) -> t.Callable:
<add> def patch(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Shortcut for :meth:`route` with ``methods=["PATCH"]``.
<ide>
<ide> .. versionadded:: 2.0
<ide> """
<ide> return self._method_route("PATCH", rule, options)
<ide>
<del> def route(self, rule: str, **options: t.Any) -> t.Callable:
<add> def route(self, rule: str, **options: t.Any) -> t.Callable[[F], F]:
<ide> """Decorate a view function to register it with the given URL
<ide> rule and options. Calls :meth:`add_url_rule`, which has more
<ide> details about the implementation.
<ide> def index():
<ide> :class:`~werkzeug.routing.Rule` object.
<ide> """
<ide>
<del> def decorator(f: t.Callable) -> t.Callable:
<add> def decorator(f: F) -> F:
<ide> endpoint = options.pop("endpoint", None)
<ide> self.add_url_rule(rule, endpoint, f, **options)
<ide> return f | 1 |
Python | Python | fix python2 error on example | 7ed9124a455f9e527a4d9baabbd89c63796fb18f | <ide><path>examples/training/pretrain_textcat.py
<ide> def evaluate_textcat(tokenizer, textcat, texts, cats):
<ide> train_examples=("Number of labelled examples", "option", "eg", int),
<ide> vectors_model=("Name or path to vectors model to learn from")
<ide> )
<del>def main(width: int, embed_size: int, vectors_model,
<add>def main(width, embed_size, vectors_model,
<ide> pretrain_iters=30, train_iters=30, train_examples=1000):
<ide> random.seed(0)
<ide> numpy.random.seed(0) | 1 |
Java | Java | fix javadoc typo | 63844c6d743544339fe001545409c3929a8e11ed | <ide><path>spring-jms/src/main/java/org/springframework/jms/support/converter/MappingJackson2MessageConverter.java
<ide> protected Message mapToMessage(Object object, Session session, ObjectWriter obje
<ide> * sets the resulting value (either a mapped id or the raw Java class name)
<ide> * into the configured type id message property.
<ide> * @param object the payload object to set a type id for
<del> * @param message the JMS Message to set the type id on
<add> * @param message the JMS Message on which to set the type id property
<ide> * @throws JMSException if thrown by JMS methods
<ide> * @see #getJavaTypeForMessage(javax.jms.Message)
<ide> * @see #setTypeIdPropertyName(String)
<ide> protected Object convertFromMessage(Message message, JavaType targetJavaType)
<ide> * <p>The default implementation parses the configured type id property name
<ide> * and consults the configured type id mapping. This can be overridden with
<ide> * a different strategy, e.g. doing some heuristics based on message origin.
<del> * @param message the JMS Message to set the type id on
<add> * @param message the JMS Message from which to get the type id property
<ide> * @throws JMSException if thrown by JMS methods
<ide> * @see #setTypeIdOnMessage(Object, javax.jms.Message)
<ide> * @see #setTypeIdPropertyName(String) | 1 |
Text | Text | add resource introduction to scrum | 806f688eaa5c10e1384d75d9d318c518215d43a9 | <ide><path>guide/english/agile/scrum/index.md
<ide> Here are some more resources:
<ide> - [Why Scrum](https://www.scrumalliance.org/why-scrum) from The Scrum Alliance
<ide> - [Scrum Guide](http://www.scrumguides.org/scrum-guide.html) from Scrum.org
<ide> - [Doing vs Being Agile](http://agilitrix.com/2016/04/doing-agile-vs-being-agile/)
<add>- [YouTube - Introduction to Scrum](https://www.youtube.com/watch?v=9TycLR0TqFA) | 1 |
Javascript | Javascript | fix safari test failures | 22f58bd688e35a473e6088bc47c1ba23ebcb0cdc | <ide><path>src/manipulation.js
<ide> jQuery.extend({
<ide> for ( i = 0; (elem = elems[i]) != null; i++ ) {
<ide> if ( elem || elem === 0 ) {
<ide> // Add nodes directly
<del> if ( typeof elem === "object" ) {
<add> if ( jQuery.type( elem ) === "object" ) {
<ide> jQuery.merge( ret, elem.nodeType ? [ elem ] : elem );
<ide>
<ide> // Convert non-html into a text node | 1 |
Text | Text | update stream.io link | 761f56ef4025543e9cf39346d25641305e7d957d | <ide><path>docs/index.md
<ide> continued development by **[signing up for a paid plan][funding]**.
<ide>
<ide> <ul class="premium-promo promo">
<ide> <li><a href="https://getsentry.com/welcome/" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/sentry130.png)">Sentry</a></li>
<del> <li><a href="https://getstream.io/try-the-api/?utm_source=drf&utm_medium=banner&utm_campaign=drf" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/stream-130.png)">Stream</a></li>
<add> <li><a href="https://getstream.io/?utm_source=drf&utm_medium=sponsorship&utm_content=developer" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/stream-130.png)">Stream</a></li>
<ide> <li><a href="https://software.esg-usa.com" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/esg-new-logo.png)">ESG</a></li>
<ide> <li><a href="https://rollbar.com" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/rollbar2.png)">Rollbar</a></li>
<ide> <li><a href="https://retool.com/?utm_source=djangorest&utm_medium=sponsorship" style="background-image: url(https://fund-rest-framework.s3.amazonaws.com/retool-sidebar.png)">Retool</a></li> | 1 |
Ruby | Ruby | add missing require for ordered_hash dependency | 295bf413e57fb0b4da5a73319236403eba493734 | <ide><path>activesupport/lib/active_support/json/encoding.rb
<ide> require 'active_support/core_ext/module/delegation'
<ide> require 'active_support/deprecation'
<ide> require 'active_support/json/variable'
<add>require 'active_support/ordered_hash'
<ide>
<ide> require 'bigdecimal'
<ide> require 'active_support/core_ext/big_decimal/conversions' # for #to_s | 1 |
Go | Go | remove redundant colon introduced by mistake | df64bc3ed0e315a1064937d1a3ad6d0f324b11c8 | <ide><path>client/volume_prune.go
<ide> func (cli *Client) VolumesPrune(ctx context.Context, pruneFilters filters.Args)
<ide> defer ensureReaderClosed(serverResp)
<ide>
<ide> if err := json.NewDecoder(serverResp.body).Decode(&report); err != nil {
<del> return report, fmt.Errorf("Error retrieving volume prune report:: %v", err)
<add> return report, fmt.Errorf("Error retrieving volume prune report: %v", err)
<ide> }
<ide>
<ide> return report, nil | 1 |
Javascript | Javascript | use exact types for options | 7f706e824de78494ee7fb66bc528aac5161a0bdb | <ide><path>packager/src/ModuleGraph/types.flow.js
<ide> type GraphOptions = {|
<ide> skip?: Set<string>,
<ide> |};
<ide>
<del>export type GraphResult = {
<add>export type GraphResult = {|
<ide> entryModules: Array<Module>,
<ide> modules: Array<Module>,
<del>};
<add>|};
<ide>
<ide> export type IdForPathFn = {path: string} => number;
<ide>
<ide> export type OutputFn = (
<ide> idForPath: IdForPathFn,
<ide> ) => OutputResult;
<ide>
<del>type OutputResult = {
<add>type OutputResult = {|
<ide> code: string,
<ide> map: SourceMap,
<del>};
<add>|};
<ide>
<ide> export type PackageData = {|
<ide> browser?: Object | string,
<ide> type ResolveOptions = {
<ide> log?: Console,
<ide> };
<ide>
<del>export type TransformerResult = {
<add>export type TransformerResult = {|
<ide> ast: ?Ast,
<ide> code: string,
<ide> map: ?SourceMap,
<del>};
<add>|};
<ide>
<ide> export type Transformer = {
<ide> transform: ( | 1 |
Javascript | Javascript | add test for disabling autoboot | c8ed6cb6ce96e6a53b8f7095896369442bbc66d9 | <ide><path>packages/ember-application/tests/system/visit_test.js
<add>import run from "ember-metal/run_loop";
<add>import Application from "ember-application/system/application";
<add>
<add>function createApplication() {
<add> var app = Application.extend().create({
<add> autoboot: false
<add> });
<add>
<add> return app;
<add>}
<add>
<add>if (Ember.FEATURES.isEnabled('ember-application-visit')) {
<add> QUnit.module("Ember.Application - visit()");
<add>
<add> // This tests whether the application is "autobooted" by registering an
<add> // instance initializer and asserting it never gets run. Since this is
<add> // inherently testing that async behavior *doesn't* happen, we set a
<add> // 500ms timeout to verify that when autoboot is set to false, the
<add> // instance initializer that would normally get called on DOM ready
<add> // does not fire.
<add> QUnit.test("Applications with autoboot set to false do not autoboot", function(assert) {
<add> QUnit.expect(1);
<add> QUnit.stop();
<add>
<add> run(function() {
<add> var app = createApplication();
<add>
<add> // Start the timeout
<add> var timeout = setTimeout(function() {
<add> ok(true, "500ms elapsed without initializers being called");
<add> QUnit.start();
<add> }, 500);
<add>
<add> // Create an instance initializer that should *not* get run.
<add> app.instanceInitializer({
<add> name: "assert-no-autoboot",
<add> initialize: function() {
<add> clearTimeout(timeout);
<add> QUnit.start();
<add> assert.ok(false, "instance should not have been created");
<add> }
<add> });
<add> });
<add> });
<add>} | 1 |
PHP | PHP | fix cs error | d28c6c4a32d6061a03b59fb8fda3ce4378cf1bf6 | <ide><path>tests/test_app/TestApp/Model/Table/AuthUsersTable.php
<ide> public function findAuth(Query $query, array $options)
<ide>
<ide> return $query;
<ide> }
<del>
<ide> } | 1 |
Javascript | Javascript | update jquery ujs | 57f5fe1850460e6a45bc075731a5dd78b620fa3e | <ide><path>railties/lib/rails/generators/rails/app/templates/public/javascripts/jquery_ujs.js
<add>/*
<add> * jquery-ujs
<add> *
<add> * http://github.com/rails/jquery-ujs/blob/master/src/rails.js
<add> *
<add> * This rails.js file supports jQuery 1.4.3 and 1.4.4 .
<add> *
<add> */
<add>
<ide> jQuery(function ($) {
<ide> var csrf_token = $('meta[name=csrf-token]').attr('content'),
<ide> csrf_param = $('meta[name=csrf-param]').attr('content');
<ide> jQuery(function ($) {
<ide> * Triggers a custom event on an element and returns the event result
<ide> * this is used to get around not being able to ensure callbacks are placed
<ide> * at the end of the chain.
<del> *
<del> * TODO: deprecate with jQuery 1.4.2 release, in favor of subscribing to our
<del> * own events and placing ourselves at the end of the chain.
<ide> */
<ide> triggerAndReturn: function (name, data) {
<ide> var event = new $.Event(name);
<ide> jQuery(function ($) {
<ide> },
<ide>
<ide> /**
<del> * Handles execution of remote calls firing overridable events along the way
<add> * Handles execution of remote calls. Provides following callbacks:
<add> *
<add> * - ajax:beforeSend - is executed before firing ajax call
<add> * - ajax:success - is executed when status is success
<add> * - ajax:complete - is executed when the request finishes, whether in failure or success
<add> * - ajax:error - is execute in case of error
<ide> */
<ide> callRemote: function () {
<ide> var el = this,
<ide> method = el.attr('method') || el.attr('data-method') || 'GET',
<ide> url = el.attr('action') || el.attr('href'),
<del> dataType = el.attr('data-type') || 'script';
<add> dataType = el.attr('data-type') || ($.ajaxSettings && $.ajaxSettings.dataType);
<ide>
<ide> if (url === undefined) {
<del> throw "No URL specified for remote call (action or href must be present).";
<add> throw "No URL specified for remote call (action or href must be present).";
<ide> } else {
<del> if (el.triggerAndReturn('ajax:before')) {
<del> var data = el.is('form') ? el.serializeArray() : [];
<add> var $this = $(this), data = el.is('form') ? el.serializeArray() : [];
<add>
<ide> $.ajax({
<ide> url: url,
<ide> data: data,
<ide> dataType: dataType,
<ide> type: method.toUpperCase(),
<ide> beforeSend: function (xhr) {
<del> el.trigger('ajax:loading', xhr);
<add> if ($this.triggerHandler('ajax:beforeSend') === false) {
<add> return false;
<add> }
<ide> },
<ide> success: function (data, status, xhr) {
<ide> el.trigger('ajax:success', [data, status, xhr]);
<ide> jQuery(function ($) {
<ide> el.trigger('ajax:complete', xhr);
<ide> },
<ide> error: function (xhr, status, error) {
<del> el.trigger('ajax:failure', [xhr, status, error]);
<add> el.trigger('ajax:error', [xhr, status, error]);
<ide> }
<ide> });
<del> }
<del>
<del> el.trigger('ajax:after');
<ide> }
<ide> }
<ide> });
<ide>
<ide> /**
<del> * confirmation handler
<add> * confirmation handler
<ide> */
<del> $('a[data-confirm],input[data-confirm]').live('click', function () {
<add> $('body').delegate('a[data-confirm], button[data-confirm], input[data-confirm]', 'click.rails', function () {
<ide> var el = $(this);
<ide> if (el.triggerAndReturn('confirm')) {
<ide> if (!confirm(el.attr('data-confirm'))) {
<ide> jQuery(function ($) {
<ide> });
<ide>
<ide>
<add>
<ide> /**
<ide> * remote handlers
<ide> */
<del> $('form[data-remote]').live('submit', function (e) {
<add> $('form[data-remote]').live('submit.rails', function (e) {
<ide> $(this).callRemote();
<ide> e.preventDefault();
<ide> });
<ide>
<del> $('a[data-remote],input[data-remote]').live('click', function (e) {
<add> $('a[data-remote],input[data-remote]').live('click.rails', function (e) {
<ide> $(this).callRemote();
<ide> e.preventDefault();
<ide> });
<ide>
<del> $('a[data-method]:not([data-remote])').live('click', function (e){
<add> /**
<add> * <%= link_to "Delete", user_path(@user), :method => :delete, :confirm => "Are you sure?" %>
<add> *
<add> * <a href="/users/5" data-confirm="Are you sure?" data-method="delete" rel="nofollow">Delete</a>
<add> */
<add> $('a[data-method]:not([data-remote])').live('click.rails', function (e){
<ide> var link = $(this),
<ide> href = link.attr('href'),
<ide> method = link.attr('data-method'),
<ide> form = $('<form method="post" action="'+href+'"></form>'),
<ide> metadata_input = '<input name="_method" value="'+method+'" type="hidden" />';
<ide>
<del> if (csrf_param != null && csrf_token != null) {
<del> metadata_input += '<input name="'+csrf_param+'" value="'+csrf_token+'" type="hidden" />';
<add> if (csrf_param !== undefined && csrf_token !== undefined) {
<add> metadata_input += '<input name="'+csrf_param+'" value="'+csrf_token+'" type="hidden" />';
<ide> }
<ide>
<ide> form.hide()
<ide> jQuery(function ($) {
<ide> /**
<ide> * disable-with handlers
<ide> */
<del> var disable_with_input_selector = 'input[data-disable-with]';
<del> var disable_with_form_remote_selector = 'form[data-remote]:has(' + disable_with_input_selector + ')';
<del> var disable_with_form_not_remote_selector = 'form:not([data-remote]):has(' + disable_with_input_selector + ')';
<add> var disable_with_input_selector = 'input[data-disable-with]',
<add> disable_with_form_remote_selector = 'form[data-remote]:has(' + disable_with_input_selector + ')',
<add> disable_with_form_not_remote_selector = 'form:not([data-remote]):has(' + disable_with_input_selector + ')';
<ide>
<ide> var disable_with_input_function = function () {
<ide> $(this).find(disable_with_input_selector).each(function () {
<ide> jQuery(function ($) {
<ide> });
<ide> };
<ide>
<del> $(disable_with_form_remote_selector).live('ajax:before', disable_with_input_function);
<del> $(disable_with_form_not_remote_selector).live('submit', disable_with_input_function);
<add> $(disable_with_form_remote_selector).live('ajax:before.rails', disable_with_input_function);
<add> $(disable_with_form_not_remote_selector).live('submit.rails', disable_with_input_function);
<ide>
<del> $(disable_with_form_remote_selector).live('ajax:complete', function () {
<add> $(disable_with_form_remote_selector).live('ajax:complete.rails', function () {
<ide> $(this).find(disable_with_input_selector).each(function () {
<ide> var input = $(this);
<ide> input.removeAttr('disabled')
<ide> .val(input.data('enable-with'));
<ide> });
<ide> });
<ide>
<add> var jqueryVersion = $().jquery;
<add>
<add> if (!( (jqueryVersion === '1.4.3') || (jqueryVersion === '1.4.4'))){
<add> alert('This rails.js does not support the jQuery version you are using. Please read documentation.');
<add> }
<add>
<ide> }); | 1 |
Go | Go | remove stubs for deprecated package | ee5d8f43e1c47975bf96f45c2854dfdaf584ee48 | <ide><path>pkg/signal/signal_deprecated.go
<del>// Package signal provides helper functions for dealing with signals across
<del>// various operating systems.
<del>package signal // import "github.com/docker/docker/pkg/signal"
<del>
<del>import (
<del> "github.com/docker/docker/pkg/stack"
<del> msignal "github.com/moby/sys/signal"
<del>)
<del>
<del>var (
<del> // DumpStacks appends the runtime stack into file in dir and returns full path
<del> // to that file.
<del> // Deprecated: use github.com/docker/docker/pkg/stack.Dump instead.
<del> DumpStacks = stack.DumpToFile
<del>
<del> // CatchAll catches all signals and relays them to the specified channel.
<del> // SIGURG is not handled, as it's used by the Go runtime to support
<del> // preemptable system calls.
<del> // Deprecated: use github.com/moby/sys/signal.CatchAll instead
<del> CatchAll = msignal.CatchAll
<del>
<del> // StopCatch stops catching the signals and closes the specified channel.
<del> // Deprecated: use github.com/moby/sys/signal.StopCatch instead
<del> StopCatch = msignal.StopCatch
<del>
<del> // ParseSignal translates a string to a valid syscall signal.
<del> // It returns an error if the signal map doesn't include the given signal.
<del> // Deprecated: use github.com/moby/sys/signal.ParseSignal instead
<del> ParseSignal = msignal.ParseSignal
<del>
<del> // ValidSignalForPlatform returns true if a signal is valid on the platform
<del> // Deprecated: use github.com/moby/sys/signal.ValidSignalForPlatform instead
<del> ValidSignalForPlatform = msignal.ValidSignalForPlatform
<del>
<del> // SignalMap is a map of signals for the current platform.
<del> // Deprecated: use github.com/moby/sys/signal.SignalMap instead
<del> SignalMap = msignal.SignalMap
<del>)
<del>
<del>// Signals used in cli/command
<del>const (
<del> // SIGCHLD is a signal sent to a process when a child process terminates, is interrupted, or resumes after being interrupted.
<del> // Deprecated: use github.com/moby/sys/signal.SIGCHLD instead
<del> SIGCHLD = msignal.SIGCHLD
<del> // SIGWINCH is a signal sent to a process when its controlling terminal changes its size
<del> // Deprecated: use github.com/moby/sys/signal.SIGWINCH instead
<del> SIGWINCH = msignal.SIGWINCH
<del> // SIGPIPE is a signal sent to a process when a pipe is written to before the other end is open for reading
<del> // Deprecated: use github.com/moby/sys/signal.SIGPIPE instead
<del> SIGPIPE = msignal.SIGPIPE
<del>
<del> // DefaultStopSignal has been deprecated and removed. The default value is
<del> // now defined in github.com/docker/docker/container. Clients should omit
<del> // the container's stop-signal field if the default should be used.
<del>) | 1 |
Javascript | Javascript | remove single instance of whitelist with allowlist | 3911e78138d1ca0dd41b75b3ebb5ee8d2a051313 | <ide><path>build/three.js
<ide>
<ide> var objectName = results.nodeName.substring( lastDot + 1 );
<ide>
<del> // Object names must be checked against a whitelist. Otherwise, there
<add> // Object names must be checked against an allowlist. Otherwise, there
<ide> // is no way to parse 'foo.bar.baz': 'baz' must be a property, but
<ide> // 'bar' could be the objectName, or part of a nodeName (which can
<ide> // include '.' characters).
<ide><path>build/three.module.js
<ide> Object.assign( PropertyBinding, {
<ide>
<ide> const objectName = results.nodeName.substring( lastDot + 1 );
<ide>
<del> // Object names must be checked against a whitelist. Otherwise, there
<add> // Object names must be checked against an allowlist. Otherwise, there
<ide> // is no way to parse 'foo.bar.baz': 'baz' must be a property, but
<ide> // 'bar' could be the objectName, or part of a nodeName (which can
<ide> // include '.' characters).
<ide><path>src/animation/PropertyBinding.js
<ide> Object.assign( PropertyBinding, {
<ide>
<ide> const objectName = results.nodeName.substring( lastDot + 1 );
<ide>
<del> // Object names must be checked against a whitelist. Otherwise, there
<add> // Object names must be checked against an allowlist. Otherwise, there
<ide> // is no way to parse 'foo.bar.baz': 'baz' must be a property, but
<ide> // 'bar' could be the objectName, or part of a nodeName (which can
<ide> // include '.' characters). | 3 |
Text | Text | reorder the sidebar in order of relevance | 9114f80b03a802eb9a37f4a41cb4dcbc8948b7ed | <ide><path>docs/_sidebar.md
<ide> - **Getting Started**
<ide> - [Introduction](index.md 'Contribute to the freeCodeCamp.org Community')
<ide> - [Frequently Asked Questions](FAQ.md)
<add>- **Translation Contribution**
<add> - [Work on translating resources](how-to-translate-files.md)
<add> - [Work on proofreading translations](how-to-proofread-files.md)
<ide> - **Code Contribution**
<ide> - [Set up freeCodeCamp locally](how-to-setup-freecodecamp-locally.md)
<del> - [Codebase best practices](codebase-best-practices.md)
<add> - [Follow coding best practices](codebase-best-practices.md)
<ide> - [Open a pull request](how-to-open-a-pull-request.md)
<ide> - [Work on coding challenges](how-to-work-on-coding-challenges.md)
<add> - [Work on practice projects](how-to-work-on-practice-projects.md)
<add> - [Work on tutorials with CodeRoad](how-to-work-on-tutorials-that-use-coderoad.md)
<add> - [Work on localized client web app](how-to-work-on-localized-client-webapp.md)
<add> - [Work on Cypress tests](how-to-add-cypress-tests.md)
<ide> - [Work on video challenges](how-to-help-with-video-challenges.md)
<ide> - [Work on the news theme](how-to-work-on-the-news-theme.md)
<ide> - [Work on the docs theme](how-to-work-on-the-docs-theme.md)
<del> - [Work on practice projects](how-to-work-on-practice-projects.md)
<del>- **Translation Contribution**
<del> - [Work on translating resources](how-to-translate-files.md)
<del> - [Work on proofreading translations](how-to-proofread-files.md)
<del>- **Resources**
<del> - [Set up freeCodeCamp on Windows (WSL)](how-to-setup-wsl.md)
<del> - [Add Cypress tests](how-to-add-cypress-tests.md)
<del> - [Work on localized client web app](how-to-work-on-localized-client-webapp.md)
<del> - [Catch outgoing emails locally](how-to-catch-outgoing-emails-locally.md)
<add>- **Additional Guides**
<ide> - [Test translations locally](how-to-test-translations-locally.md)
<ide> - [Understand the curriculum file structure](curriculum-file-structure.md)
<del> - [Work on tutorials w/ CodeRoad](how-to-work-on-tutorials-that-use-coderoad.md)
<add> - [Debug outgoing emails locally](how-to-catch-outgoing-emails-locally.md)
<add> - [Set up freeCodeCamp on Windows (WSL)](how-to-setup-wsl.md)
<ide>
<ide> ---
<ide> | 1 |
Java | Java | consider generics for predicting factorybean types | a0e462581fe1d27d5926809fbb21441a6685ef45 | <ide><path>spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractAutowireCapableBeanFactory.java
<ide> import org.springframework.beans.factory.config.SmartInstantiationAwareBeanPostProcessor;
<ide> import org.springframework.beans.factory.config.TypedStringValue;
<ide> import org.springframework.core.DefaultParameterNameDiscoverer;
<del>import org.springframework.core.GenericTypeResolver;
<ide> import org.springframework.core.MethodParameter;
<ide> import org.springframework.core.NamedThreadLocal;
<ide> import org.springframework.core.ParameterNameDiscoverer;
<ide> import org.springframework.util.ClassUtils;
<ide> import org.springframework.util.ObjectUtils;
<ide> import org.springframework.util.ReflectionUtils;
<add>import org.springframework.util.ReflectionUtils.MethodCallback;
<ide> import org.springframework.util.StringUtils;
<ide>
<ide> /**
<ide> protected Class<?> getTypeForFactoryMethod(String beanName, RootBeanDefinition m
<ide> * if present to determine the object type. If not present, i.e. the FactoryBean is
<ide> * declared as a raw type, checks the FactoryBean's {@code getObjectType} method
<ide> * on a plain instance of the FactoryBean, without bean properties applied yet.
<del> * If this doesn't return a type yet, a full creation of the FactoryBean is
<del> * used as fallback (through delegation to the superclass's implementation).
<add> * If this doesn't return a type yet, and {@code allowInit} is {@code true} a
<add> * full creation of the FactoryBean is used as fallback (through delegation to the
<add> * superclass's implementation).
<ide> * <p>The shortcut check for a FactoryBean is only applied in case of a singleton
<ide> * FactoryBean. If the FactoryBean instance itself is not kept as singleton,
<ide> * it will be fully created to check the type of its exposed object.
<ide> */
<ide> @Override
<del> @Nullable
<del> protected Class<?> getTypeForFactoryBean(String beanName, RootBeanDefinition mbd) {
<add> protected ResolvableType getTypeForFactoryBean(String beanName,
<add> RootBeanDefinition mbd, boolean allowInit) {
<add>
<add> ResolvableType result = ResolvableType.NONE;
<add>
<add> ResolvableType beanType = mbd.hasBeanClass() ?
<add> ResolvableType.forClass(mbd.getBeanClass()) :
<add> ResolvableType.NONE;
<add>
<add> // For instance supplied beans try the target type and bean class
<ide> if (mbd.getInstanceSupplier() != null) {
<del> ResolvableType targetType = mbd.targetType;
<del> if (targetType != null) {
<del> Class<?> result = targetType.as(FactoryBean.class).getGeneric().resolve();
<del> if (result != null) {
<del> return result;
<del> }
<add> result = getFactoryBeanGeneric(mbd.targetType);
<add> if (result.resolve() != null) {
<add> return result;
<ide> }
<del> if (mbd.hasBeanClass()) {
<del> Class<?> result = GenericTypeResolver.resolveTypeArgument(mbd.getBeanClass(), FactoryBean.class);
<del> if (result != null) {
<del> return result;
<del> }
<add> result = getFactoryBeanGeneric(beanType);
<add> if (result.resolve() != null) {
<add> return result;
<ide> }
<ide> }
<ide>
<add> // Consider factory methods
<ide> String factoryBeanName = mbd.getFactoryBeanName();
<ide> String factoryMethodName = mbd.getFactoryMethodName();
<ide>
<add> // Scan the factory bean methods
<ide> if (factoryBeanName != null) {
<ide> if (factoryMethodName != null) {
<del> // Try to obtain the FactoryBean's object type from its factory method declaration
<del> // without instantiating the containing bean at all.
<del> BeanDefinition fbDef = getBeanDefinition(factoryBeanName);
<del> if (fbDef instanceof AbstractBeanDefinition) {
<del> AbstractBeanDefinition afbDef = (AbstractBeanDefinition) fbDef;
<del> if (afbDef.hasBeanClass()) {
<del> Class<?> result = getTypeForFactoryBeanFromMethod(afbDef.getBeanClass(), factoryMethodName);
<del> if (result != null) {
<del> return result;
<del> }
<add> // Try to obtain the FactoryBean's object type from its factory method
<add> // declaration without instantiating the containing bean at all.
<add> BeanDefinition factoryBeanDefinition = getBeanDefinition(factoryBeanName);
<add> if (factoryBeanDefinition instanceof AbstractBeanDefinition &&
<add> ((AbstractBeanDefinition) factoryBeanDefinition).hasBeanClass()) {
<add> Class<?> factoryBeanClass = ((AbstractBeanDefinition) factoryBeanDefinition).getBeanClass();
<add> result = getTypeForFactoryBeanFromMethod(factoryBeanClass, factoryMethodName);
<add> if (result.resolve() != null) {
<add> return result;
<ide> }
<ide> }
<ide> }
<ide> // If not resolvable above and the referenced factory bean doesn't exist yet,
<ide> // exit here - we don't want to force the creation of another bean just to
<ide> // obtain a FactoryBean's object type...
<ide> if (!isBeanEligibleForMetadataCaching(factoryBeanName)) {
<del> return null;
<add> return ResolvableType.NONE;
<ide> }
<ide> }
<ide>
<del> // Let's obtain a shortcut instance for an early getObjectType() call...
<del> FactoryBean<?> fb = (mbd.isSingleton() ?
<del> getSingletonFactoryBeanForTypeCheck(beanName, mbd) :
<del> getNonSingletonFactoryBeanForTypeCheck(beanName, mbd));
<del>
<del> if (fb != null) {
<del> // Try to obtain the FactoryBean's object type from this early stage of the instance.
<del> Class<?> result = getTypeForFactoryBean(fb);
<del> if (result != null) {
<del> return result;
<del> }
<del> else {
<add> // If we're allowed, we can create the factory bean and call getObjectType() early
<add> if (allowInit) {
<add> FactoryBean<?> factoryBean = (mbd.isSingleton() ?
<add> getSingletonFactoryBeanForTypeCheck(beanName, mbd) :
<add> getNonSingletonFactoryBeanForTypeCheck(beanName, mbd));
<add> if (factoryBean != null) {
<add> // Try to obtain the FactoryBean's object type from this early stage of the instance.
<add> Class<?> type = getTypeForFactoryBean(factoryBean);
<add> if (type != null) {
<add> return ResolvableType.forClass(type);
<add> }
<ide> // No type found for shortcut FactoryBean instance:
<ide> // fall back to full creation of the FactoryBean instance.
<del> return super.getTypeForFactoryBean(beanName, mbd);
<add> return super.getTypeForFactoryBean(beanName, mbd, allowInit);
<ide> }
<ide> }
<ide>
<del> if (factoryBeanName == null && mbd.hasBeanClass()) {
<add> if (factoryBeanName == null && mbd.hasBeanClass() && factoryMethodName != null) {
<ide> // No early bean instantiation possible: determine FactoryBean's type from
<ide> // static factory method signature or from class inheritance hierarchy...
<del> if (factoryMethodName != null) {
<del> return getTypeForFactoryBeanFromMethod(mbd.getBeanClass(), factoryMethodName);
<del> }
<del> else {
<del> return GenericTypeResolver.resolveTypeArgument(mbd.getBeanClass(), FactoryBean.class);
<del> }
<add> return getTypeForFactoryBeanFromMethod(mbd.getBeanClass(), factoryMethodName);
<add> }
<add> result = getFactoryBeanGeneric(beanType);
<add> if (result.resolve() != null) {
<add> return result;
<ide> }
<add> return ResolvableType.NONE;
<add> }
<ide>
<del> return null;
<add> private ResolvableType getFactoryBeanGeneric(@Nullable ResolvableType type) {
<add> if (type == null) {
<add> return ResolvableType.NONE;
<add> }
<add> return type.as(FactoryBean.class).getGeneric();
<ide> }
<ide>
<ide> /**
<ide> protected Class<?> getTypeForFactoryBean(String beanName, RootBeanDefinition mbd
<ide> * @param factoryMethodName the name of the factory method
<ide> * @return the common {@code FactoryBean} object type, or {@code null} if none
<ide> */
<del> @Nullable
<del> private Class<?> getTypeForFactoryBeanFromMethod(Class<?> beanClass, final String factoryMethodName) {
<del>
<del> /**
<del> * Holder used to keep a reference to a {@code Class} value.
<del> */
<del> class Holder {
<del>
<del> @Nullable
<del> Class<?> value = null;
<del> }
<del>
<del> final Holder objectType = new Holder();
<del>
<add> private ResolvableType getTypeForFactoryBeanFromMethod(Class<?> beanClass, String factoryMethodName) {
<ide> // CGLIB subclass methods hide generic parameters; look at the original user class.
<del> Class<?> fbClass = ClassUtils.getUserClass(beanClass);
<del>
<del> // Find the given factory method, taking into account that in the case of
<del> // @Bean methods, there may be parameters present.
<del> ReflectionUtils.doWithMethods(fbClass, method -> {
<del> if (method.getName().equals(factoryMethodName) &&
<del> FactoryBean.class.isAssignableFrom(method.getReturnType())) {
<del> Class<?> currentType = GenericTypeResolver.resolveReturnTypeArgument(method, FactoryBean.class);
<del> if (currentType != null) {
<del> objectType.value = ClassUtils.determineCommonAncestor(currentType, objectType.value);
<del> }
<del> }
<del> }, ReflectionUtils.USER_DECLARED_METHODS);
<add> Class<?> factoryBeanClass = ClassUtils.getUserClass(beanClass);
<add> FactoryBeanMethodTypeFinder finder = new FactoryBeanMethodTypeFinder(factoryMethodName);
<add> ReflectionUtils.doWithMethods(factoryBeanClass, finder, ReflectionUtils.USER_DECLARED_METHODS);
<add> return finder.getResult();
<add> }
<ide>
<del> return (objectType.value != null && Object.class != objectType.value ? objectType.value : null);
<add> /**
<add> * This implementation attempts to query the FactoryBean's generic parameter metadata
<add> * if present to determine the object type. If not present, i.e. the FactoryBean is
<add> * declared as a raw type, checks the FactoryBean's {@code getObjectType} method
<add> * on a plain instance of the FactoryBean, without bean properties applied yet.
<add> * If this doesn't return a type yet, a full creation of the FactoryBean is
<add> * used as fallback (through delegation to the superclass's implementation).
<add> * <p>The shortcut check for a FactoryBean is only applied in case of a singleton
<add> * FactoryBean. If the FactoryBean instance itself is not kept as singleton,
<add> * it will be fully created to check the type of its exposed object.
<add> */
<add> @Override
<add> @Deprecated
<add> @Nullable
<add> protected Class<?> getTypeForFactoryBean(String beanName, RootBeanDefinition mbd) {
<add> return getTypeForFactoryBean(beanName, mbd, true).resolve();
<ide> }
<ide>
<ide> /**
<ide> public String getDependencyName() {
<ide> }
<ide> }
<ide>
<add> /**
<add> * {@link MethodCallback} used to find {@link FactoryBean} type information.
<add> */
<add> private static class FactoryBeanMethodTypeFinder implements MethodCallback {
<add>
<add> private final String factoryMethodName;
<add>
<add> private ResolvableType result = ResolvableType.NONE;
<add>
<add>
<add> FactoryBeanMethodTypeFinder(String factoryMethodName) {
<add> this.factoryMethodName = factoryMethodName;
<add> }
<add>
<add>
<add> @Override
<add> public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException {
<add> if (isFactoryBeanMethod(method)) {
<add> ResolvableType returnType = ResolvableType.forMethodReturnType(method);
<add> ResolvableType candidate = returnType.as(FactoryBean.class).getGeneric();
<add> if (this.result == ResolvableType.NONE) {
<add> this.result = candidate;
<add> }
<add> else {
<add> Class<?> resolvedResult = this.result.resolve();
<add> Class<?> commonAncestor = ClassUtils.determineCommonAncestor(candidate.resolve(), resolvedResult);
<add> if (!ObjectUtils.nullSafeEquals(resolvedResult, commonAncestor)) {
<add> this.result = ResolvableType.forClass(commonAncestor);
<add> }
<add> }
<add> }
<add> }
<add>
<add> private boolean isFactoryBeanMethod(Method method) {
<add> return method.getName().equals(this.factoryMethodName) &&
<add> FactoryBean.class.isAssignableFrom(method.getReturnType());
<add> }
<add>
<add>
<add> ResolvableType getResult() {
<add> Class<?> resolved = this.result.resolve();
<add> boolean foundResult = resolved != null && resolved != Object.class;
<add> return (foundResult ? this.result : ResolvableType.NONE);
<add> }
<add>
<add> }
<add>
<ide> }
<ide><path>spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanFactory.java
<ide> import org.springframework.core.NamedThreadLocal;
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.convert.ConversionService;
<add>import org.springframework.core.log.LogMessage;
<ide> import org.springframework.lang.Nullable;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.ClassUtils;
<ide> public boolean isPrototype(String name) throws NoSuchBeanDefinitionException {
<ide>
<ide> @Override
<ide> public boolean isTypeMatch(String name, ResolvableType typeToMatch) throws NoSuchBeanDefinitionException {
<add> return isTypeMatch(name, typeToMatch, true);
<add> }
<add>
<add> /**
<add> * Internal extended variant of {@link #isTypeMatch(String, ResolvableType)}
<add> * to check whether the bean with the given name matches the specified type. Allow
<add> * additional constraints to be applied to ensure that beans are not created early.
<add> * @param name the name of the bean to query
<add> * @param typeToMatch the type to match against (as a
<add> * {@code ResolvableType})
<add> * @return {@code true} if the bean type matches, {@code false} if it
<add> * doesn't match or cannot be determined yet
<add> * @throws NoSuchBeanDefinitionException if there is no bean with the given
<add> * name
<add> * @since 5.2
<add> * @see #getBean
<add> * @see #getType
<add> */
<add> boolean isTypeMatch(String name, ResolvableType typeToMatch,
<add> boolean allowFactoryBeanInit) throws NoSuchBeanDefinitionException {
<add>
<ide> String beanName = transformedBeanName(name);
<add> boolean isFactoryDereference = BeanFactoryUtils.isFactoryDereference(name);
<ide>
<ide> // Check manually registered singletons.
<ide> Object beanInstance = getSingleton(beanName, false);
<ide> if (beanInstance != null && beanInstance.getClass() != NullBean.class) {
<ide> if (beanInstance instanceof FactoryBean) {
<del> if (!BeanFactoryUtils.isFactoryDereference(name)) {
<add> if (!isFactoryDereference) {
<ide> Class<?> type = getTypeForFactoryBean((FactoryBean<?>) beanInstance);
<ide> return (type != null && typeToMatch.isAssignableFrom(type));
<ide> }
<ide> else {
<ide> return typeToMatch.isInstance(beanInstance);
<ide> }
<ide> }
<del> else if (!BeanFactoryUtils.isFactoryDereference(name)) {
<add> else if (!isFactoryDereference) {
<ide> if (typeToMatch.isInstance(beanInstance)) {
<ide> // Direct match for exposed instance?
<ide> return true;
<ide> else if (containsSingleton(beanName) && !containsBeanDefinition(beanName)) {
<ide>
<ide> // Retrieve corresponding bean definition.
<ide> RootBeanDefinition mbd = getMergedLocalBeanDefinition(beanName);
<add> BeanDefinitionHolder dbd = mbd.getDecoratedDefinition();
<ide>
<add> // Setup the types that we want to match against
<ide> Class<?> classToMatch = typeToMatch.resolve();
<ide> if (classToMatch == null) {
<ide> classToMatch = FactoryBean.class;
<ide> }
<ide> Class<?>[] typesToMatch = (FactoryBean.class == classToMatch ?
<ide> new Class<?>[] {classToMatch} : new Class<?>[] {FactoryBean.class, classToMatch});
<ide>
<del> // Check decorated bean definition, if any: We assume it'll be easier
<del> // to determine the decorated bean's type than the proxy's type.
<del> BeanDefinitionHolder dbd = mbd.getDecoratedDefinition();
<del> if (dbd != null && !BeanFactoryUtils.isFactoryDereference(name)) {
<del> RootBeanDefinition tbd = getMergedBeanDefinition(dbd.getBeanName(), dbd.getBeanDefinition(), mbd);
<del> Class<?> targetClass = predictBeanType(dbd.getBeanName(), tbd, typesToMatch);
<del> if (targetClass != null && !FactoryBean.class.isAssignableFrom(targetClass)) {
<del> return typeToMatch.isAssignableFrom(targetClass);
<add>
<add> // Attempt to predict the bean type
<add> Class<?> predictedType = null;
<add>
<add> // We're looking for a regular reference but we're a factory bean that has
<add> // a decorated bean definition. The target bean should be the same type
<add> // as FactoryBean would ultimately return.
<add> if (!isFactoryDereference && dbd != null && isFactoryBean(beanName, mbd)) {
<add> // We should only attempt if the user explicitly set lazy-init to true
<add> // and we know the merged bean definition is for a factory bean.
<add> if (!mbd.isLazyInit() || allowFactoryBeanInit) {
<add> RootBeanDefinition tbd = getMergedBeanDefinition(dbd.getBeanName(), dbd.getBeanDefinition(), mbd);
<add> Class<?> targetType = predictBeanType(dbd.getBeanName(), tbd, typesToMatch);
<add> if (targetType != null && !FactoryBean.class.isAssignableFrom(targetType)) {
<add> predictedType = targetType;
<add> }
<ide> }
<ide> }
<ide>
<del> Class<?> beanType = predictBeanType(beanName, mbd, typesToMatch);
<del> if (beanType == null) {
<del> return false;
<add> // If we couldn't use the target type, try regular prediction.
<add> if (predictedType == null) {
<add> predictedType = predictBeanType(beanName, mbd, typesToMatch);
<add> if (predictedType == null) {
<add> return false;
<add> }
<ide> }
<ide>
<del> // Check bean class whether we're dealing with a FactoryBean.
<del> if (FactoryBean.class.isAssignableFrom(beanType)) {
<del> if (!BeanFactoryUtils.isFactoryDereference(name) && beanInstance == null) {
<del> // If it's a FactoryBean, we want to look at what it creates, not the factory class.
<del> beanType = getTypeForFactoryBean(beanName, mbd);
<del> if (beanType == null) {
<add> // Attempt to get the actual ResolvableType for the bean.
<add> ResolvableType beanType = null;
<add>
<add> // If it's a FactoryBean, we want to look at what it creates, not the factory class.
<add> if (FactoryBean.class.isAssignableFrom(predictedType)) {
<add> if (beanInstance == null && !isFactoryDereference) {
<add> beanType = getTypeForFactoryBean(beanName, mbd, allowFactoryBeanInit);
<add> predictedType = (beanType != null) ? beanType.resolve() : null;
<add> if (predictedType == null) {
<ide> return false;
<ide> }
<ide> }
<ide> }
<del> else if (BeanFactoryUtils.isFactoryDereference(name)) {
<add> else if (isFactoryDereference) {
<ide> // Special case: A SmartInstantiationAwareBeanPostProcessor returned a non-FactoryBean
<ide> // type but we nevertheless are being asked to dereference a FactoryBean...
<ide> // Let's check the original bean class and proceed with it if it is a FactoryBean.
<del> beanType = predictBeanType(beanName, mbd, FactoryBean.class);
<del> if (beanType == null || !FactoryBean.class.isAssignableFrom(beanType)) {
<add> predictedType = predictBeanType(beanName, mbd, FactoryBean.class);
<add> if (predictedType == null || !FactoryBean.class.isAssignableFrom(predictedType)) {
<ide> return false;
<ide> }
<ide> }
<ide>
<del> ResolvableType resolvableType = mbd.targetType;
<del> if (resolvableType == null) {
<del> resolvableType = mbd.factoryMethodReturnType;
<add> // We don't have an exact type but if bean definition target type or the factory
<add> // method return type matches the predicted type then we can use that.
<add> if (beanType == null) {
<add> ResolvableType definedType = mbd.targetType;
<add> if (definedType == null) {
<add> definedType = mbd.factoryMethodReturnType;
<add> }
<add> if (definedType != null && definedType.resolve() == predictedType) {
<add> beanType = definedType;
<add> }
<ide> }
<del> if (resolvableType != null && resolvableType.resolve() == beanType) {
<del> return typeToMatch.isAssignableFrom(resolvableType);
<add>
<add> // If we have a bean type use it so that generics are considered
<add> if (beanType != null) {
<add> return typeToMatch.isAssignableFrom(beanType);
<ide> }
<del> return typeToMatch.isAssignableFrom(beanType);
<add>
<add> // If we don't have a bean type, fallback to the predicted type
<add> return typeToMatch.isAssignableFrom(predictedType);
<ide> }
<ide>
<ide> @Override
<ide> public Class<?> getType(String name) throws NoSuchBeanDefinitionException {
<ide> if (beanClass != null && FactoryBean.class.isAssignableFrom(beanClass)) {
<ide> if (!BeanFactoryUtils.isFactoryDereference(name)) {
<ide> // If it's a FactoryBean, we want to look at what it creates, not at the factory class.
<del> return getTypeForFactoryBean(beanName, mbd);
<add> return getTypeForFactoryBean(beanName, mbd, true).resolve();
<ide> }
<ide> else {
<ide> return beanClass;
<ide> protected boolean isFactoryBean(String beanName, RootBeanDefinition mbd) {
<ide> return result;
<ide> }
<ide>
<add> /**
<add> * Determine the bean type for the given FactoryBean definition, as far as possible.
<add> * Only called if there is no singleton instance registered for the target bean
<add> * already. Implementations are only allowed to instantiate the factory bean if
<add> * {@code allowInit} is {@code true}, otherwise they should try to determine the
<add> * result through other means.
<add> * <p>If {@code allowInit} is {@code true}, the default implementation will create
<add> * the FactoryBean via {@code getBean} to call its {@code getObjectType} method.
<add> * Subclasses are encouraged to optimize this, typically by inspecting the generic
<add> * signature of the factory bean class or the factory method that creates it. If
<add> * subclasses do instantiate the FactoryBean, they should consider trying the
<add> * {@code getObjectType} method without fully populating the bean. If this fails, a
<add> * full FactoryBean creation as performed by this implementation should be used as
<add> * fallback.
<add> * @param beanName the name of the bean
<add> * @param mbd the merged bean definition for the bean
<add> * @param allowInit if initialization of the bean is permitted
<add> * @return the type for the bean if determinable, otherwise {@code ResolvableType.NONE}
<add> * @since 5.2
<add> * @see org.springframework.beans.factory.FactoryBean#getObjectType()
<add> * @see #getBean(String)
<add> */
<add> protected ResolvableType getTypeForFactoryBean(String beanName,
<add> RootBeanDefinition mbd, boolean allowInit) {
<add>
<add> if (allowInit && mbd.isSingleton()) {
<add> try {
<add> FactoryBean<?> factoryBean = doGetBean(FACTORY_BEAN_PREFIX + beanName, FactoryBean.class, null, true);
<add> Class<?> objectType = getTypeForFactoryBean(factoryBean);
<add> return (objectType != null) ? ResolvableType.forClass(objectType) : ResolvableType.NONE;
<add> }
<add> catch (BeanCreationException ex) {
<add> if (ex.contains(BeanCurrentlyInCreationException.class)) {
<add> logger.trace(LogMessage.format("Bean currently in creation on FactoryBean type check: %s", ex));
<add> }
<add> else if (mbd.isLazyInit()) {
<add> logger.trace(LogMessage.format("Bean creation exception on lazy FactoryBean type check: %s", ex));
<add> }
<add> else {
<add> logger.debug(LogMessage.format("Bean creation exception on non-lazy FactoryBean type check: %s", ex));
<add> }
<add> onSuppressedException(ex);
<add> }
<add> }
<add> return ResolvableType.NONE;
<add> }
<add>
<ide> /**
<ide> * Determine the bean type for the given FactoryBean definition, as far as possible.
<ide> * Only called if there is no singleton instance registered for the target bean already.
<ide> protected boolean isFactoryBean(String beanName, RootBeanDefinition mbd) {
<ide> * @return the type for the bean if determinable, or {@code null} otherwise
<ide> * @see org.springframework.beans.factory.FactoryBean#getObjectType()
<ide> * @see #getBean(String)
<add> * @deprecated since 5.2 in favor of {@link #getTypeForFactoryBean(String, RootBeanDefinition, boolean)}
<ide> */
<ide> @Nullable
<add> @Deprecated
<ide> protected Class<?> getTypeForFactoryBean(String beanName, RootBeanDefinition mbd) {
<del> if (!mbd.isSingleton()) {
<del> return null;
<del> }
<del> try {
<del> FactoryBean<?> factoryBean = doGetBean(FACTORY_BEAN_PREFIX + beanName, FactoryBean.class, null, true);
<del> return getTypeForFactoryBean(factoryBean);
<del> }
<del> catch (BeanCreationException ex) {
<del> if (ex.contains(BeanCurrentlyInCreationException.class)) {
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Bean currently in creation on FactoryBean type check: " + ex);
<del> }
<del> }
<del> else if (mbd.isLazyInit()) {
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Bean creation exception on lazy FactoryBean type check: " + ex);
<del> }
<del> }
<del> else {
<del> if (logger.isDebugEnabled()) {
<del> logger.debug("Bean creation exception on non-lazy FactoryBean type check: " + ex);
<del> }
<del> }
<del> onSuppressedException(ex);
<del> return null;
<del> }
<add> return getTypeForFactoryBean(beanName, mbd, true).resolve();
<ide> }
<ide>
<ide> /**
<ide><path>spring-beans/src/main/java/org/springframework/beans/factory/support/DefaultListableBeanFactory.java
<ide> import org.springframework.core.annotation.MergedAnnotation;
<ide> import org.springframework.core.annotation.MergedAnnotations;
<ide> import org.springframework.core.annotation.MergedAnnotations.SearchStrategy;
<add>import org.springframework.core.log.LogMessage;
<ide> import org.springframework.lang.Nullable;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.ClassUtils;
<ide> private String[] doGetBeanNamesForType(ResolvableType type, boolean includeNonSi
<ide> if (!mbd.isAbstract() && (allowEagerInit ||
<ide> (mbd.hasBeanClass() || !mbd.isLazyInit() || isAllowEagerClassLoading()) &&
<ide> !requiresEagerInitForType(mbd.getFactoryBeanName()))) {
<del> // In case of FactoryBean, match object created by FactoryBean.
<ide> boolean isFactoryBean = isFactoryBean(beanName, mbd);
<ide> BeanDefinitionHolder dbd = mbd.getDecoratedDefinition();
<del> boolean matchFound =
<del> (allowEagerInit || !isFactoryBean ||
<del> (dbd != null && !mbd.isLazyInit()) || containsSingleton(beanName)) &&
<del> (includeNonSingletons ||
<del> (dbd != null ? mbd.isSingleton() : isSingleton(beanName))) &&
<del> isTypeMatch(beanName, type);
<del> if (!matchFound && isFactoryBean) {
<del> // In case of FactoryBean, try to match FactoryBean instance itself next.
<del> beanName = FACTORY_BEAN_PREFIX + beanName;
<del> matchFound = (includeNonSingletons || mbd.isSingleton()) && isTypeMatch(beanName, type);
<add> boolean matchFound = false;
<add> boolean allowFactoryBeanInit = allowEagerInit || containsSingleton(beanName);
<add> boolean isNonLazyDecorated = dbd != null && !mbd.isLazyInit();
<add> if (!isFactoryBean) {
<add> if (includeNonSingletons || isSingleton(beanName, mbd, dbd)) {
<add> matchFound = isTypeMatch(beanName, type, allowFactoryBeanInit);
<add> }
<add> }
<add> else {
<add> if (includeNonSingletons || isNonLazyDecorated ||
<add> (allowFactoryBeanInit && isSingleton(beanName, mbd, dbd))) {
<add> matchFound = isTypeMatch(beanName, type, allowFactoryBeanInit);
<add> }
<add> if (!matchFound) {
<add> // In case of FactoryBean, try to match FactoryBean instance itself next.
<add> beanName = FACTORY_BEAN_PREFIX + beanName;
<add> matchFound = isTypeMatch(beanName, type, allowFactoryBeanInit);
<add> }
<ide> }
<ide> if (matchFound) {
<ide> result.add(beanName);
<ide> }
<ide> }
<ide> }
<del> catch (CannotLoadBeanClassException ex) {
<add> catch (CannotLoadBeanClassException | BeanDefinitionStoreException ex) {
<ide> if (allowEagerInit) {
<ide> throw ex;
<ide> }
<del> // Probably a class name with a placeholder: let's ignore it for type matching purposes.
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Ignoring bean class loading failure for bean '" + beanName + "'", ex);
<del> }
<del> onSuppressedException(ex);
<del> }
<del> catch (BeanDefinitionStoreException ex) {
<del> if (allowEagerInit) {
<del> throw ex;
<del> }
<del> // Probably some metadata with a placeholder: let's ignore it for type matching purposes.
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Ignoring unresolvable metadata in bean definition '" + beanName + "'", ex);
<del> }
<add> // Probably a placeholder: let's ignore it for type matching purposes.
<add> LogMessage message = (ex instanceof CannotLoadBeanClassException) ?
<add> LogMessage.format("Ignoring bean class loading failure for bean '%s'", beanName) :
<add> LogMessage.format("Ignoring unresolvable metadata in bean definition '%s'", beanName);
<add> logger.trace(message, ex);
<ide> onSuppressedException(ex);
<ide> }
<ide> }
<ide> }
<ide>
<add>
<ide> // Check manually registered singletons too.
<ide> for (String beanName : this.manualSingletonNames) {
<ide> try {
<ide> private String[] doGetBeanNamesForType(ResolvableType type, boolean includeNonSi
<ide> }
<ide> catch (NoSuchBeanDefinitionException ex) {
<ide> // Shouldn't happen - probably a result of circular reference resolution...
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Failed to check manually registered singleton with name '" + beanName + "'", ex);
<del> }
<add> logger.trace(LogMessage.format("Failed to check manually registered singleton with name '%s'", beanName), ex);
<ide> }
<ide> }
<ide>
<ide> return StringUtils.toStringArray(result);
<ide> }
<ide>
<add> private boolean isSingleton(String beanName, RootBeanDefinition mbd, BeanDefinitionHolder dbd) {
<add> return (dbd != null) ? mbd.isSingleton() : isSingleton(beanName);
<add> }
<add>
<ide> /**
<ide> * Check whether the specified bean would need to be eagerly initialized
<ide> * in order to determine its type.
<ide><path>spring-context/src/test/java/org/springframework/context/annotation/ConfigurationWithFactoryBeanBeanEarlyDeductionTests.java
<add>/*
<add> * Copyright 2002-2019 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * https://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.context.annotation;
<add>
<add>import java.util.Arrays;
<add>
<add>import org.junit.Test;
<add>
<add>import org.springframework.beans.BeansException;
<add>import org.springframework.beans.factory.FactoryBean;
<add>import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
<add>import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
<add>import org.springframework.beans.factory.support.AbstractBeanFactory;
<add>
<add>import static org.assertj.core.api.Assertions.assertThat;
<add>
<add>/**
<add> * Test for {@link AbstractBeanFactory} type inference from
<add> * {@link FactoryBean FactoryBeans} defined in the configuration.
<add> *
<add> * @author Phillip Webb
<add> */
<add>public class ConfigurationWithFactoryBeanBeanEarlyDeductionTests {
<add>
<add> @Test
<add> public void preFreezeDirect() {
<add> assertPreFreeze(DirectConfiguration.class);
<add> }
<add>
<add> @Test
<add> public void postFreezeDirect() {
<add> assertPostFreeze(DirectConfiguration.class);
<add> }
<add>
<add> @Test
<add> public void preFreezeGenericMethod() {
<add> assertPreFreeze(GenericMethodConfiguration.class);
<add> }
<add>
<add> @Test
<add> public void postFreezeGenericMethod() {
<add> assertPostFreeze(GenericMethodConfiguration.class);
<add> }
<add>
<add> @Test
<add> public void preFreezeGenericClass() {
<add> assertPreFreeze(GenericClassConfiguration.class);
<add> }
<add>
<add> @Test
<add> public void postFreezeGenericClass() {
<add> assertPostFreeze(GenericClassConfiguration.class);
<add> }
<add>
<add> private void assertPostFreeze(Class<?> configurationClass) {
<add> AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
<add> configurationClass);
<add> assertContainsMyBeanName(context);
<add> }
<add>
<add> private void assertPreFreeze(Class<?> configurationClass,
<add> BeanFactoryPostProcessor... postProcessors) {
<add> NameCollectingBeanFactoryPostProcessor postProcessor = new NameCollectingBeanFactoryPostProcessor();
<add> AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
<add> Arrays.stream(postProcessors).forEach(context::addBeanFactoryPostProcessor);
<add> context.addBeanFactoryPostProcessor(postProcessor);
<add> context.register(configurationClass);
<add> context.refresh();
<add> assertContainsMyBeanName(postProcessor.getNames());
<add> }
<add>
<add> private void assertContainsMyBeanName(AnnotationConfigApplicationContext context) {
<add> assertContainsMyBeanName(context.getBeanNamesForType(MyBean.class, true, false));
<add> }
<add>
<add> private void assertContainsMyBeanName(String[] names) {
<add> assertThat(names).containsExactly("myBean");
<add> }
<add>
<add> private static class NameCollectingBeanFactoryPostProcessor
<add> implements BeanFactoryPostProcessor {
<add>
<add> private String[] names;
<add>
<add> @Override
<add> public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory)
<add> throws BeansException {
<add> this.names = beanFactory.getBeanNamesForType(MyBean.class, true, false);
<add> }
<add>
<add> public String[] getNames() {
<add> return this.names;
<add> }
<add>
<add> }
<add>
<add> @Configuration
<add> static class DirectConfiguration {
<add>
<add> @Bean
<add> MyBean myBean() {
<add> return new MyBean();
<add> }
<add>
<add> }
<add>
<add> @Configuration
<add> static class GenericMethodConfiguration {
<add>
<add> @Bean
<add> FactoryBean<MyBean> myBean() {
<add> return new TestFactoryBean<>(new MyBean());
<add> }
<add>
<add> }
<add>
<add> @Configuration
<add> static class GenericClassConfiguration {
<add>
<add> @Bean
<add> MyFactoryBean myBean() {
<add> return new MyFactoryBean();
<add> }
<add>
<add> }
<add>
<add> static class MyBean {
<add> }
<add>
<add> static class TestFactoryBean<T> implements FactoryBean<T> {
<add>
<add> private final T instance;
<add>
<add> public TestFactoryBean(T instance) {
<add> this.instance = instance;
<add> }
<add>
<add> @Override
<add> public T getObject() throws Exception {
<add> return this.instance;
<add> }
<add>
<add> @Override
<add> public Class<?> getObjectType() {
<add> return this.instance.getClass();
<add> }
<add>
<add> }
<add>
<add> static class MyFactoryBean extends TestFactoryBean<MyBean> {
<add>
<add> public MyFactoryBean() {
<add> super(new MyBean());
<add> }
<add>
<add> }
<add>
<add>} | 4 |
Javascript | Javascript | remove internet test from test/simple/ | d483acc5d9c67052366f10a23ebd46a96f989e05 | <ide><path>test/simple/test-c-ares.js
<ide> dns.lookup('::1', function(error, result, addressType) {
<ide> assert.equal(6, addressType);
<ide> });
<ide>
<del>dns.lookup('ipv6.google.com', function(error, result, addressType) {
<del> if (error) throw error;
<del> console.dir(arguments);
<del> //assert.equal('string', typeof result);
<del> assert.equal(6, addressType);
<del>});
<del>
<ide> // Windows doesn't usually have an entry for localhost 127.0.0.1 in
<ide> // C:\Windows\System32\drivers\etc\hosts
<ide> // so we disable this test on Windows. | 1 |
PHP | PHP | add stubs/incomplete code for request/response | 80f0b6f7145447ebb788281eaf0d4e8ba9ced0a1 | <ide><path>lib/Cake/Network/Http/Request.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<add> * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @link http://cakephp.org CakePHP(tm) Project
<add> * @since CakePHP(tm) v 3.0.0
<add> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<add> */
<add>namespace Cake\Network\Http;
<add>
<add>/**
<add> * Implements methods for HTTP requests.
<add> */
<add>class Request {
<add>
<add> const METHOD_GET = 'GET';
<add> const METHOD_POST = 'POST';
<add> const METHOD_PUT = 'PUT';
<add> const METHOD_DELETE = 'DELETE';
<add> const METHOD_PATCH = 'PATCH';
<add>/**
<add> * HTTP Version being used.
<add> *
<add> * @var string
<add> */
<add> protected $_version = '1.1';
<add>
<add> protected $_method;
<add> protected $_content;
<add> protected $_url;
<add>
<add>/**
<add> * Headers to be sent.
<add> *
<add> * @var array
<add> */
<add> protected $_headers = [
<add> 'Connection' => 'close',
<add> 'User-Agent' => 'CakePHP'
<add> ];
<add>
<add> public function method($method = null) {
<add> if ($method === null) {
<add> return $this->_method;
<add> }
<add> $this->_method = $method;
<add> return $this;
<add> }
<add>
<add> public function url($url = null) {
<add> if ($url === null) {
<add> return $this->_url;
<add> }
<add> $this->_url = $url;
<add> return $this;
<add> }
<add>
<add> public function header($name = null, $value = null) {
<add>
<add> }
<add>
<add> public function content($content = null) {
<add> if ($content === null) {
<add> return $this->_content;
<add> }
<add> $this->_content = $content;
<add> return $this;
<add> }
<add>
<add>}
<ide><path>lib/Cake/Network/Http/Response.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<add> * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @link http://cakephp.org CakePHP(tm) Project
<add> * @since CakePHP(tm) v 3.0.0
<add> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<add> */
<add>namespace Cake\Network\Http;
<add>
<add>/**
<add> * Implements methods for HTTP responses
<add> */
<add>class Response {
<add>
<add> public function __construct() {
<add> }
<add>
<add>} | 2 |
Ruby | Ruby | make bottle path | 939a530ddc644c3b6a193c7b6af22f5c5f0fd22a | <ide><path>Library/Homebrew/download_strategy.rb
<ide> def _fetch
<ide> class CurlBottleDownloadStrategy <CurlDownloadStrategy
<ide> def initialize url, name, version, specs
<ide> super
<del> @tarball_path=HOMEBREW_CACHE+'Bottles'+("#{name}-#{version}"+ext)
<add> HOMEBREW_CACHE_BOTTLES.mkpath
<add> @tarball_path=HOMEBREW_CACHE_BOTTLES+("#{name}-#{version}"+ext)
<ide> end
<ide> def stage
<ide> ohai "Pouring #{File.basename(@tarball_path)}"
<ide><path>Library/Homebrew/global.rb
<ide> # Where brews installed via URL are cached
<ide> HOMEBREW_CACHE_FORMULA = HOMEBREW_CACHE+"Formula"
<ide>
<add># Where bottles are cached
<add>HOMEBREW_CACHE_BOTTLES = HOMEBREW_CACHE+"Bottles"
<add>
<ide> if not defined? HOMEBREW_BREW_FILE
<ide> HOMEBREW_BREW_FILE = ENV['HOMEBREW_BREW_FILE'] || `which brew`.chomp
<ide> end | 2 |
Python | Python | move benchmark_wrappers to benchmark folder | d70eca307e2f8a949dddad6404b5ea4ef383bf4d | <add><path>official/benchmark/benchmark_wrappers.py
<del><path>official/utils/testing/benchmark_wrappers.py
<ide> # Lint as: python3
<add># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add># ==============================================================================
<ide> """Utils to annotate and trace benchmarks."""
<ide>
<ide> from __future__ import absolute_import
<ide><path>official/benchmark/bert_benchmark.py
<ide> from official.nlp.bert import configs
<ide> from official.nlp.bert import run_classifier
<ide> from official.utils.misc import distribution_utils
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide>
<ide> # pylint: disable=line-too-long
<ide> PRETRAINED_CHECKPOINT_PATH = 'gs://cloud-tpu-checkpoints/bert/keras_bert/uncased_L-24_H-1024_A-16/bert_model.ckpt'
<ide><path>official/benchmark/bert_squad_benchmark.py
<ide> from official.nlp.bert import run_squad
<ide> from official.utils.misc import distribution_utils
<ide> from official.utils.misc import keras_utils
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide>
<ide>
<ide> # pylint: disable=line-too-long
<ide><path>official/benchmark/keras_cifar_benchmark.py
<ide> import tensorflow as tf # pylint: disable=g-bad-import-order
<ide>
<ide> from official.benchmark import keras_benchmark
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide> from official.benchmark.models import resnet_cifar_main
<ide>
<ide> MIN_TOP_1_ACCURACY = 0.929
<ide><path>official/benchmark/keras_imagenet_benchmark.py
<ide> from absl import flags
<ide> import tensorflow as tf # pylint: disable=g-bad-import-order
<ide>
<add>from official.benchmark import benchmark_wrappers
<ide> from official.benchmark import keras_benchmark
<del>from official.utils.testing import benchmark_wrappers
<ide> from official.vision.image_classification import classifier_trainer
<ide> from official.vision.image_classification.resnet import resnet_imagenet_main
<ide>
<ide><path>official/benchmark/ncf_keras_benchmark.py
<ide> from absl import logging
<ide> from absl.testing import flagsaver
<ide> import tensorflow as tf
<del>
<add>from official.benchmark import benchmark_wrappers
<ide> from official.recommendation import ncf_common
<ide> from official.recommendation import ncf_keras_main
<ide> from official.utils.flags import core
<del>from official.utils.testing import benchmark_wrappers
<ide>
<ide> FLAGS = flags.FLAGS
<ide> NCF_DATA_DIR_NAME = 'movielens_data'
<ide><path>official/benchmark/resnet_ctl_imagenet_benchmark.py
<ide> from official.vision.image_classification.resnet import common
<ide> from official.vision.image_classification.resnet import resnet_ctl_imagenet_main
<ide> from official.benchmark.perfzero_benchmark import PerfZeroBenchmark
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide> from official.utils.flags import core as flags_core
<ide>
<ide> MIN_TOP_1_ACCURACY = 0.76
<ide><path>official/benchmark/retinanet_benchmark.py
<ide>
<ide> from official.benchmark import bert_benchmark_utils as benchmark_utils
<ide> from official.utils.flags import core as flags_core
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide> from official.vision.detection import main as detection
<ide>
<ide> TMP_DIR = os.getenv('TMPDIR')
<ide><path>official/benchmark/shakespeare_benchmark.py
<ide> from official.benchmark.models.shakespeare import shakespeare_main
<ide> from official.utils.flags import core as flags_core
<ide> from official.utils.misc import keras_utils
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide> from official.benchmark.perfzero_benchmark import PerfZeroBenchmark
<ide>
<ide> SHAKESPEARE_TRAIN_DATA = 'shakespeare/shakespeare.txt'
<ide><path>official/benchmark/transformer_benchmark.py
<ide>
<ide> from absl import flags
<ide> import tensorflow as tf
<add>from official.benchmark import benchmark_wrappers
<ide> from official.benchmark.perfzero_benchmark import PerfZeroBenchmark
<ide> from official.nlp.transformer import misc
<ide> from official.nlp.transformer import transformer_main as transformer_main
<ide> from official.utils.flags import core as flags_core
<del>from official.utils.testing import benchmark_wrappers
<ide>
<ide> TRANSFORMER_EN2DE_DATA_DIR_NAME = 'wmt32k-en2de-official'
<ide> EN2DE_2014_BLEU_DATA_DIR_NAME = 'newstest2014'
<ide><path>official/benchmark/xlnet_benchmark.py
<ide> from official.benchmark import bert_benchmark_utils as benchmark_utils
<ide> from official.nlp.xlnet import run_classifier
<ide> from official.nlp.xlnet import run_squad
<del>from official.utils.testing import benchmark_wrappers
<add>from official.benchmark import benchmark_wrappers
<ide>
<ide>
<ide> # pylint: disable=line-too-long | 11 |
Javascript | Javascript | fix more suspense traversal bugs | 8830ba890cdfc27970f1183fec41f8c2025d5977 | <ide><path>src/backend/renderer.js
<ide> export function attach(
<ide> }
<ide>
<ide> let pendingOperations: Array<number> = [];
<add> let pendingRealUnmountedIDs: Array<number> = [];
<add> let pendingSimulatedUnmountedIDs: Array<number> = [];
<ide> let pendingOperationsQueue: Array<Uint32Array> | null = [];
<ide>
<add> // We keep track of which Fibers have been reported as unmounted by React
<add> // during this commit phase so that we don't try to "hide" them or their
<add> // children when Suspense flips to fallback. These Fibers won't have IDs.
<add> let fibersUnmountedInThisCommitPhase: WeakSet<Fiber> = new WeakSet();
<add>
<add> // TODO: we could make this layer DEV-only and write directly to pendingOperations.
<ide> let nextOperation: Array<number> = [];
<ide> function beginNextOperation(size: number): void {
<ide> nextOperation.length = size;
<ide> }
<del> function endNextOperation(addToStartOfQueue: boolean): void {
<add> function endNextOperation(): void {
<ide> if (__DEV__) {
<ide> for (let i = 0; i < nextOperation.length; i++) {
<ide> if (!Number.isInteger(nextOperation[i])) {
<ide> export function attach(
<ide> }
<ide> }
<ide> }
<del>
<del> if (addToStartOfQueue) {
<del> pendingOperations.splice.apply(
<del> pendingOperations,
<del> [0, 0].concat(nextOperation)
<del> );
<del> } else {
<del> pendingOperations.push.apply(pendingOperations, nextOperation);
<del> }
<add> pendingOperations.push.apply(pendingOperations, nextOperation);
<ide> nextOperation.length = 0;
<ide> }
<ide>
<ide> function flushPendingEvents(root: Object): void {
<del> if (pendingOperations.length === 0) {
<add> if (
<add> pendingOperations.length === 0 &&
<add> pendingRealUnmountedIDs.length === 0 &&
<add> pendingSimulatedUnmountedIDs.length === 0
<add> ) {
<ide> // If we're currently profiling, send an "operations" method even if there are no mutations to the tree.
<ide> // The frontend needs this no-op info to know how to reconstruct the tree for each commit,
<ide> // even if a particular commit didn't change the shape of the tree.
<ide> export function attach(
<ide> }
<ide> }
<ide>
<add> const ops = new Uint32Array(
<add> // Identify which renderer this update is coming from.
<add> 2 + // [rendererID, rootFiberID]
<add> // All unmounts are batched in a single message.
<add> 2 + // [TREE_OPERATION_REMOVE, removedIDLength]
<add> pendingRealUnmountedIDs.length +
<add> pendingSimulatedUnmountedIDs.length +
<add> // Regular operations
<add> pendingOperations.length
<add> );
<add>
<ide> // Identify which renderer this update is coming from.
<ide> // This enables roots to be mapped to renderers,
<ide> // Which in turn enables fiber props, states, and hooks to be inspected.
<del> beginNextOperation(2);
<del> nextOperation[0] = rendererID;
<del> nextOperation[1] = getFiberID(getPrimaryFiber(root.current));
<del> endNextOperation(true);
<add> let i = 0;
<add> ops[i++] = rendererID;
<add> ops[i++] = getFiberID(getPrimaryFiber(root.current));
<add>
<add> // All unmounts except roots are batched in a single message.
<add> ops[i++] = TREE_OPERATION_REMOVE;
<add> // The first number is how many unmounted IDs we're gonna send.
<add> ops[i++] =
<add> pendingRealUnmountedIDs.length + pendingSimulatedUnmountedIDs.length;
<add> // Fill in the real unmounts in the reverse order.
<add> // They were inserted parents-first by React, but we want children-first.
<add> // So we traverse our array backwards.
<add> for (let j = pendingRealUnmountedIDs.length - 1; j >= 0; j--) {
<add> ops[i++] = pendingRealUnmountedIDs[j];
<add> }
<add> // Fill in the simulated unmounts (hidden Suspense subtrees) in their order.
<add> // (We want children to go before parents.)
<add> // They go *after* the real unmounts because we know for sure they won't be
<add> // children of already pushed "real" IDs. If they were, we wouldn't be able
<add> // to discover them during the traversal, as they would have been deleted.
<add> ops.set(pendingSimulatedUnmountedIDs, i);
<add> i += pendingSimulatedUnmountedIDs.length;
<add> // Fill in the rest of the operations.
<add> ops.set(pendingOperations, i);
<ide>
<ide> // Let the frontend know about tree operations.
<ide> // The first value in this array will identify which root it corresponds to,
<ide> // so we do no longer need to dispatch a separate root-committed event.
<del> const ops = Uint32Array.from(pendingOperations);
<ide> if (pendingOperationsQueue !== null) {
<ide> // Until the frontend has been connected, store the tree operations.
<ide> // This will let us avoid walking the tree later when the frontend connects,
<ide> export function attach(
<ide> hook.emit('operations', ops);
<ide> }
<ide>
<del> pendingOperations = [];
<add> pendingOperations.length = 0;
<add> pendingRealUnmountedIDs.length = 0;
<add> pendingSimulatedUnmountedIDs.length = 0;
<add> fibersUnmountedInThisCommitPhase = new WeakSet();
<ide> }
<ide>
<ide> function recordMount(fiber: Fiber, parentFiber: Fiber | null) {
<ide> export function attach(
<ide> nextOperation[2] = ElementTypeRoot;
<ide> nextOperation[3] = isProfilingSupported ? 1 : 0;
<ide> nextOperation[4] = hasOwnerMetadata ? 1 : 0;
<del> endNextOperation(false);
<add> endNextOperation();
<ide> } else {
<ide> const { displayName, key, type } = getDataForFiber(fiber);
<ide> const { _debugOwner } = fiber;
<ide> export function attach(
<ide> nextOperation[6 + encodedDisplayNameSize + 1 + i] = encodedKey[i];
<ide> }
<ide> }
<del> endNextOperation(false);
<add> endNextOperation();
<ide> }
<ide>
<ide> if (isProfiling) {
<ide> export function attach(
<ide> nextOperation[0] = TREE_OPERATION_UPDATE_TREE_BASE_DURATION;
<ide> nextOperation[1] = id;
<ide> nextOperation[2] = treeBaseDuration;
<del> endNextOperation(false);
<add> endNextOperation();
<ide>
<ide> const { actualDuration } = fiber;
<ide> if (actualDuration > 0) {
<ide> export function attach(
<ide> }
<ide> }
<ide>
<del> function recordUnmount(fiber: Fiber) {
<add> function recordUnmount(fiber: Fiber, isSimulated: boolean) {
<ide> const isRoot = fiber.tag === HostRoot;
<ide> const primaryFiber = getPrimaryFiber(fiber);
<ide> if (!fiberToIDMap.has(primaryFiber)) {
<ide> export function attach(
<ide> }
<ide> const id = getFiberID(primaryFiber);
<ide> if (isRoot) {
<del> beginNextOperation(2);
<add> // Removing a root needs to happen at the end
<add> // so we don't batch it with other unmounts.
<add> beginNextOperation(3);
<ide> nextOperation[0] = TREE_OPERATION_REMOVE;
<del> nextOperation[1] = id;
<del> endNextOperation(false);
<add> nextOperation[1] = 1; // Remove one item
<add> nextOperation[2] = id;
<add> endNextOperation();
<ide> } else if (!shouldFilterFiber(fiber)) {
<del> beginNextOperation(2);
<del> nextOperation[0] = TREE_OPERATION_REMOVE;
<del> nextOperation[1] = id;
<del> // Non-root fibers are deleted during the commit phase.
<del> // They are deleted in the parent-first order. However
<del> // DevTools currently expects deletions to be child-first.
<del> // This is why we prepend the delete operation to the queue.
<del> endNextOperation(true);
<add> // To maintain child-first ordering,
<add> // we'll push it into one of these queues,
<add> // and later arrange them in the correct order.
<add> if (isSimulated) {
<add> pendingSimulatedUnmountedIDs.push(id);
<add> } else {
<add> pendingRealUnmountedIDs.push(id);
<add> }
<ide> }
<ide> fiberToIDMap.delete(primaryFiber);
<ide> idToFiberMap.delete(id);
<ide> export function attach(
<ide> }
<ide> }
<ide>
<add> // We use this to simulate unmounting for Suspense trees
<add> // when we switch from primary to fallback.
<ide> function unmountFiberChildrenRecursively(fiber: Fiber) {
<ide> if (__DEBUG__) {
<ide> debug('unmountFiberChildrenRecursively()', fiber);
<ide> }
<add>
<add> // We might meet a nested Suspense on our way.
<add> const isTimedOutSuspense =
<add> fiber.tag === ReactTypeOfWork.SuspenseComponent &&
<add> fiber.memoizedState !== null;
<add>
<ide> let child = fiber.child;
<add> if (isTimedOutSuspense) {
<add> // If it's showing the fallback tree, let's traverse it instead.
<add> const primaryChildFragment = fiber.child;
<add> const fallbackChildFragment = primaryChildFragment.sibling;
<add> // Skip over to the real Fiber child.
<add> child = fallbackChildFragment.child;
<add> }
<add>
<ide> while (child !== null) {
<del> recordUnmount(child);
<del> unmountFiberChildrenRecursively(child);
<add> // Record simulated unmounts children-first.
<add> // We might find real committed unmounts along the way--skip them.
<add> // Otherwise we would send duplicated messages for the same IDs.
<add> if (!fibersUnmountedInThisCommitPhase.has(child)) {
<add> unmountFiberChildrenRecursively(child);
<add> recordUnmount(child, true);
<add> }
<ide> child = child.sibling;
<ide> }
<ide> }
<ide> export function attach(
<ide> nextOperation[0] = TREE_OPERATION_UPDATE_TREE_BASE_DURATION;
<ide> nextOperation[1] = getFiberID(getPrimaryFiber(fiber));
<ide> nextOperation[2] = treeBaseDuration;
<del> endNextOperation(false);
<add> endNextOperation();
<ide> }
<ide>
<ide> if (haveProfilerTimesChanged(fiber.alternate, fiber)) {
<ide> export function attach(
<ide> for (let i = 0; i < nextChildren.length; i++) {
<ide> nextOperation[3 + i] = nextChildren[i];
<ide> }
<del> endNextOperation(false);
<add> endNextOperation();
<ide> }
<ide>
<ide> function findReorderedChildrenRecursively(
<ide> export function attach(
<ide> }
<ide>
<ide> function handleCommitFiberUnmount(fiber) {
<add> // Remember this is a real deletion so we don't
<add> // go down this tree when hiding Suspense nodes.
<add> fibersUnmountedInThisCommitPhase.add(fiber);
<ide> // This is not recursive.
<ide> // We can't traverse fibers after unmounting so instead
<ide> // we rely on React telling us about each unmount.
<del> recordUnmount(fiber);
<add> recordUnmount(fiber, false);
<ide> }
<ide>
<ide> function handleCommitFiberRoot(root) {
<ide> export function attach(
<ide> updateFiberRecursively(current, alternate, null);
<ide> } else if (wasMounted && !isMounted) {
<ide> // Unmount an existing root.
<del> recordUnmount(current);
<add> recordUnmount(current, false);
<ide> }
<ide> } else {
<ide> // Mount a new root.
<ide><path>src/devtools/store.js
<ide> export default class Store extends EventEmitter {
<ide> }
<ide> };
<ide>
<add> _adjustParentTreeWeight = (
<add> parentElement: Element | null,
<add> weightDelta: number
<add> ) => {
<add> let isInsideCollapsedSubTree = false;
<add>
<add> while (parentElement != null) {
<add> parentElement.weight += weightDelta;
<add>
<add> // Additions and deletions within a collapsed subtree should not bubble beyond the collapsed parent.
<add> // Their weight will bubble up when the parent is expanded.
<add> if (parentElement.isCollapsed) {
<add> isInsideCollapsedSubTree = true;
<add> break;
<add> }
<add>
<add> parentElement = ((this._idToElement.get(
<add> parentElement.parentID
<add> ): any): Element);
<add> }
<add>
<add> // Additions and deletions within a collapsed subtree should not affect the overall number of elements.
<add> if (!isInsideCollapsedSubTree) {
<add> this._weightAcrossRoots += weightDelta;
<add> }
<add> };
<add>
<ide> onBridgeOperations = (operations: Uint32Array) => {
<ide> if (!(operations instanceof Uint32Array)) {
<ide> // $FlowFixMe TODO HACK Temporary workaround for the fact that Chrome is not transferring the typed array.
<ide> export default class Store extends EventEmitter {
<ide>
<ide> let i = 2;
<ide> while (i < operations.length) {
<del> let id: number = ((null: any): number);
<del> let element: Element = ((null: any): Element);
<del> let ownerID: number = 0;
<del> let parentID: number = ((null: any): number);
<del> let parentElement: Element = ((null: any): Element);
<del> let type: ElementType = ((null: any): ElementType);
<del> let weightDelta: number = 0;
<del>
<ide> const operation = operations[i];
<del>
<ide> switch (operation) {
<del> case TREE_OPERATION_ADD:
<del> id = ((operations[i + 1]: any): number);
<del> type = ((operations[i + 2]: any): ElementType);
<add> case TREE_OPERATION_ADD: {
<add> const id = ((operations[i + 1]: any): number);
<add> const type = ((operations[i + 2]: any): ElementType);
<ide>
<ide> i = i + 3;
<ide>
<ide> export default class Store extends EventEmitter {
<ide> );
<ide> }
<ide>
<add> let ownerID: number = 0;
<add> let parentID: number = ((null: any): number);
<ide> if (type === ElementTypeRoot) {
<ide> if (__DEBUG__) {
<ide> debug('Add', `new root node ${id}`);
<ide> export default class Store extends EventEmitter {
<ide> );
<ide> }
<ide>
<del> parentElement = ((this._idToElement.get(parentID): any): Element);
<add> const parentElement = ((this._idToElement.get(
<add> parentID
<add> ): any): Element);
<ide> parentElement.children = parentElement.children.concat(id);
<ide>
<ide> const element: Element = {
<ide> export default class Store extends EventEmitter {
<ide>
<ide> this._idToElement.set(id, element);
<ide>
<add> // TODO: don't recreate this on every iteration.
<ide> const oldAddedElementIDs = addedElementIDs;
<ide> addedElementIDs = new Uint32Array(addedElementIDs.length + 1);
<ide> addedElementIDs.set(oldAddedElementIDs);
<ide> addedElementIDs[oldAddedElementIDs.length] = id;
<ide>
<del> weightDelta = 1;
<add> this._adjustParentTreeWeight(parentElement, 1);
<ide> }
<ide> break;
<add> }
<ide> case TREE_OPERATION_REMOVE: {
<del> id = ((operations[i + 1]: any): number);
<del>
<del> if (!this._idToElement.has(id)) {
<del> throw Error(
<del> `Cannot remove node ${id} because no matching node was found in the Store.`
<del> );
<del> }
<del>
<add> const removeLength = ((operations[i + 1]: any): number);
<ide> i = i + 2;
<ide>
<del> element = ((this._idToElement.get(id): any): Element);
<del> parentID = element.parentID;
<del> weightDelta = -element.weight;
<del>
<del> if (element.children.length > 0) {
<del> throw new Error(
<del> 'Fiber ' +
<del> id +
<del> ' was removed before its children. ' +
<del> 'This is a bug in React DevTools.'
<del> );
<del> }
<del>
<del> this._idToElement.delete(id);
<add> for (let removeIndex = 0; removeIndex < removeLength; removeIndex++) {
<add> const id = ((operations[i]: any): number);
<ide>
<del> if (parentID === 0) {
<del> if (__DEBUG__) {
<del> debug('Remove', `node ${id} root`);
<add> if (!this._idToElement.has(id)) {
<add> throw Error(
<add> `Cannot remove node ${id} because no matching node was found in the Store.`
<add> );
<ide> }
<ide>
<del> this._roots = this._roots.filter(rootID => rootID !== id);
<del> this._rootIDToRendererID.delete(id);
<del> this._rootIDToCapabilities.delete(id);
<add> i = i + 1;
<ide>
<del> haveRootsChanged = true;
<del> } else {
<del> if (__DEBUG__) {
<del> debug('Remove', `node ${id} from parent ${parentID}`);
<add> const element = ((this._idToElement.get(id): any): Element);
<add> if (element.children.length > 0) {
<add> throw new Error(
<add> 'Fiber ' +
<add> id +
<add> ' was removed before its children. ' +
<add> 'This is a bug in React DevTools.'
<add> );
<ide> }
<del> parentElement = ((this._idToElement.get(parentID): any): Element);
<del> if (parentElement === undefined) {
<del> throw Error(
<del> `Cannot remove node ${id} from parent ${parentID} because no matching node was found in the Store.`
<add>
<add> this._idToElement.delete(id);
<add>
<add> const parentID = element.parentID;
<add> let parentElement = null;
<add> if (parentID === 0) {
<add> if (__DEBUG__) {
<add> debug('Remove', `fiber ${id} root`);
<add> }
<add>
<add> this._roots = this._roots.filter(rootID => rootID !== id);
<add> this._rootIDToRendererID.delete(id);
<add> this._rootIDToCapabilities.delete(id);
<add>
<add> haveRootsChanged = true;
<add> } else {
<add> if (__DEBUG__) {
<add> debug('Remove', `fiber ${id} from parent ${parentID}`);
<add> }
<add> parentElement = ((this._idToElement.get(parentID): any): Element);
<add> if (parentElement === undefined) {
<add> throw Error(
<add> `Cannot remove node ${id} from parent ${parentID} because no matching node was found in the Store.`
<add> );
<add> }
<add> parentElement.children = parentElement.children.filter(
<add> childID => childID !== id
<ide> );
<ide> }
<del> parentElement.children = parentElement.children.filter(
<del> childID => childID !== id
<del> );
<del> }
<ide>
<del> // Track removed items so search results can be updated
<del> const oldRemovedElementIDs = removedElementIDs;
<del> removedElementIDs = new Uint32Array(removedElementIDs.length + 1);
<del> removedElementIDs.set(oldRemovedElementIDs);
<del> removedElementIDs[oldRemovedElementIDs.length] = id;
<add> this._adjustParentTreeWeight(parentElement, -element.weight);
<add>
<add> // Track removed items so search results can be updated
<add> // TODO: no need to recreate this in a loop.
<add> const oldRemovedElementIDs = removedElementIDs;
<add> removedElementIDs = new Uint32Array(removedElementIDs.length + 1);
<add> removedElementIDs.set(oldRemovedElementIDs);
<add> removedElementIDs[oldRemovedElementIDs.length] = id;
<add> }
<ide> break;
<ide> }
<del> case TREE_OPERATION_RESET_CHILDREN:
<del> id = ((operations[i + 1]: any): number);
<add> case TREE_OPERATION_RESET_CHILDREN: {
<add> const id = ((operations[i + 1]: any): number);
<ide> const numChildren = ((operations[i + 2]: any): number);
<ide> const children = ((operations.slice(
<ide> i + 3,
<ide> export default class Store extends EventEmitter {
<ide> );
<ide> }
<ide>
<del> element = ((this._idToElement.get(id): any): Element);
<add> const element = ((this._idToElement.get(id): any): Element);
<ide> const prevChildren = element.children;
<ide> element.children = Array.from(children);
<ide> if (element.children.length !== prevChildren.length) {
<ide> export default class Store extends EventEmitter {
<ide> });
<ide>
<ide> element.weight = nextWeight;
<del>
<del> weightDelta = nextWeight - prevWeight;
<add> // TODO: passing null here is suspicious, but it's existing behavior.
<add> // It is suspicious because either we shouldn't need a delta at all--
<add> // or we should apply it to all parents, and not just this item.
<add> this._adjustParentTreeWeight(null, nextWeight - prevWeight);
<ide> }
<ide> break;
<add> }
<ide> case TREE_OPERATION_UPDATE_TREE_BASE_DURATION:
<ide> // Base duration updates are only sent while profiling is in progress.
<ide> // We can ignore them at this point.
<ide> export default class Store extends EventEmitter {
<ide> default:
<ide> throw Error(`Unsupported Bridge operation ${operation}`);
<ide> }
<del>
<del> let isInsideCollapsedSubTree = false;
<del>
<del> while (parentElement != null) {
<del> parentElement.weight += weightDelta;
<del>
<del> // Additions and deletions within a collapsed subtree should not bubble beyond the collapsed parent.
<del> // Their weight will bubble up when the parent is expanded.
<del> if (parentElement.isCollapsed) {
<del> isInsideCollapsedSubTree = true;
<del> break;
<del> }
<del>
<del> parentElement = ((this._idToElement.get(
<del> parentElement.parentID
<del> ): any): Element);
<del> }
<del>
<del> // Additions and deletions within a collapsed subtree should not affect the overall number of elements.
<del> if (!isInsideCollapsedSubTree) {
<del> this._weightAcrossRoots += weightDelta;
<del> }
<ide> }
<ide>
<ide> this._revision++;
<ide><path>src/devtools/views/Profiler/CommitTreeBuilder.js
<ide> function updateTree(
<ide>
<ide> let i = 2;
<ide> while (i < operations.length) {
<del> let id: number = ((null: any): number);
<del> let node: Node = ((null: any): Node);
<del> let parentID: number = ((null: any): number);
<del> let parentNode: Node = ((null: any): Node);
<del> let type: ElementType = ((null: any): ElementType);
<del>
<ide> const operation = operations[i];
<ide>
<ide> switch (operation) {
<ide> case TREE_OPERATION_ADD:
<del> id = ((operations[i + 1]: any): number);
<del> type = ((operations[i + 2]: any): ElementType);
<add> const id = ((operations[i + 1]: any): number);
<add> const type = ((operations[i + 2]: any): ElementType);
<ide>
<ide> i = i + 3;
<ide>
<ide> function updateTree(
<ide>
<ide> nodes.set(id, node);
<ide> } else {
<del> parentID = ((operations[i]: any): number);
<add> const parentID = ((operations[i]: any): number);
<ide> i++;
<ide>
<ide> i++; // ownerID
<ide> function updateTree(
<ide> );
<ide> }
<ide>
<del> parentNode = getClonedNode(parentID);
<add> const parentNode = getClonedNode(parentID);
<ide> parentNode.children = parentNode.children.concat(id);
<ide>
<ide> const node: Node = {
<ide> function updateTree(
<ide>
<ide> nodes.set(id, node);
<ide> }
<del> break;
<del> case TREE_OPERATION_REMOVE:
<del> id = ((operations[i + 1]: any): number);
<ide>
<add> break;
<add> case TREE_OPERATION_REMOVE: {
<add> const removeLength = ((operations[i + 1]: any): number);
<ide> i = i + 2;
<ide>
<del> if (!nodes.has(id)) {
<del> throw new Error(
<del> 'Commit tree does not contain fiber ' +
<del> id +
<del> '. This is a bug in React DevTools.'
<del> );
<del> }
<add> for (let removeIndex = 0; removeIndex < removeLength; removeIndex++) {
<add> const id = ((operations[i]: any): number);
<add> i = i + 1;
<add>
<add> if (!nodes.has(id)) {
<add> throw new Error(
<add> 'Commit tree does not contain fiber ' +
<add> id +
<add> '. This is a bug in React DevTools.'
<add> );
<add> }
<ide>
<del> node = getClonedNode(id);
<del> parentID = node.parentID;
<add> const node = getClonedNode(id);
<add> const parentID = node.parentID;
<ide>
<del> nodes.delete(id);
<add> nodes.delete(id);
<ide>
<del> parentNode = getClonedNode(parentID);
<del> if (parentNode == null) {
<del> // No-op
<del> } else {
<del> if (__DEBUG__) {
<del> debug('Remove', `fiber ${id} from parent ${parentID}`);
<del> }
<add> const parentNode = getClonedNode(parentID);
<add> if (parentNode == null) {
<add> // No-op
<add> } else {
<add> if (__DEBUG__) {
<add> debug('Remove', `fiber ${id} from parent ${parentID}`);
<add> }
<ide>
<del> parentNode.children = parentNode.children.filter(
<del> childID => childID !== id
<del> );
<add> parentNode.children = parentNode.children.filter(
<add> childID => childID !== id
<add> );
<add> }
<ide> }
<ide> break;
<del> case TREE_OPERATION_RESET_CHILDREN:
<del> id = ((operations[i + 1]: any): number);
<add> }
<add> case TREE_OPERATION_RESET_CHILDREN: {
<add> const id = ((operations[i + 1]: any): number);
<ide> const numChildren = ((operations[i + 2]: any): number);
<ide> const children = ((operations.slice(
<ide> i + 3,
<ide> function updateTree(
<ide> debug('Re-order', `fiber ${id} children ${children.join(',')}`);
<ide> }
<ide>
<del> node = getClonedNode(id);
<add> const node = getClonedNode(id);
<ide> node.children = Array.from(children);
<ide>
<ide> break;
<del> case TREE_OPERATION_UPDATE_TREE_BASE_DURATION:
<del> id = operations[i + 1];
<add> }
<add> case TREE_OPERATION_UPDATE_TREE_BASE_DURATION: {
<add> const id = operations[i + 1];
<ide>
<del> node = getClonedNode(id);
<add> const node = getClonedNode(id);
<ide> node.treeBaseDuration = operations[i + 2] / 1000; // Convert microseconds back to milliseconds;
<ide>
<ide> if (__DEBUG__) {
<ide> function updateTree(
<ide>
<ide> i = i + 3;
<ide> break;
<add> }
<ide> default:
<ide> throw Error(`Unsupported Bridge operation ${operation}`);
<ide> } | 3 |
Python | Python | update base punctuation | e85e1d571b834d35922a816e1886cfc74cdf50d8 | <ide><path>spacy/lang/char_classes.py
<ide> # These expressions contain various unicode variations, including characters
<ide> # used in Chinese (see #1333, #1340, #1351) – unless there are cross-language
<ide> # conflicts, spaCy's base tokenizer should handle all of those by default
<del>_punct = r'… …… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* & 。 ? ! , 、 ; : ~ ·'
<add>_punct = r'… …… , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* & 。 ? ! , 、 ; : ~ · ।'
<ide> _quotes = r'\' \'\' " ” “ `` ` ‘ ´ ‘‘ ’’ ‚ , „ » « 「 」 『 』 ( ) 〔 〕 【 】 《 》 〈 〉'
<ide> _hyphens = '- – — -- --- —— ~'
<ide> | 1 |
Java | Java | remove unnecessary code | af4fc88b3321247c6e6e2778ae6f3226c742dac0 | <ide><path>rxjava-core/src/main/java/rx/internal/operators/BufferUntilSubscriber.java
<ide> boolean casFirst(int expected, int next) {
<ide> void setObserverRef(Observer<? super T> o) {
<ide> observerRef = o;
<ide> }
<del> boolean casObserverRef(Observer<? super T> expected, Observer<? super T> next) {
<del> return OBSERVER_UPDATER.compareAndSet(this, expected, next);
<del> }
<ide> }
<ide>
<ide> static final class OnSubscribeAction<T> implements OnSubscribe<T> {
<ide> private void drainIfNeededAndSwitchToActual() {
<ide> }
<ide> // now we can safely change over to the actual and get rid of the pass-thru
<ide> // but only if not unsubscribed
<del> state.casObserverRef(this, actual);
<add> state.setObserverRef(actual);
<ide> }
<ide>
<ide> } | 1 |
PHP | PHP | apply fixes from styleci | a4f7e9bfd2a7d9453ea2b142aa486b2ba29df7e1 | <ide><path>tests/Database/DatabaseMySqlSchemaGrammarTest.php
<ide> public function testAutoIncrementStartingValue()
<ide>
<ide> $this->assertCount(2, $statements);
<ide> $this->assertSame("create table `users` (`id` int unsigned not null auto_increment primary key, `email` varchar(255) not null) default character set utf8 collate 'utf8_unicode_ci'", $statements[0]);
<del> $this->assertSame("alter table `users` auto_increment = 1000", $statements[1]);
<add> $this->assertSame('alter table `users` auto_increment = 1000', $statements[1]);
<ide> }
<ide>
<ide> public function testEngineCreateTable() | 1 |
Javascript | Javascript | fix allocunsafe uninitialized buffer check | eb49d596b4fb910093ae59387b4a2dfc6d0bdd4a | <ide><path>test/parallel/test-buffer-bindingobj-no-zerofill.js
<ide> const monkeyPatchedBuffer = require('../../lib/buffer');
<ide> // possible that a segment of memory is already zeroed out, so try again and
<ide> // again until we succeed or we time out.
<ide> let uninitialized = buffer.Buffer.allocUnsafe(1024);
<del>while (uninitialized.some((val) => val !== 0))
<add>while (uninitialized.every((val) => val === 0))
<ide> uninitialized = buffer.Buffer.allocUnsafe(1024);
<ide>
<ide> // On monkeypatched buffer, zeroFill property is undefined. allocUnsafe() should | 1 |
Python | Python | fix deprecation messages for sftphook | 1c113d6b2fe2f24a71f0a2c3911e8141fa6e3ff9 | <ide><path>airflow/providers/sftp/hooks/sftp.py
<ide> def __init__(
<ide> extra_options = conn.extra_dejson
<ide>
<ide> # For backward compatibility
<del> # TODO: remove in Airflow 2.1
<add> # TODO: remove in the next major provider release.
<ide>
<ide> if 'private_key_pass' in extra_options:
<ide> warnings.warn(
<ide> 'Extra option `private_key_pass` is deprecated.'
<ide> 'Please use `private_key_passphrase` instead.'
<ide> '`private_key_passphrase` will precede if both options are specified.'
<del> 'The old option `private_key_pass` will be removed in Airflow 2.1',
<add> 'The old option `private_key_pass` will be removed in a future release.',
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide> def __init__(
<ide> warnings.warn(
<ide> 'Extra option `ignore_hostkey_verification` is deprecated.'
<ide> 'Please use `no_host_key_check` instead.'
<del> 'This option will be removed in Airflow 2.1',
<add> 'This option will be removed in a future release.',
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> ) | 1 |
PHP | PHP | apply fixes from styleci | 91543c185366dbef4749aa3552e186ce9190c78c | <ide><path>src/Illuminate/Foundation/Providers/ArtisanServiceProvider.php
<ide> use Illuminate\Queue\Console\FailedTableCommand;
<ide> use Illuminate\Foundation\Console\AppNameCommand;
<ide> use Illuminate\Foundation\Console\JobMakeCommand;
<del>use Illuminate\Foundation\Console\RuleMakeCommand;
<ide> use Illuminate\Database\Console\Seeds\SeedCommand;
<ide> use Illuminate\Foundation\Console\MailMakeCommand;
<ide> use Illuminate\Foundation\Console\OptimizeCommand;
<add>use Illuminate\Foundation\Console\RuleMakeCommand;
<ide> use Illuminate\Foundation\Console\TestMakeCommand;
<ide> use Illuminate\Foundation\Console\EventMakeCommand;
<ide> use Illuminate\Foundation\Console\ModelMakeCommand; | 1 |
Javascript | Javascript | remove array creation from `getbreaks` | 3e937e9dc08dc48026056e6c689025aa954a5edb | <ide><path>examples/jsm/csm/CSM.js
<ide> const _lightSpaceFrustum = new Frustum();
<ide> const _frustum = new Frustum();
<ide> const _center = new Vector3();
<ide> const _bbox = new FrustumBoundingBox();
<add>const _uniformArray = [];
<add>const _logArray = [];
<ide>
<ide> export default class CSM {
<ide>
<ide> export default class CSM {
<ide> this.customSplitsCallback = data.customSplitsCallback;
<ide> this.mainFrustum = new Frustum();
<ide> this.frustums = [];
<add> this.breaks = [];
<ide>
<ide> this.lights = [];
<ide> this.materials = [];
<ide> export default class CSM {
<ide>
<ide> const camera = this.camera;
<ide> const far = Math.min(camera.far, this.maxFar);
<del> this.breaks = [];
<add> this.breaks.length = 0;
<ide>
<ide> switch ( this.mode ) {
<ide>
<ide> case 'uniform':
<del> this.breaks = uniformSplit( this.cascades, camera.near, far );
<add> uniformSplit( this.cascades, camera.near, far, this.breaks );
<ide> break;
<ide> case 'logarithmic':
<del> this.breaks = logarithmicSplit( this.cascades, camera.near, far );
<add> logarithmicSplit( this.cascades, camera.near, far, this.breaks );
<ide> break;
<ide> case 'practical':
<del> this.breaks = practicalSplit( this.cascades, camera.near, far, 0.5 );
<add> practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
<ide> break;
<ide> case 'custom':
<ide> if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
<del> this.breaks = this.customSplitsCallback( this.cascades, camera.near, far );
<add> this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
<ide> break;
<ide>
<ide> }
<ide>
<del> function uniformSplit( amount, near, far ) {
<del>
<del> const r = [];
<add> function uniformSplit( amount, near, far, target ) {
<ide>
<ide> for ( let i = 1; i < amount; i ++ ) {
<ide>
<del> r.push( ( near + ( far - near ) * i / amount ) / far );
<add> target.push( ( near + ( far - near ) * i / amount ) / far );
<ide>
<ide> }
<ide>
<del> r.push( 1 );
<del> return r;
<add> target.push( 1 );
<ide>
<ide> }
<ide>
<del> function logarithmicSplit( amount, near, far ) {
<del>
<del> const r = [];
<add> function logarithmicSplit( amount, near, far, target ) {
<ide>
<ide> for ( let i = 1; i < amount; i ++ ) {
<ide>
<del> r.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
<add> target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
<ide>
<ide> }
<ide>
<del> r.push( 1 );
<del> return r;
<add> target.push( 1 );
<ide>
<ide> }
<ide>
<del> function practicalSplit( amount, near, far, lambda ) {
<add> function practicalSplit( amount, near, far, lambda, target ) {
<ide>
<del> const log = logarithmicSplit( amount, near, far );
<del> const uni = uniformSplit( amount, near, far );
<del> const r = [];
<add> _uniformArray.length = 0;
<add> _logArray.length = 0;
<add> const log = logarithmicSplit( amount, near, far, _logArray );
<add> const uni = uniformSplit( amount, near, far, _uniformArray );
<ide>
<ide> for ( let i = 1; i < amount; i ++ ) {
<ide>
<del> r.push( MathUtils.lerp( uni[ i - 1 ], log[ i - 1 ], lambda ) );
<add> target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
<ide>
<ide> }
<ide>
<del> r.push( 1 );
<del> return r;
<add> target.push( 1 );
<ide>
<ide> }
<ide> | 1 |
Mixed | Go | support node label update | e1165cdfd1d666b1d4e041ef6a0d5fd049e041c1 | <ide><path>api/client/node/accept.go
<ide> func newAcceptCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> }
<ide>
<ide> func runAccept(dockerCli *client.DockerCli, nodes []string) error {
<del> accept := func(node *swarm.Node) {
<add> accept := func(node *swarm.Node) error {
<ide> node.Spec.Membership = swarm.NodeMembershipAccepted
<add> return nil
<ide> }
<ide> success := func(nodeID string) {
<ide> fmt.Fprintf(dockerCli.Out(), "Node %s accepted in the swarm.\n", nodeID)
<ide><path>api/client/node/demote.go
<ide> func newDemoteCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> }
<ide>
<ide> func runDemote(dockerCli *client.DockerCli, nodes []string) error {
<del> demote := func(node *swarm.Node) {
<add> demote := func(node *swarm.Node) error {
<ide> node.Spec.Role = swarm.NodeRoleWorker
<add> return nil
<ide> }
<ide> success := func(nodeID string) {
<ide> fmt.Fprintf(dockerCli.Out(), "Manager %s demoted in the swarm.\n", nodeID)
<ide><path>api/client/node/opts.go
<ide> import (
<ide> "fmt"
<ide> "strings"
<ide>
<add> "github.com/docker/docker/opts"
<add> runconfigopts "github.com/docker/docker/runconfig/opts"
<ide> "github.com/docker/engine-api/types/swarm"
<ide> )
<ide>
<ide> type nodeOptions struct {
<add> annotations
<ide> role string
<ide> membership string
<ide> availability string
<ide> }
<ide>
<add>type annotations struct {
<add> name string
<add> labels opts.ListOpts
<add>}
<add>
<add>func newNodeOptions() *nodeOptions {
<add> return &nodeOptions{
<add> annotations: annotations{
<add> labels: opts.NewListOpts(nil),
<add> },
<add> }
<add>}
<add>
<ide> func (opts *nodeOptions) ToNodeSpec() (swarm.NodeSpec, error) {
<ide> var spec swarm.NodeSpec
<ide>
<add> spec.Annotations.Name = opts.annotations.name
<add> spec.Annotations.Labels = runconfigopts.ConvertKVStringsToMap(opts.annotations.labels.GetAll())
<add>
<ide> switch swarm.NodeRole(strings.ToLower(opts.role)) {
<ide> case swarm.NodeRoleWorker:
<ide> spec.Role = swarm.NodeRoleWorker
<ide><path>api/client/node/promote.go
<ide> func newPromoteCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> }
<ide>
<ide> func runPromote(dockerCli *client.DockerCli, nodes []string) error {
<del> promote := func(node *swarm.Node) {
<add> promote := func(node *swarm.Node) error {
<ide> node.Spec.Role = swarm.NodeRoleManager
<add> return nil
<ide> }
<ide> success := func(nodeID string) {
<ide> fmt.Fprintf(dockerCli.Out(), "Node %s promoted to a manager in the swarm.\n", nodeID)
<ide><path>api/client/node/update.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/api/client"
<ide> "github.com/docker/docker/cli"
<add> "github.com/docker/docker/opts"
<add> runconfigopts "github.com/docker/docker/runconfig/opts"
<ide> "github.com/docker/engine-api/types/swarm"
<ide> "github.com/spf13/cobra"
<ide> "github.com/spf13/pflag"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide> func newUpdateCommand(dockerCli *client.DockerCli) *cobra.Command {
<del> var opts nodeOptions
<add> nodeOpts := newNodeOptions()
<ide>
<ide> cmd := &cobra.Command{
<ide> Use: "update [OPTIONS] NODE",
<ide> func newUpdateCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> }
<ide>
<ide> flags := cmd.Flags()
<del> flags.StringVar(&opts.role, flagRole, "", "Role of the node (worker/manager)")
<del> flags.StringVar(&opts.membership, flagMembership, "", "Membership of the node (accepted/rejected)")
<del> flags.StringVar(&opts.availability, flagAvailability, "", "Availability of the node (active/pause/drain)")
<add> flags.StringVar(&nodeOpts.role, flagRole, "", "Role of the node (worker/manager)")
<add> flags.StringVar(&nodeOpts.membership, flagMembership, "", "Membership of the node (accepted/rejected)")
<add> flags.StringVar(&nodeOpts.availability, flagAvailability, "", "Availability of the node (active/pause/drain)")
<add> flags.Var(&nodeOpts.annotations.labels, flagLabelAdd, "Add or update a node label (key=value)")
<add> labelKeys := opts.NewListOpts(nil)
<add> flags.Var(&labelKeys, flagLabelRemove, "Remove a node label if exists")
<ide> return cmd
<ide> }
<ide>
<ide> func runUpdate(dockerCli *client.DockerCli, flags *pflag.FlagSet, nodeID string)
<ide> return updateNodes(dockerCli, []string{nodeID}, mergeNodeUpdate(flags), success)
<ide> }
<ide>
<del>func updateNodes(dockerCli *client.DockerCli, nodes []string, mergeNode func(node *swarm.Node), success func(nodeID string)) error {
<add>func updateNodes(dockerCli *client.DockerCli, nodes []string, mergeNode func(node *swarm.Node) error, success func(nodeID string)) error {
<ide> client := dockerCli.Client()
<ide> ctx := context.Background()
<ide>
<ide> func updateNodes(dockerCli *client.DockerCli, nodes []string, mergeNode func(nod
<ide> return err
<ide> }
<ide>
<del> mergeNode(&node)
<add> err = mergeNode(&node)
<add> if err != nil {
<add> return err
<add> }
<ide> err = client.NodeUpdate(ctx, node.ID, node.Version, node.Spec)
<ide> if err != nil {
<ide> return err
<ide> func updateNodes(dockerCli *client.DockerCli, nodes []string, mergeNode func(nod
<ide> return nil
<ide> }
<ide>
<del>func mergeNodeUpdate(flags *pflag.FlagSet) func(*swarm.Node) {
<del> return func(node *swarm.Node) {
<add>func mergeNodeUpdate(flags *pflag.FlagSet) func(*swarm.Node) error {
<add> return func(node *swarm.Node) error {
<ide> spec := &node.Spec
<ide>
<ide> if flags.Changed(flagRole) {
<del> str, _ := flags.GetString(flagRole)
<add> str, err := flags.GetString(flagRole)
<add> if err != nil {
<add> return err
<add> }
<ide> spec.Role = swarm.NodeRole(str)
<ide> }
<ide> if flags.Changed(flagMembership) {
<del> str, _ := flags.GetString(flagMembership)
<add> str, err := flags.GetString(flagMembership)
<add> if err != nil {
<add> return err
<add> }
<ide> spec.Membership = swarm.NodeMembership(str)
<ide> }
<ide> if flags.Changed(flagAvailability) {
<del> str, _ := flags.GetString(flagAvailability)
<add> str, err := flags.GetString(flagAvailability)
<add> if err != nil {
<add> return err
<add> }
<ide> spec.Availability = swarm.NodeAvailability(str)
<ide> }
<add> if spec.Annotations.Labels == nil {
<add> spec.Annotations.Labels = make(map[string]string)
<add> }
<add> if flags.Changed(flagLabelAdd) {
<add> labels := flags.Lookup(flagLabelAdd).Value.(*opts.ListOpts).GetAll()
<add> for k, v := range runconfigopts.ConvertKVStringsToMap(labels) {
<add> spec.Annotations.Labels[k] = v
<add> }
<add> }
<add> if flags.Changed(flagLabelRemove) {
<add> keys := flags.Lookup(flagLabelRemove).Value.(*opts.ListOpts).GetAll()
<add> for _, k := range keys {
<add> // if a key doesn't exist, fail the command explicitly
<add> if _, exists := spec.Annotations.Labels[k]; !exists {
<add> return fmt.Errorf("key %s doesn't exist in node's labels", k)
<add> }
<add> delete(spec.Annotations.Labels, k)
<add> }
<add> }
<add> return nil
<ide> }
<ide> }
<ide>
<ide> const (
<ide> flagRole = "role"
<ide> flagMembership = "membership"
<ide> flagAvailability = "availability"
<add> flagLabelAdd = "label-add"
<add> flagLabelRemove = "label-rm"
<ide> )
<ide><path>docs/reference/commandline/node_update.md
<ide> Update a node
<ide> Options:
<ide> --availability string Availability of the node (active/pause/drain)
<ide> --help Print usage
<add> --label-add value Add or update a node label (key=value) (default [])
<add> --label-rm value Remove a node label if exists (default [])
<ide> --membership string Membership of the node (accepted/rejected)
<ide> --role string Role of the node (worker/manager)
<ide> ``` | 6 |
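For reference, a hypothetical invocation of the two new flags (the node name and label keys below are made up, not from the commit). Note that `--label-rm` now fails explicitly when the key is not present on the node, per the added check in `mergeNodeUpdate`:

```console
$ docker node update --label-add datacenter=east --label-add rack=12 worker-1
$ docker node update --label-rm rack worker-1
$ docker node update --label-rm no-such-key worker-1   # errors: key doesn't exist in node's labels
```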
PHP | PHP | fix typo in urlgenerator | 66ff9dfa46af106139f1a1f727f9ed051f494833 | <ide><path>src/Illuminate/Routing/UrlGenerator.php
<ide> protected function replaceNamedParameters($path, &$parameters)
<ide> */
<ide> protected function addQueryString($uri, array $parameters)
<ide> {
<del> // If the URI has a fragmnet, we will move it to the end of the URI since it will
<add> // If the URI has a fragment, we will move it to the end of the URI since it will
<ide> // need to come after any query string that may be added to the URL else it is
<ide> // not going to be available. We will remove it then append it back on here.
<ide> if ( ! is_null($fragment = parse_url($uri, PHP_URL_FRAGMENT))) | 1 |
PHP | PHP | update email.blade.php | c9820088e9a785639299e8569cce33592b485b0b | <ide><path>src/Illuminate/Notifications/resources/views/email.blade.php
<ide> @component('mail::subcopy')
<ide> @lang(
<ide> "If you’re having trouble clicking the \":actionText\" button, copy and paste the URL below\n".
<del> 'into your web browser: ',
<add> 'into your web browser: [:actionURL](:actionURL)',
<ide> [
<del> 'actionText' => $actionText
<add> 'actionText' => $actionText,
<add> 'actionUrl' => $actionUrl
<ide> ]
<ide> )
<del>[{{ $actionUrl }}]({!! $actionUrl !!})
<ide> @endcomponent
<ide> @endisset
<ide> @endcomponent | 1 |
PHP | PHP | use the slice method | c72dc3aa2f5d072c5b6a769b4683730bf7da7179 | <ide><path>src/Illuminate/Support/Collection.php
<ide> public function merge($items)
<ide> */
<ide> public function forPage($page, $perPage)
<ide> {
<del> return new static(array_slice($this->items, ($page - 1) * $perPage, $perPage));
<add> return $this->slice(($page - 1) * $perPage, $perPage);
<ide> }
<ide>
<ide> /** | 1 |
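A minimal usage sketch of `forPage()` (assumes `illuminate/support` is installed via Composer); the change above only swaps the internal `array_slice()` call for the collection's own `slice()` method, so the observable result stays the same:

```php
<?php
require 'vendor/autoload.php';

use Illuminate\Support\Collection;

$items = new Collection(range(1, 10));

// Page 2 with 3 items per page -> the 4th, 5th and 6th items.
var_dump($items->forPage(2, 3)->values()->all()); // [4, 5, 6]
```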
Ruby | Ruby | fix syntax warning | db2552828b9008d848e55b5e9e1442521d7ce593 | <ide><path>Library/Homebrew/test/test_integration_cmds.rb
<ide> def test_cellar_formula
<ide> end
<ide>
<ide> def test_env
<del> assert_match /CMAKE_PREFIX_PATH="#{HOMEBREW_PREFIX}[:"]/,
<add> assert_match %r{CMAKE_PREFIX_PATH="#{HOMEBREW_PREFIX}[:"]},
<ide> cmd("--env")
<ide> end
<ide> | 1 |
Ruby | Ruby | fix preload with nested associations | 572dcdd7e858f126848bdf4f2be0f5cb0de7c026 | <ide><path>activerecord/lib/active_record/associations/preloader/through_association.rb
<ide> def run(preloader)
<ide>
<ide> owners.each do |owner|
<ide> through_records = Array(owner.association(through_reflection.name).target)
<add>
<ide> if already_loaded
<ide> if source_type = reflection.options[:source_type]
<ide> through_records = through_records.select do |record|
<ide> def run(preloader)
<ide> else
<ide> owner.association(through_reflection.name).reset if through_scope
<ide> end
<add>
<ide> result = through_records.flat_map do |record|
<ide> record.association(source_reflection.name).target
<ide> end
<add>
<ide> result.compact!
<ide> result.sort_by! { |rhs| preload_index[rhs] } if scope.order_values.any?
<ide> result.uniq! if scope.distinct_value
<ide> associate_records_to_owner(owner, result)
<ide> end
<add>
<ide> unless scope.empty_scope?
<ide> middle_records.each do |owner|
<del> owner.association(source_reflection.name).reset
<add> owner.association(source_reflection.name).reset if owner
<ide> end
<ide> end
<ide> end
<ide><path>activerecord/test/cases/associations/has_many_through_associations_test.rb
<ide> def test_marshal_dump
<ide> assert_equal preloaded, Marshal.load(Marshal.dump(preloaded))
<ide> end
<ide>
<add> def test_preload_with_nested_association
<add> posts = Post.preload(:author, :author_favorites).to_a
<add>
<add> assert_no_queries do
<add> posts.each(&:author)
<add> posts.each(&:author_favorites)
<add> end
<add> end
<add>
<ide> def test_preload_sti_rhs_class
<ide> developers = Developer.includes(:firms).all.to_a
<ide> assert_no_queries do
<ide><path>activerecord/test/models/author.rb
<ide> def self.destroyed_author_address_ids
<ide> end
<ide>
<ide> class AuthorFavorite < ActiveRecord::Base
<add> default_scope { order(id: :asc) }
<add>
<ide> belongs_to :author
<ide> belongs_to :favorite_author, class_name: "Author"
<ide> end | 3 |
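A standalone sketch of the preload pattern exercised by the regression test above, loosely modelled on its fixtures (assumes the `activerecord` and `sqlite3` gems; table and model names are illustrative only):

```ruby
require "active_record"

ActiveRecord::Base.establish_connection(adapter: "sqlite3", database: ":memory:")

ActiveRecord::Schema.define do
  create_table(:authors) { |t| t.string :name }
  create_table(:author_favorites) { |t| t.references :author }
  create_table(:posts) { |t| t.references :author }
end

class Author < ActiveRecord::Base
  has_many :posts
  has_many :author_favorites
end

class AuthorFavorite < ActiveRecord::Base
  belongs_to :author
end

class Post < ActiveRecord::Base
  belongs_to :author, optional: true
  has_many :author_favorites, through: :author
end

author = Author.create!(name: "a")
author.author_favorites.create!
author.posts.create!
Post.create! # a post without an author

# Both associations are loaded up front; walking them afterwards should
# neither raise nor run additional queries once the fix is applied.
Post.preload(:author, :author_favorites).each do |post|
  post.author
  post.author_favorites.to_a
end
```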
Python | Python | set version to v2.3.5 | 7b277661f6cc91765dbe8891b230bb0dd8bdb53f | <ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy"
<del>__version__ = "2.3.4"
<add>__version__ = "2.3.5"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json" | 1 |
Text | Text | fix spelling of api name in 10.0.0 changelog | ad5307f1b4bc2d6824207717a2964c917aff1cb9 | <ide><path>doc/changelogs/CHANGELOG_V10.md
<ide> * EventEmitter
<ide> * The `EventEmitter.prototype.off()` method has been added as an alias for `EventEmitter.prototype.removeListener()`. [[`3bb6f07d52`](https://github.com/nodejs/node/commit/3bb6f07d52)]
<ide> * File System
<del> * The `fs.promises` API provides experimental promisified versions of the `fs` functions. [[`329fc78e49`](https://github.com/nodejs/node/commit/329fc78e49)]
<add> * The `fs/promises` API provides experimental promisified versions of the `fs` functions. [[`329fc78e49`](https://github.com/nodejs/node/commit/329fc78e49)]
<ide> * Invalid path errors are now thrown synchronously. [[`d8f73385e2`](https://github.com/nodejs/node/commit/d8f73385e2)]
<ide> * The `fs.readFile()` method now partitions reads to avoid thread pool exhaustion. [[`67a4ce1c6e`](https://github.com/nodejs/node/commit/67a4ce1c6e)]
<ide> * HTTP
<ide> * Timers
<ide> * The `enroll()` and `unenroll()` methods have been deprecated. [[`68783ae0b8`](https://github.com/nodejs/node/commit/68783ae0b8)]
<ide> * TLS
<del> * The `tls.convertNONProtocols()` method has been deprecated. [[`9204a0db6e`](https://github.com/nodejs/node/commit/9204a0db6e)]
<add> * The `tls.convertNPNProtocols()` method has been deprecated. [[`9204a0db6e`](https://github.com/nodejs/node/commit/9204a0db6e)]
<ide> * Support for NPN (next protocol negotiation) has been dropped. [[`5bfbe5ceae`](https://github.com/nodejs/node/commit/5bfbe5ceae)]
<ide> * The `ecdhCurve` default is now `'auto'`. [[`af78840b19`](https://github.com/nodejs/node/commit/af78840b19)]
<ide> * Trace Events | 1 |
PHP | PHP | fix getoriginal() not preserving nulls | 9515d83d1e3023bdee87ecc93ea6231b5af8c577 | <ide><path>src/Datasource/EntityTrait.php
<ide> public function set($property, $value = null, array $options = [])
<ide>
<ide> $this->dirty($p, true);
<ide>
<del> if (!isset($this->_original[$p]) &&
<del> isset($this->_properties[$p]) &&
<add> if (!array_key_exists($p, $this->_original) &&
<add> array_key_exists($p, $this->_properties) &&
<ide> $this->_properties[$p] !== $value
<ide> ) {
<ide> $this->_original[$p] = $this->_properties[$p];
<ide> public function getOriginal($property)
<ide> if (!strlen((string)$property)) {
<ide> throw new InvalidArgumentException('Cannot get an empty property');
<ide> }
<del> if (isset($this->_original[$property])) {
<add> if (array_key_exists($property, $this->_original)) {
<ide> return $this->_original[$property];
<ide> }
<ide> return $this->get($property);
<ide> public function extractOriginal(array $properties)
<ide> {
<ide> $result = [];
<ide> foreach ($properties as $property) {
<del> $original = $this->getOriginal($property);
<del> if ($original !== null) {
<del> $result[$property] = $original;
<del> }
<add> $result[$property] = $this->getOriginal($property);
<ide> }
<ide> return $result;
<ide> }
<ide><path>tests/TestCase/ORM/EntityTest.php
<ide> public function testSetMultiplePropertiesNoSetters()
<ide> $this->assertEquals(1, $entity->getOriginal('id'));
<ide> }
<ide>
<add> /**
<add> * Test that getOriginal() retains falsey values.
<add> *
<add> * @return void
<add> */
<add> public function testGetOriginal()
<add> {
<add> $entity = new Entity(
<add> ['false' => false, 'null' => null, 'zero' => 0, 'empty' => ''],
<add> ['markNew' => true]
<add> );
<add> $this->assertNull($entity->getOriginal('null'));
<add> $this->assertFalse($entity->getOriginal('false'));
<add> $this->assertSame(0, $entity->getOriginal('zero'));
<add> $this->assertSame('', $entity->getOriginal('empty'));
<add>
<add> $entity->set(['false' => 'y', 'null' => 'y', 'zero' => 'y', 'empty' => '']);
<add> $this->assertNull($entity->getOriginal('null'));
<add> $this->assertFalse($entity->getOriginal('false'));
<add> $this->assertSame(0, $entity->getOriginal('zero'));
<add> $this->assertSame('', $entity->getOriginal('empty'));
<add> }
<add>
<ide> /**
<ide> * Test extractOriginal()
<ide> *
<ide> public function testExtractOriginal()
<ide> $entity = new Entity([
<ide> 'id' => 1,
<ide> 'title' => 'original',
<del> 'body' => 'no'
<add> 'body' => 'no',
<add> 'null' => null,
<ide> ], ['markNew' => true]);
<ide> $entity->set('body', 'updated body');
<del> $result = $entity->extractOriginal(['id', 'title', 'body']);
<add> $result = $entity->extractOriginal(['id', 'title', 'body', 'null']);
<ide> $expected = [
<ide> 'id' => 1,
<ide> 'title' => 'original',
<del> 'body' => 'no'
<add> 'body' => 'no',
<add> 'null' => null,
<ide> ];
<ide> $this->assertEquals($expected, $result);
<ide>
<del> $result = $entity->extractOriginalChanged(['id', 'title', 'body']);
<add> $result = $entity->extractOriginalChanged(['id', 'title', 'body', 'null']);
<ide> $expected = [
<ide> 'body' => 'no',
<ide> ]; | 2 |
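A short sketch of the behaviour this fixes, based on the new test case (assumes the `cakephp/cakephp` ORM package):

```php
<?php
use Cake\ORM\Entity;

$entity = new Entity(['published' => null], ['markNew' => true]);
$entity->set('published', true);

var_dump($entity->getOriginal('published'));
// NULL — previously the null original was skipped by isset() and the
// current value (true) was returned instead.

var_dump($entity->extractOriginal(['published']));
// ['published' => NULL] — keys with null originals are no longer dropped.
```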
Javascript | Javascript | fix typos in reactserverrenderingtransaction.js | 7b0764b1d255e7d420a7d6d359ec93783076b207 | <ide><path>src/renderers/dom/server/ReactServerRenderingTransaction.js
<ide> var Mixin = {
<ide> * @see Transaction
<ide> * @abstract
<ide> * @final
<del> * @return {array} Empty list of operation wrap proceedures.
<add> * @return {array} Empty list of operation wrap procedures.
<ide> */
<ide> getTransactionWrappers: function() {
<ide> return TRANSACTION_WRAPPERS;
<ide> var Mixin = {
<ide>
<ide> /**
<ide> * `PooledClass` looks for this, and will invoke this before allowing this
<del> * instance to be resused.
<add> * instance to be reused.
<ide> */
<ide> destructor: function() {
<ide> CallbackQueue.release(this.reactMountReady); | 1 |
Ruby | Ruby | fix binary fixture test on windows | 4b7f95eb8b45a32c470a414c7f536fb8cb029bda | <ide><path>activerecord/test/cases/fixtures_test.rb
<ide> def test_subsubdir_file_with_arbitrary_name
<ide>
<ide> def test_binary_in_fixtures
<ide> assert_equal 1, @binaries.size
<del> data = File.read(ASSETS_ROOT + "/flowers.jpg")
<add> data = File.open(ASSETS_ROOT + "/flowers.jpg", 'rb') { |f| f.read }
<ide> data.force_encoding('ASCII-8BIT') if data.respond_to?(:force_encoding)
<ide> data.freeze
<ide> assert_equal data, @flowers.data | 1 |
Javascript | Javascript | support specularglossiness materials | 2dd56237b9fe14a03c0abfa67be0b5bfb03328ee | <ide><path>examples/js/exporters/GLTFExporter.js
<ide> THREE.GLTFExporter.prototype = {
<ide>
<ide> }
<ide>
<del> if ( material.isShaderMaterial ) {
<add> if ( material.isShaderMaterial && !material.isGLTFSpecularGlossinessMaterial ) {
<ide>
<ide> console.warn( 'GLTFExporter: THREE.ShaderMaterial not supported.' );
<ide> return null;
<ide> THREE.GLTFExporter.prototype = {
<ide>
<ide> }
<ide>
<add> if ( material.isGLTFSpecularGlossinessMaterial ) {
<add>
<add> var specularExtensionName = "KHR_materials_pbrSpecularGlossiness";
<add> var specularMapDef = material.specularMap ? { index: processTexture( material.specularMap ) } : undefined;
<add> var diffuseMapDef = material.map ? { index: processTexture( material.map ) } : undefined;
<add> if ( material.specularMap ) {
<add>
<add> applyTextureTransform( specularMapDef, material.specularMap );
<add>
<add> }
<add>
<add> // alpha default is 1, rgb will be overridden
<add> var diffuseFactor = [ 1, 1, 1, 1 ];
<add> material.color.toArray( diffuseFactor, 0 );
<add> // same story for specularFactor
<add> var specularFactor = [ 1, 1, 1 ];
<add> material.specular.toArray( specularFactor, 0 );
<add>
<add> gltfMaterial.extensions = {};
<add> gltfMaterial.extensions[ specularExtensionName ] = {
<add> "diffuseFactor": diffuseFactor,
<add> "diffuseTexture": diffuseMapDef,
<add> "specularFactor": specularFactor,
<add> "glossinessFactor": material.glossiness,
<add> "specularGlossinessTexture": specularMapDef
<add> };
<add> extensionsUsed[ specularExtensionName ] = true;
<add>
<add> }
<add>
<ide> // pbrMetallicRoughness.baseColorTexture
<ide> if ( material.map ) {
<ide>
<ide> THREE.GLTFExporter.prototype = {
<ide>
<ide> var normalMapDef = { index: processTexture( material.normalMap ) };
<ide>
<del> if ( material.normalScale.x !== - 1 ) {
<add> if ( material.normalScale && material.normalScale.x !== - 1 ) {
<ide>
<ide> if ( material.normalScale.x !== material.normalScale.y ) {
<ide> | 1 |
Python | Python | reset metric early | 5944a544f7f629ed44016028ccb11563c4f61143 | <ide><path>keras/engine/training.py
<ide> class during training. This can be useful to tell the model to "pay
<ide> self._assert_compile_was_called()
<ide> self._check_call_args('train_on_batch')
<ide> _disallow_inside_tf_function('train_on_batch')
<add> if reset_metrics:
<add> self.reset_metrics()
<ide> with self.distribute_strategy.scope(), \
<ide> training_utils.RespectCompiledTrainableState(self):
<ide> iterator = data_adapter.single_batch_iterator(self.distribute_strategy, x,
<ide> class during training. This can be useful to tell the model to "pay
<ide> self.train_function = self.make_train_function()
<ide> logs = self.train_function(iterator)
<ide>
<del> if reset_metrics:
<del> self.reset_metrics()
<ide> logs = tf_utils.sync_to_numpy_or_python_type(logs)
<ide> if return_dict:
<ide> return logs
<ide> def test_on_batch(self,
<ide> self._assert_compile_was_called()
<ide> self._check_call_args('test_on_batch')
<ide> _disallow_inside_tf_function('test_on_batch')
<add> if reset_metrics:
<add> self.reset_metrics()
<ide> with self.distribute_strategy.scope():
<ide> iterator = data_adapter.single_batch_iterator(self.distribute_strategy, x,
<ide> y, sample_weight)
<ide> self.test_function = self.make_test_function()
<ide> logs = self.test_function(iterator)
<ide>
<del> if reset_metrics:
<del> self.reset_metrics()
<ide> logs = tf_utils.sync_to_numpy_or_python_type(logs)
<ide> if return_dict:
<ide> return logs
<ide><path>keras/engine/training_test.py
<ide> def test_outputs_are_floats(self):
<ide> self.assertIsInstance(loss, float)
<ide> self.assertIsInstance(accuracy, float)
<ide>
<del> @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
<del> @unittest.expectedFailure
<del> def test_loss_acc_is_corrupted(self):
<del> batch_size = 32
<del> n_samples, n_features = batch_size * 10, 5
<del> rng = np.random.RandomState(0)
<del> x = rng.normal(size=(n_samples, n_features))
<del> y = rng.randint(low=0, high=2, size=x.shape[0])
<del> model = sequential.Sequential([layers_module.Dense(1,)])
<del> model.compile('adam', 'binary_crossentropy',
<del> metrics=['accuracy'],
<del> run_eagerly=testing_utils.should_run_eagerly())
<del> loss = {}
<del> accurancy = {}
<del> loss_1 = {}
<del> accurancy_1 = {}
<del> for i in range(3):
<del> loss[i], accurancy[i] = model.test_on_batch(x[:batch_size],
<del> y[:batch_size])
<del> model.evaluate(x,y, batch_size=batch_size, verbose=0)
<del> for i in range(3):
<del> loss_1[i], accurancy_1[i] = model.test_on_batch(x[:batch_size],
<del> y[:batch_size])
<del> self.assertAllEqual(loss, loss_1,
<del> "https://github.com/keras-team/keras/issues/14086")
<del> self.assertAllEqual(accurancy, accurancy_1,
<del> "https://github.com/keras-team/keras/issues/14086")
<del>
<ide> @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
<ide> def test_int_output(self):
<ide> x, y = np.ones((10, 1)), np.ones((10, 1))
<ide> def test_metric_state_reset_between_fit_and_evaluate(self):
<ide> model.evaluate(x_test, y_test, batch_size=5)
<ide> self.assertEqual(self.evaluate(acc_obj.count), 10)
<ide>
<add> @keras_parameterized.run_all_keras_modes
<add> def test_metric_state_reset_between_test_on_batch_and_evaluate(self):
<add> model = sequential.Sequential()
<add> model.add(layers_module.Dense(3, activation='relu', input_dim=4))
<add> model.add(layers_module.Dense(1, activation='sigmoid'))
<add> acc_obj = metrics_module.BinaryAccuracy()
<add> model.compile(
<add> loss='mae',
<add> metrics=[acc_obj],
<add> optimizer=RMSPropOptimizer(learning_rate=0.001),
<add> run_eagerly=testing_utils.should_run_eagerly())
<add>
<add> x_test = np.random.random((10, 4))
<add> y_test = np.random.random((10, 1))
<add> loss, acc = model.test_on_batch(x_test[:2],y_test[:2])
<add> model.evaluate(x_test, y_test)
<add> loss_1, acc_1 = model.test_on_batch(x_test[:2],y_test[:2])
<add> self.assertEqual(loss, loss_1)
<add> self.assertEqual(acc, acc_1)
<add>
<ide> @keras_parameterized.run_with_all_model_types(exclude_models=['sequential'])
<ide> @keras_parameterized.run_all_keras_modes
<ide> def test_metrics_valid_compile_input_formats(self): | 2 |
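A sketch of the scenario the new regression test covers (assumes a TensorFlow/Keras build that includes this change; on older versions the final check fails because `evaluate()` corrupted the metric state used by `test_on_batch()`):

```python
import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(3, activation="relu", input_shape=(4,)),
    tf.keras.layers.Dense(1, activation="sigmoid"),
])
model.compile(optimizer="rmsprop", loss="mae",
              metrics=[tf.keras.metrics.BinaryAccuracy()])

x = np.random.random((10, 4)).astype("float32")
y = np.random.random((10, 1)).astype("float32")

loss_a, acc_a = model.test_on_batch(x[:2], y[:2])
model.evaluate(x, y, verbose=0)          # must not leak state into the next call
loss_b, acc_b = model.test_on_batch(x[:2], y[:2])

# The original test asserts exact equality; isclose keeps the sketch robust.
assert np.isclose(loss_a, loss_b) and np.isclose(acc_a, acc_b)
```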
Java | Java | allow use of webfilters in routerfunction | 30bd3d873682734502554a25db398906215c81f1 | <ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/function/RouterFunctions.java
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.web.reactive.HandlerMapping;
<ide> import org.springframework.web.server.ServerWebExchange;
<add>import org.springframework.web.server.WebHandler;
<ide> import org.springframework.web.server.adapter.HttpWebHandlerAdapter;
<add>import org.springframework.web.server.adapter.WebHttpHandlerBuilder;
<ide>
<ide> /**
<ide> * <strong>Central entry point to Spring's functional web framework.</strong>
<ide> public static RouterFunction<Resource> resources(Function<ServerRequest, Optiona
<ide> /**
<ide> * Convert the given {@linkplain RouterFunction router function} into a {@link HttpHandler}.
<ide> * This conversion uses {@linkplain HandlerStrategies#builder() default strategies}.
<del> * <p>The returned {@code HttpHandler} can be adapted to run in
<add> * <p>The returned handler can be adapted to run in
<ide> * <ul>
<ide> * <li>Servlet 3.1+ using the
<ide> * {@link org.springframework.http.server.reactive.ServletHttpHandlerAdapter},</li>
<ide> public static RouterFunction<Resource> resources(Function<ServerRequest, Optiona
<ide> * <li>Undertow using the
<ide> * {@link org.springframework.http.server.reactive.UndertowHttpHandlerAdapter}.</li>
<ide> * </ul>
<add> * <p>Note that {@code HttpWebHandlerAdapter} also implements {@link WebHandler}, allowing
<add> * for additional filter and exception handler registration through
<add> * {@link WebHttpHandlerBuilder}.
<ide> * @param routerFunction the router function to convert
<ide> * @return an http handler that handles HTTP request using the given router function
<ide> */
<del> public static HttpHandler toHttpHandler(RouterFunction<?> routerFunction) {
<add> public static HttpWebHandlerAdapter toHttpHandler(RouterFunction<?> routerFunction) {
<ide> return toHttpHandler(routerFunction, HandlerStrategies.withDefaults());
<ide> }
<ide>
<ide> public static HttpHandler toHttpHandler(RouterFunction<?> routerFunction) {
<ide> * <li>Undertow using the
<ide> * {@link org.springframework.http.server.reactive.UndertowHttpHandlerAdapter}.</li>
<ide> * </ul>
<add> * <p>Note that {@code HttpWebHandlerAdapter} also implements {@link WebHandler}, allowing
<add> * for additional filter and exception handler registration through
<ide> * @param routerFunction the router function to convert
<ide> * @param strategies the strategies to use
<ide> * @return an http handler that handles HTTP request using the given router function
<ide> */
<del> public static HttpHandler toHttpHandler(RouterFunction<?> routerFunction, HandlerStrategies strategies) {
<add> public static HttpWebHandlerAdapter toHttpHandler(RouterFunction<?> routerFunction, HandlerStrategies strategies) {
<ide> Assert.notNull(routerFunction, "RouterFunction must not be null");
<ide> Assert.notNull(strategies, "HandlerStrategies must not be null");
<ide> | 1 |
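An illustrative sketch of what the changed return type enables (not code from the commit; the `WebHttpHandlerBuilder` method names follow the later Spring Framework 5 GA API and are an assumption against this pre-release code):

```java
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.web.reactive.function.RouterFunction;
import org.springframework.web.reactive.function.RouterFunctions;
import org.springframework.web.server.adapter.HttpWebHandlerAdapter;
import org.springframework.web.server.adapter.WebHttpHandlerBuilder;

class FilteredRouterExample {

    static HttpHandler toFilteredHttpHandler(RouterFunction<?> routerFunction) {
        // toHttpHandler(...) now returns HttpWebHandlerAdapter, which is also a
        // WebHandler, so it can be passed to WebHttpHandlerBuilder for WebFilter
        // and exception-handler registration.
        HttpWebHandlerAdapter webHandler = RouterFunctions.toHttpHandler(routerFunction);

        return WebHttpHandlerBuilder.webHandler(webHandler)
                .filter((exchange, chain) -> chain.filter(exchange)) // pass-through WebFilter
                .build();
    }
}
```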
PHP | PHP | implement timeagoinwords() for date | 1619c46cc3dffc52bc4facd9af3b9254cd7a89f1 | <ide><path>src/I18n/Date.php
<ide> class Date extends BaseDate implements JsonSerializable
<ide> * @see \Cake\I18n\DateFormatTrait::nice()
<ide> */
<ide> public static $niceFormat = [IntlDateFormatter::MEDIUM, -1];
<add>
<add> /**
<add> * The format to use when formatting a time using `Time::timeAgoInWords()`
<add> * and the difference is less than `Time::$wordEnd`
<add> *
<add> * @var array
<add> * @see \Cake\I18n\Date::timeAgoInWords()
<add> */
<add> public static $wordAccuracy = [
<add> 'year' => "day",
<add> 'month' => "day",
<add> 'week' => "day",
<add> 'day' => "day",
<add> 'hour' => "day",
<add> 'minute' => "day",
<add> 'second' => "day",
<add> ];
<add>
<add> /**
<add> * The end of relative time telling
<add> *
<add> * @var string
<add> * @see \Cake\I18n\Date::timeAgoInWords()
<add> */
<add> public static $wordEnd = '+1 month';
<add>
<add> /**
<add> * Returns either a relative or a formatted absolute date depending
<add> * on the difference between the current date and this object.
<add> *
<add> * ### Options:
<add> *
<add> * - `from` => another Date object representing the "now" date
<add> * - `format` => a fall back format if the relative time is longer than the duration specified by end
<add> * - `accuracy` => Specifies how accurate the date should be described (array)
<add> * - year => The format if years > 0 (default "day")
<add> * - month => The format if months > 0 (default "day")
<add> * - week => The format if weeks > 0 (default "day")
<add> * - day => The format if weeks > 0 (default "day")
<add> * - `end` => The end of relative date telling
<add> * - `relativeString` => The printf compatible string when outputting relative date
<add> * - `absoluteString` => The printf compatible string when outputting absolute date
<add> * - `timezone` => The user timezone the timestamp should be formatted in.
<add> *
<add> * Relative dates look something like this:
<add> *
<add> * - 3 weeks, 4 days ago
<add> * - 1 day ago
<add> *
<add> * Default date formatting is d/M/YY e.g: on 18/2/09. Formatting is done internally using
<add> * `i18nFormat`, see the method for the valid formatting strings.
<add> *
<add> * The returned string includes 'ago' or 'on' and assumes you'll properly add a word
<add> * like 'Posted ' before the function output.
<add> *
<add> * NOTE: If the difference is one week or more, the lowest level of accuracy is day.
<add> *
<add> * @param array $options Array of options.
<add> * @return string Relative time string.
<add> */
<add> public function timeAgoInWords(array $options = [])
<add> {
<add> $date = $this;
<add>
<add> $options += [
<add> 'from' => static::now(),
<add> 'timezone' => null,
<add> 'format' => static::$wordFormat,
<add> 'accuracy' => static::$wordAccuracy,
<add> 'end' => static::$wordEnd,
<add> 'relativeString' => __d('cake', '%s ago'),
<add> 'absoluteString' => __d('cake', 'on %s'),
<add> ];
<add> if (is_string($options['accuracy'])) {
<add> foreach (static::$wordAccuracy as $key => $level) {
<add> $options[$key] = $options['accuracy'];
<add> }
<add> } else {
<add> $options['accuracy'] += static::$wordAccuracy;
<add> }
<add> if ($options['timezone']) {
<add> $date = $date->timezone($options['timezone']);
<add> }
<add>
<add> $now = $options['from']->format('U');
<add> $inSeconds = $date->format('U');
<add> $backwards = ($inSeconds > $now);
<add>
<add> $futureTime = $now;
<add> $pastTime = $inSeconds;
<add> if ($backwards) {
<add> $futureTime = $inSeconds;
<add> $pastTime = $now;
<add> }
<add> $diff = $futureTime - $pastTime;
<add>
<add> if (!$diff) {
<add> return __d('cake', 'today');
<add> }
<add>
<add> if ($diff > abs($now - (new static($options['end']))->format('U'))) {
<add> return sprintf($options['absoluteString'], $date->i18nFormat($options['format']));
<add> }
<add>
<add> // If more than a week, then take into account the length of months
<add> if ($diff >= 604800) {
<add> list($future['H'], $future['i'], $future['s'], $future['d'], $future['m'], $future['Y']) = explode('/', date('H/i/s/d/m/Y', $futureTime));
<add>
<add> list($past['H'], $past['i'], $past['s'], $past['d'], $past['m'], $past['Y']) = explode('/', date('H/i/s/d/m/Y', $pastTime));
<add> $weeks = $days = $hours = $minutes = $seconds = 0;
<add>
<add> $years = $future['Y'] - $past['Y'];
<add> $months = $future['m'] + ((12 * $years) - $past['m']);
<add>
<add> if ($months >= 12) {
<add> $years = floor($months / 12);
<add> $months = $months - ($years * 12);
<add> }
<add> if ($future['m'] < $past['m'] && $future['Y'] - $past['Y'] === 1) {
<add> $years--;
<add> }
<add>
<add> if ($future['d'] >= $past['d']) {
<add> $days = $future['d'] - $past['d'];
<add> } else {
<add> $daysInPastMonth = date('t', $pastTime);
<add> $daysInFutureMonth = date('t', mktime(0, 0, 0, $future['m'] - 1, 1, $future['Y']));
<add>
<add> if (!$backwards) {
<add> $days = ($daysInPastMonth - $past['d']) + $future['d'];
<add> } else {
<add> $days = ($daysInFutureMonth - $past['d']) + $future['d'];
<add> }
<add>
<add> if ($future['m'] != $past['m']) {
<add> $months--;
<add> }
<add> }
<add>
<add> if (!$months && $years >= 1 && $diff < ($years * 31536000)) {
<add> $months = 11;
<add> $years--;
<add> }
<add>
<add> if ($months >= 12) {
<add> $years = $years + 1;
<add> $months = $months - 12;
<add> }
<add>
<add> if ($days >= 7) {
<add> $weeks = floor($days / 7);
<add> $days = $days - ($weeks * 7);
<add> }
<add> } else {
<add> $years = $months = $weeks = 0;
<add> $days = floor($diff / 86400);
<add>
<add> $diff = $diff - ($days * 86400);
<add>
<add> $hours = floor($diff / 3600);
<add> $diff = $diff - ($hours * 3600);
<add>
<add> $minutes = floor($diff / 60);
<add> $diff = $diff - ($minutes * 60);
<add> $seconds = $diff;
<add> }
<add>
<add> $fWord = $options['accuracy']['day'];
<add> if ($years > 0) {
<add> $fWord = $options['accuracy']['year'];
<add> } elseif (abs($months) > 0) {
<add> $fWord = $options['accuracy']['month'];
<add> } elseif (abs($weeks) > 0) {
<add> $fWord = $options['accuracy']['week'];
<add> } elseif (abs($days) > 0) {
<add> $fWord = $options['accuracy']['day'];
<add> }
<add>
<add> $fNum = str_replace(['year', 'month', 'week', 'day'], [1, 2, 3, 4], $fWord);
<add>
<add> $relativeDate = '';
<add> if ($fNum >= 1 && $years > 0) {
<add> $relativeDate .= ($relativeDate ? ', ' : '') . __dn('cake', '{0} year', '{0} years', $years, $years);
<add> }
<add> if ($fNum >= 2 && $months > 0) {
<add> $relativeDate .= ($relativeDate ? ', ' : '') . __dn('cake', '{0} month', '{0} months', $months, $months);
<add> }
<add> if ($fNum >= 3 && $weeks > 0) {
<add> $relativeDate .= ($relativeDate ? ', ' : '') . __dn('cake', '{0} week', '{0} weeks', $weeks, $weeks);
<add> }
<add> if ($fNum >= 4 && $days > 0) {
<add> $relativeDate .= ($relativeDate ? ', ' : '') . __dn('cake', '{0} day', '{0} days', $days, $days);
<add> }
<add>
<add> // When time has passed
<add> if (!$backwards && $relativeDate) {
<add> return sprintf($options['relativeString'], $relativeDate);
<add> }
<add> if (!$backwards) {
<add> $aboutAgo = [
<add> 'day' => __d('cake', 'about a day ago'),
<add> 'week' => __d('cake', 'about a week ago'),
<add> 'month' => __d('cake', 'about a month ago'),
<add> 'year' => __d('cake', 'about a year ago')
<add> ];
<add>
<add> return $aboutAgo[$fWord];
<add> }
<add>
<add> // When time is to come
<add> if (!$relativeDate) {
<add> $aboutIn = [
<add> 'day' => __d('cake', 'in about a day'),
<add> 'week' => __d('cake', 'in about a week'),
<add> 'month' => __d('cake', 'in about a month'),
<add> 'year' => __d('cake', 'in about a year')
<add> ];
<add>
<add> return $aboutIn[$fWord];
<add> }
<add>
<add> return $relativeDate;
<add> }
<ide> }
<ide><path>tests/TestCase/I18n/DateTest.php
<ide> public function testParseDateTime()
<ide> $date = Date::parseDate('13 10, 2015 12:54:12');
<ide> $this->assertEquals('2015-10-13 00:00:00', $date->format('Y-m-d H:i:s'));
<ide> }
<add>
<add> /**
<add> * provider for timeAgoInWords() tests
<add> *
<add> * @return array
<add> */
<add> public static function timeAgoProvider()
<add> {
<add> return [
<add> ['-12 seconds', 'today'],
<add> ['-12 minutes', 'today'],
<add> ['-2 hours', 'today'],
<add> ['-1 day', '1 day ago'],
<add> ['-2 days', '2 days ago'],
<add> ['-1 week', '1 week ago'],
<add> ['-2 weeks -2 days', '2 weeks, 2 days ago'],
<add> ['+1 second', 'today'],
<add> ['+1 minute, +10 seconds', 'today'],
<add> ['+1 week', '1 week'],
<add> ['+1 week 1 day', '1 week, 1 day'],
<add> ['+2 weeks 2 day', '2 weeks, 2 days'],
<add> ['2007-9-24', 'on 9/24/07'],
<add> ['now', 'today'],
<add> ];
<add> }
<add>
<add> /**
<add> * testTimeAgoInWords method
<add> *
<add> * @dataProvider timeAgoProvider
<add> * @return void
<add> */
<add> public function testTimeAgoInWords($input, $expected)
<add> {
<add> $date = new Date($input);
<add> $result = $date->timeAgoInWords();
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add> /**
<add> * test the timezone option for timeAgoInWords
<add> *
<add> * @return void
<add> */
<add> public function testTimeAgoInWordsTimezone()
<add> {
<add> $date = new Date('1990-07-31 20:33:00 UTC');
<add> $result = $date->timeAgoInWords(
<add> [
<add> 'timezone' => 'America/Vancouver',
<add> 'end' => '+1month',
<add> 'format' => 'dd-MM-YYYY'
<add> ]
<add> );
<add> $this->assertEquals('on 31-07-1990', $result);
<add> }
<add>
<add> /**
<add> * provider for timeAgo with an end date.
<add> *
<add> * @return void
<add> */
<add> public function timeAgoEndProvider()
<add> {
<add> return [
<add> [
<add> '+4 months +2 weeks +3 days',
<add> '4 months, 2 weeks, 3 days',
<add> '8 years'
<add> ],
<add> [
<add> '+4 months +2 weeks +1 day',
<add> '4 months, 2 weeks, 1 day',
<add> '8 years'
<add> ],
<add> [
<add> '+3 months +2 weeks',
<add> '3 months, 2 weeks',
<add> '8 years'
<add> ],
<add> [
<add> '+3 months +2 weeks +1 day',
<add> '3 months, 2 weeks, 1 day',
<add> '8 years'
<add> ],
<add> [
<add> '+1 months +1 week +1 day',
<add> '1 month, 1 week, 1 day',
<add> '8 years'
<add> ],
<add> [
<add> '+2 months +2 days',
<add> '2 months, 2 days',
<add> '+2 months +2 days'
<add> ],
<add> [
<add> '+2 months +12 days',
<add> '2 months, 1 week, 5 days',
<add> '3 months'
<add> ],
<add> ];
<add> }
<add>
<add> /**
<add> * test the end option for timeAgoInWords
<add> *
<add> * @dataProvider timeAgoEndProvider
<add> * @return void
<add> */
<add> public function testTimeAgoInWordsEnd($input, $expected, $end)
<add> {
<add> $time = new Date($input);
<add> $result = $time->timeAgoInWords(['end' => $end]);
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add> /**
<add> * test the custom string options for timeAgoInWords
<add> *
<add> * @return void
<add> */
<add> public function testTimeAgoInWordsCustomStrings()
<add> {
<add> $date = new Date('-8 years -4 months -2 weeks -3 days');
<add> $result = $date->timeAgoInWords([
<add> 'relativeString' => 'at least %s ago',
<add> 'accuracy' => ['year' => 'year'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = 'at least 8 years ago';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+4 months +2 weeks +3 days');
<add> $result = $date->timeAgoInWords([
<add> 'absoluteString' => 'exactly on %s',
<add> 'accuracy' => ['year' => 'year'],
<add> 'end' => '+2 months'
<add> ]);
<add> $expected = 'exactly on ' . date('n/j/y', strtotime('+4 months +2 weeks +3 days'));
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add> /**
<add> * Test the accuracy option for timeAgoInWords()
<add> *
<add> * @return void
<add> */
<add> public function testDateAgoInWordsAccuracy()
<add> {
<add> $date = new Date('+8 years +4 months +2 weeks +3 days');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => ['year' => 'year'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = '8 years';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+8 years +4 months +2 weeks +3 days');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => ['year' => 'month'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = '8 years, 4 months';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+8 years +4 months +2 weeks +3 days');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => ['year' => 'week'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = '8 years, 4 months, 2 weeks';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+8 years +4 months +2 weeks +3 days');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => ['year' => 'day'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = '8 years, 4 months, 2 weeks, 3 days';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+1 years +5 weeks');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => ['year' => 'year'],
<add> 'end' => '+10 years'
<add> ]);
<add> $expected = '1 year';
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('+23 hours');
<add> $result = $date->timeAgoInWords([
<add> 'accuracy' => 'day'
<add> ]);
<add> $expected = 'today';
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add> /**
<add> * Test the format option of timeAgoInWords()
<add> *
<add> * @return void
<add> */
<add> public function testDateAgoInWordsWithFormat()
<add> {
<add> $date = new Date('2007-9-25');
<add> $result = $date->timeAgoInWords(['format' => 'yyyy-MM-dd']);
<add> $this->assertEquals('on 2007-09-25', $result);
<add>
<add> $date = new Date('2007-9-25');
<add> $result = $date->timeAgoInWords(['format' => 'yyyy-MM-dd']);
<add> $this->assertEquals('on 2007-09-25', $result);
<add>
<add> $date = new Date('+2 weeks +2 days');
<add> $result = $date->timeAgoInWords(['format' => 'yyyy-MM-dd']);
<add> $this->assertRegExp('/^2 weeks, [1|2] day(s)?$/', $result);
<add>
<add> $date = new Date('+2 months +2 days');
<add> $result = $date->timeAgoInWords(['end' => '1 month', 'format' => 'yyyy-MM-dd']);
<add> $this->assertEquals('on ' . date('Y-m-d', strtotime('+2 months +2 days')), $result);
<add> }
<add>
<add> /**
<add> * test timeAgoInWords() with negative values.
<add> *
<add> * @return void
<add> */
<add> public function testDateAgoInWordsNegativeValues()
<add> {
<add> $date = new Date('-2 months -2 days');
<add> $result = $date->timeAgoInWords(['end' => '3 month']);
<add> $this->assertEquals('2 months, 2 days ago', $result);
<add>
<add> $date = new Date('-2 months -2 days');
<add> $result = $date->timeAgoInWords(['end' => '3 month']);
<add> $this->assertEquals('2 months, 2 days ago', $result);
<add>
<add> $date = new Date('-2 months -2 days');
<add> $result = $date->timeAgoInWords(['end' => '1 month', 'format' => 'yyyy-MM-dd']);
<add> $this->assertEquals('on ' . date('Y-m-d', strtotime('-2 months -2 days')), $result);
<add>
<add> $date = new Date('-2 years -5 months -2 days');
<add> $result = $date->timeAgoInWords(['end' => '3 years']);
<add> $this->assertEquals('2 years, 5 months, 2 days ago', $result);
<add>
<add> $date = new Date('-2 weeks -2 days');
<add> $result = $date->timeAgoInWords(['format' => 'yyyy-MM-dd']);
<add> $this->assertEquals('2 weeks, 2 days ago', $result);
<add>
<add> $date = new Date('-3 years -12 months');
<add> $result = $date->timeAgoInWords();
<add> $expected = 'on ' . $date->format('n/j/y');
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('-1 month -1 week -6 days');
<add> $result = $date->timeAgoInWords(
<add> ['end' => '1 year', 'accuracy' => ['month' => 'month']]
<add> );
<add> $this->assertEquals('1 month ago', $result);
<add>
<add> $date = new Date('-1 years -2 weeks -3 days');
<add> $result = $date->timeAgoInWords(
<add> ['accuracy' => ['year' => 'year']]
<add> );
<add> $expected = 'on ' . $date->format('n/j/y');
<add> $this->assertEquals($expected, $result);
<add>
<add> $date = new Date('-13 months -5 days');
<add> $result = $date->timeAgoInWords(['end' => '2 years']);
<add> $this->assertEquals('1 year, 1 month, 5 days ago', $result);
<add>
<add> $date = new Date('-23 hours');
<add> $result = $date->timeAgoInWords(['accuracy' => 'day']);
<add> $this->assertEquals('today', $result);
<add> }
<ide> } | 2 |
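A usage sketch for the new method; the expected strings are taken directly from the test cases above (assumes the `cakephp/cakephp` package and its default locale):

```php
<?php
use Cake\I18n\Date;

echo (new Date('-1 day'))->timeAgoInWords();
// "1 day ago"

echo (new Date('-2 months -2 days'))->timeAgoInWords(['end' => '3 month']);
// "2 months, 2 days ago"

echo (new Date('2007-09-24'))->timeAgoInWords();
// "on 9/24/07" — beyond the default one-month window the absolute format is used
```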
Text | Text | add 1.13.x changelog to master | ce29081d80b2893be84d495c61fba7ba4f7b8c3c | <ide><path>CHANGELOG.md
<ide> information on the list of deprecated flags and APIs please have a look at
<ide> https://docs.docker.com/engine/deprecated/ where target removal dates can also
<ide> be found.
<ide>
<add>## 1.13.1 (2017-02-08)
<add>
<add>**IMPORTANT**: On Linux distributions where `devicemapper` was the default storage driver,
<add>the `overlay2`, or `overlay` is now used by default (if the kernel supports it).
<add>To use devicemapper, you can manually configure the storage driver to use through
<add>the `--storage-driver` daemon option, or by setting "storage-driver" in the `daemon.json`
<add>configuration file.
<add>
<add>**IMPORTANT**: In Docker 1.13, the managed plugin api changed, as compared to the experimental
<add>version introduced in Docker 1.12. You must **uninstall** plugins which you installed with Docker 1.12
<add>_before_ upgrading to Docker 1.13. You can uninstall plugins using the `docker plugin rm` command.
<add>
<add>If you have already upgraded to Docker 1.13 without uninstalling
<add>previously-installed plugins, you may see this message when the Docker daemon
<add>starts:
<add>
<add> Error starting daemon: json: cannot unmarshal string into Go value of type types.PluginEnv
<add>
<add>To manually remove all plugins and resolve this problem, take the following steps:
<add>
<add>1. Remove plugins.json from: `/var/lib/docker/plugins/`.
<add>2. Restart Docker. Verify that the Docker daemon starts with no errors.
<add>3. Reinstall your plugins.
<add>
<add>### Contrib
<add>
<add>* Do not require a custom build of tini [#28454](https://github.com/docker/docker/pull/28454)
<add>* Upgrade to Go 1.7.5 [#30489](https://github.com/docker/docker/pull/30489)
<add>
<add>### Remote API (v1.26) & Client
<add>
<add>+ Support secrets in docker stack deploy with compose file [#30144](https://github.com/docker/docker/pull/30144)
<add>
<add>### Runtime
<add>
<add>* Fix size issue in `docker system df` [#30378](https://github.com/docker/docker/pull/30378)
<add>* Fix error on `docker inspect` when Swarm certificates were expired. [#29246](https://github.com/docker/docker/pull/29246)
<add>* Fix deadlock on v1 plugin with activate error [#30408](https://github.com/docker/docker/pull/30408)
<add>* Fix SELinux regression [#30649](https://github.com/docker/docker/pull/30649)
<add>
<add>### Plugins
<add>
<add>* Support global scoped network plugins (v2) in swarm mode [#30332](https://github.com/docker/docker/pull/30332)
<add>+ Add `docker plugin upgrade` [#29414](https://github.com/docker/docker/pull/29414)
<add>
<add>### Windows
<add>
<add>* Fix small regression with old plugins in Windows [#30150](https://github.com/docker/docker/pull/30150)
<add>* Fix warning on Windows [#30730](https://github.com/docker/docker/pull/30730)
<add>
<ide> ## 1.13.0 (2017-01-18)
<ide>
<add>**IMPORTANT**: On Linux distributions where `devicemapper` was the default storage driver,
<add>the `overlay2`, or `overlay` is now used by default (if the kernel supports it).
<add>To use devicemapper, you can manually configure the storage driver to use through
<add>the `--storage-driver` daemon option, or by setting "storage-driver" in the `daemon.json`
<add>configuration file.
<add>
<ide> **IMPORTANT**: In Docker 1.13, the managed plugin api changed, as compared to the experimental
<ide> version introduced in Docker 1.12. You must **uninstall** plugins which you installed with Docker 1.12
<ide> _before_ upgrading to Docker 1.13. You can uninstall plugins using the `docker plugin rm` command.
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> 3. Reinstall your plugins.
<ide>
<ide> ### Builder
<add>
<ide> + Add capability to specify images used as a cache source on build. These images do not need to have local parent chain and can be pulled from other registries [#26839](https://github.com/docker/docker/pull/26839)
<ide> + (experimental) Add option to squash image layers to the FROM image after successful builds [#22641](https://github.com/docker/docker/pull/22641)
<ide> * Fix dockerfile parser with empty line after escape [#24725](https://github.com/docker/docker/pull/24725)
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> - Fix image layer inconsistencies when using the overlay storage driver [#27209](https://github.com/docker/docker/pull/27209)
<ide> * Unused build-args are now allowed. A warning is presented instead of an error and failed build [#27412](https://github.com/docker/docker/pull/27412)
<ide> - Fix builder cache issue on Windows [#27805](https://github.com/docker/docker/pull/27805)
<add>+ Allow `USER` in builder on Windows [#28415](https://github.com/docker/docker/pull/28415)
<add>+ Handle env case-insensitive on Windows [#28725](https://github.com/docker/docker/pull/28725)
<ide>
<ide> ### Contrib
<del>+ Add support for building docker debs for Ubuntu Xenial on PPC64 [#23438](https://github.com/docker/docker/pull/23438)
<del>+ Add support for building docker debs for Ubuntu Xenial on s390x [#26104](https://github.com/docker/docker/pull/26104)
<add>
<add>+ Add support for building docker debs for Ubuntu 16.04 Xenial on PPC64LE [#23438](https://github.com/docker/docker/pull/23438)
<add>+ Add support for building docker debs for Ubuntu 16.04 Xenial on s390x [#26104](https://github.com/docker/docker/pull/26104)
<add>+ Add support for building docker debs for Ubuntu 16.10 Yakkety Yak on PPC64LE [#28046](https://github.com/docker/docker/pull/28046)
<ide> - Add RPM builder for VMWare Photon OS [#24116](https://github.com/docker/docker/pull/24116)
<ide> + Add shell completions to tgz [#27735](https://github.com/docker/docker/pull/27735)
<ide> * Update the install script to allow using the mirror in China [#27005](https://github.com/docker/docker/pull/27005)
<ide> + Add DEB builder for Ubuntu 16.10 Yakkety Yak [#27993](https://github.com/docker/docker/pull/27993)
<ide> + Add RPM builder for Fedora 25 [#28222](https://github.com/docker/docker/pull/28222)
<add>+ Add `make deb` support for aarch64 [#27625](https://github.com/docker/docker/pull/27625)
<ide>
<ide> ### Distribution
<ide>
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> + Unix socket support for fluentd [#26088](https://github.com/docker/docker/pull/26088)
<ide> * Enable fluentd logging driver on Windows [#28189](https://github.com/docker/docker/pull/28189)
<ide> - Sanitize docker labels when used as journald field names [#23725](https://github.com/docker/docker/pull/23725)
<add>- Fix an issue where `docker logs --tail` returned less lines than expected [#28203](https://github.com/docker/docker/pull/28203)
<add>- Splunk Logging Driver: performance and reliability improvements [#26207](https://github.com/docker/docker/pull/26207)
<add>- Splunk Logging Driver: configurable formats and skip for verifying connection [#25786](https://github.com/docker/docker/pull/25786)
<ide>
<ide> ### Networking
<ide>
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> + Add `docker plugin create` command [#28164](https://github.com/docker/docker/pull/28164)
<ide> * Send request's TLS peer certificates to authorization plugins [#27383](https://github.com/docker/docker/pull/27383)
<ide> * Support for global-scoped network and ipam plugins in swarm-mode [#27287](https://github.com/docker/docker/pull/27287)
<add>* Split `docker plugin install` into two API call `/privileges` and `/pull` [#28963](https://github.com/docker/docker/pull/28963)
<ide>
<ide> ### Remote API (v1.25) & Client
<ide>
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> + Add capability to /containers/create API to specify mounts in a more granular and safer way [#22373](https://github.com/docker/docker/pull/22373)
<ide> + Add `--format` flag to `network ls` and `volume ls` [#23475](https://github.com/docker/docker/pull/23475)
<ide> * Allow the top-level `docker inspect` command to inspect any kind of resource [#23614](https://github.com/docker/docker/pull/23614)
<add>+ Add --cpus flag to control cpu resources for `docker run` and `docker create`, and add `NanoCPUs` to `HostConfig` [#27958](https://github.com/docker/docker/pull/27958)
<ide> - Allow unsetting the `--entrypoint` in `docker run` or `docker create` [#23718](https://github.com/docker/docker/pull/23718)
<ide> * Restructure CLI commands by adding `docker image` and `docker container` commands for more consistency [#26025](https://github.com/docker/docker/pull/26025)
<ide> - Remove `COMMAND` column from `service ls` output [#28029](https://github.com/docker/docker/pull/28029)
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> - Fix a race condition between device deferred removal and resume device, when using the devicemapper graphdriver [#23497](https://github.com/docker/docker/pull/23497)
<ide> - Add `docker stats` support in Windows [#25737](https://github.com/docker/docker/pull/25737)
<ide> - Allow using `--pid=host` and `--net=host` when `--userns=host` [#25771](https://github.com/docker/docker/pull/25771)
<del>+ (experimental) Add metrics output [#25820](https://github.com/docker/docker/pull/25820)
<add>+ (experimental) Add metrics (Prometheus) output for basic `container`, `image`, and `daemon` operations [#25820](https://github.com/docker/docker/pull/25820)
<ide> - Fix issue in `docker stats` with `NetworkDisabled=true` [#25905](https://github.com/docker/docker/pull/25905)
<ide> + Add `docker top` support in Windows [#25891](https://github.com/docker/docker/pull/25891)
<ide> + Record pid of exec'd process [#27470](https://github.com/docker/docker/pull/27470)
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> ### Swarm Mode
<ide>
<ide> + Add secret management [#27794](https://github.com/docker/docker/pull/27794)
<add>+ Add support for templating service options (hostname, mounts, and environment variables) [#28025](https://github.com/docker/docker/pull/28025)
<ide> * Display the endpoint mode in the output of `docker service inspect --pretty` [#26906](https://github.com/docker/docker/pull/26906)
<ide> * Make `docker service ps` output more bearable by shortening service IDs in task names [#28088](https://github.com/docker/docker/pull/28088)
<del>* `docker node ps` now defaults to the current node [#25214](https://github.com/docker/docker/pull/25214)
<del>+ Add `-a`/`--all` flags to `docker service ps` and `docker node ps` to show all results [#25983](https://github.com/docker/docker/pull/25983)
<add>* Make `docker node ps` default to the current node [#25214](https://github.com/docker/docker/pull/25214)
<ide> + Add `--dns`, -`-dns-opt`, and `--dns-search` to service create. [#27567](https://github.com/docker/docker/pull/27567)
<ide> + Add `--force` to `docker service update` [#27596](https://github.com/docker/docker/pull/27596)
<add>+ Add `--health-*` and `--no-healthcheck` flags to `docker service create` and `docker service update` [#27369](https://github.com/docker/docker/pull/27369)
<ide> + Add `-q` to `docker service ps` [#27654](https://github.com/docker/docker/pull/27654)
<ide> * Display number of global services in `docker service ls` [#27710](https://github.com/docker/docker/pull/27710)
<ide> - Remove `--name` flag from `docker service update`. This flag is only functional on `docker service create`, so was removed from the `update` command [#26988](https://github.com/docker/docker/pull/26988)
<ide> - Fix worker nodes failing to recover because of transient networking issues [#26646](https://github.com/docker/docker/issues/26646)
<ide> * Add support for health aware load balancing and DNS records [#27279](https://github.com/docker/docker/pull/27279)
<del>* Add `--hostname` to `docker service create` [#27857](https://github.com/docker/docker/pull/27857)
<del>- Add `--tty` flag to `docker service create`/`update` [#28076](https://github.com/docker/docker/pull/28076)
<add>+ Add `--hostname` to `docker service create` [#27857](https://github.com/docker/docker/pull/27857)
<add>+ Add `--host` to `docker service create`, and `--host-add`, `--host-rm` to `docker service update` [#28031](https://github.com/docker/docker/pull/28031)
<add>+ Add `--tty` flag to `docker service create`/`update` [#28076](https://github.com/docker/docker/pull/28076)
<ide> * Autodetect, store, and expose node IP address as seen by the manager [#27910](https://github.com/docker/docker/pull/27910)
<ide> * Encryption at rest of manager keys and raft data [#27967](https://github.com/docker/docker/pull/27967)
<ide> + Add `--update-max-failure-ratio`, `--update-monitor` and `--rollback` flags to `docker service update` [#26421](https://github.com/docker/docker/pull/26421)
<ide> - Fix an issue with address autodiscovery on `docker swarm init` running inside a container [#26457](https://github.com/docker/docker/pull/26457)
<ide> + (experimental) Add `docker service logs` command to view logs for a service [#28089](https://github.com/docker/docker/pull/28089)
<del>- Pin images by digest for `docker service create` and `update` [#28173](https://github.com/docker/docker/pull/28173)
<del>- Add short (`-f`) flag for `docker node rm --force` and `docker swarm leave --force` [#28196](https://github.com/docker/docker/pull/28196)
<del>+ Don't repull image if pinned by digest [#28265](https://github.com/docker/docker/pull/28265)
<del>+ swarm-mode support for Windows [#27838](https://github.com/docker/docker/pull/27838)
<add>+ Pin images by digest for `docker service create` and `update` [#28173](https://github.com/docker/docker/pull/28173)
<add>* Add short (`-f`) flag for `docker node rm --force` and `docker swarm leave --force` [#28196](https://github.com/docker/docker/pull/28196)
<add>+ Add options to customize Raft snapshots (`--max-snapshots`, `--snapshot-interval`) [#27997](https://github.com/docker/docker/pull/27997)
<add>- Don't repull image if pinned by digest [#28265](https://github.com/docker/docker/pull/28265)
<add>+ Swarm-mode support for Windows [#27838](https://github.com/docker/docker/pull/27838)
<add>+ Allow hostname to be updated on service [#28771](https://github.com/docker/docker/pull/28771)
<add>+ Support v2 plugins [#29433](https://github.com/docker/docker/pull/29433)
<add>+ Add content trust for services [#29469](https://github.com/docker/docker/pull/29469)
<ide>
<ide> ### Volume
<ide>
<ide> To manually remove all plugins and resolve this problem, take the following step
<ide> - Deprecate unversioned API endpoints [#28208](https://github.com/docker/docker/pull/28208)
<ide> - Remove Ubuntu 15.10 (Wily Werewolf) as supported platform. Ubuntu 15.10 is EOL, and no longer receives updates [#27042](https://github.com/docker/docker/pull/27042)
<ide> - Remove Fedora 22 as supported platform. Fedora 22 is EOL, and no longer receives updates [#27432](https://github.com/docker/docker/pull/27432)
<add>- Remove Fedora 23 as supported platform. Fedora 23 is EOL, and no longer receives updates [#29455](https://github.com/docker/docker/pull/29455)
<ide> - Deprecate the `repo:shortid` syntax on `docker pull` [#27207](https://github.com/docker/docker/pull/27207)
<del>- Deprecate backing filesystem without d_type for overlay/overlay2 storage drivers [#27433](https://github.com/docker/docker/pull/27433)
<del>- Deprecate MAINTAINER in Dockerfile [#25466](https://github.com/docker/docker/pull/25466)
<del>- Deprecated filter param for endpoint `/images/json` [#27872](https://github.com/docker/docker/pull/27872)
<add>- Deprecate backing filesystem without `d_type` for overlay and overlay2 storage drivers [#27433](https://github.com/docker/docker/pull/27433)
<add>- Deprecate `MAINTAINER` in Dockerfile [#25466](https://github.com/docker/docker/pull/25466)
<add>- Deprecate `filter` param for endpoint `/images/json` [#27872](https://github.com/docker/docker/pull/27872)
<add>- Deprecate setting duplicate engine labels [#24533](https://github.com/docker/docker/pull/24533)
<add>- Deprecate "top-level" network information in `NetworkSettings` [#28437](https://github.com/docker/docker/pull/28437)
<ide>
<ide> ## 1.12.6 (2017-01-10)
<ide> | 1 |
PHP | PHP | add typehint and remove no need new line | bb8d3755184246a91b22b270dd61631ae8c27228 | <ide><path>src/Illuminate/Redis/Database.php
<ide> public function __construct(array $servers = array())
<ide> * @param array $options
<ide> * @return array
<ide> */
<del> protected function createAggregateClient(array $servers, $options = [])
<add> protected function createAggregateClient(array $servers, array $options = [])
<ide> {
<ide> return array('default' => new Client(array_values($servers), $options));
<ide> }
<ide> protected function createAggregateClient(array $servers, $options = [])
<ide> * @param array $options
<ide> * @return array
<ide> */
<del> protected function createSingleClients(array $servers, $options = [])
<add> protected function createSingleClients(array $servers, array $options = [])
<ide> {
<ide> $clients = array();
<ide>
<ide><path>tests/Redis/RedisConnectionTest.php
<ide> <?php
<ide>
<del>
<ide> class RedisConnectionTest extends PHPUnit_Framework_TestCase {
<ide>
<ide> public function testRedisNotCreateClusterAndOptionsServer() | 2 |
Python | Python | add xfailing test for | c32290557ff3732134ae210e826adc2f7a1cd285 | <ide><path>spacy/tests/regression/test_issue3288.py
<add># coding: utf-8
<add>from __future__ import unicode_literals
<add>
<add>import pytest
<add>import numpy
<add>from spacy import displacy
<add>
<add>from ..util import get_doc
<add>
<add>
<add>@pytest.mark.xfail
<add>def test_issue3288(en_vocab):
<add> """Test that retokenization works correctly via displaCy when punctuation
<add> is merged onto the preceeding token and tensor is resized."""
<add> words = ["Hello", "World", "!", "When", "is", "this", "breaking", "?"]
<add> heads = [1, 0, -1, 1, 0, 1, -2, -3]
<add> deps = ["intj", "ROOT", "punct", "advmod", "ROOT", "det", "nsubj", "punct"]
<add> doc = get_doc(en_vocab, words=words, heads=heads, deps=deps)
<add> doc.tensor = numpy.zeros(96, dtype="float32")
<add> displacy.render(doc) | 1 |
Javascript | Javascript | support emit on nodeeventtarget | c5477be8a1a102bd4b87ba812241e6ac1dbbfd3b | <ide><path>lib/internal/event_target.js
<ide> ObjectDefineProperty(Event.prototype, SymbolToStringTag, {
<ide> value: 'Event',
<ide> });
<ide>
<add>class NodeCustomEvent extends Event {
<add> constructor(type, options) {
<add> super(type, options);
<add> if (options && options.detail) {
<add> this.detail = options.detail;
<add> }
<add> }
<add>}
<ide> // The listeners for an EventTarget are maintained as a linked list.
<ide> // Unfortunately, the way EventTarget is defined, listeners are accounted
<ide> // using the tuple [handler,capture], and even if we don't actually make
<ide> class EventTarget {
<ide> event[kTarget] = undefined;
<ide> }
<ide>
<add> [kCreateEvent](nodeValue, type) {
<add> return new NodeCustomEvent(type, { detail: nodeValue });
<add> }
<ide> [customInspectSymbol](depth, options) {
<ide> const name = this.constructor.name;
<ide> if (depth < 0)
<ide> class NodeEventTarget extends EventTarget {
<ide> this.addEventListener(type, listener, { [kIsNodeStyleListener]: true });
<ide> return this;
<ide> }
<add> emit(type, arg) {
<add> if (typeof type !== 'string') {
<add> throw new ERR_INVALID_ARG_TYPE('type', 'string', type);
<add> }
<add> const hadListeners = this.listenerCount(type) > 0;
<add> this[kHybridDispatch](arg, type);
<add> return hadListeners;
<add> }
<ide>
<ide> once(type, listener) {
<ide> this.addEventListener(type, listener,
<ide> ObjectDefineProperties(NodeEventTarget.prototype, {
<ide> on: { enumerable: true },
<ide> addListener: { enumerable: true },
<ide> once: { enumerable: true },
<add> emit: { enumerable: true },
<ide> removeAllListeners: { enumerable: true },
<ide> });
<ide>
<ide><path>lib/internal/worker/io.js
<ide> ObjectDefineProperties(MessageEvent.prototype, {
<ide> },
<ide> });
<ide>
<add>const originalCreateEvent = EventTarget.prototype[kCreateEvent];
<ide> ObjectDefineProperty(
<ide> MessagePort.prototype,
<ide> kCreateEvent,
<ide> {
<ide> value: function(data, type) {
<add> if (type !== 'message' && type !== 'messageerror') {
<add> return originalCreateEvent.call(this, data, type);
<add> }
<ide> return new MessageEvent(type, { data });
<ide> },
<ide> configurable: false,
<ide><path>test/parallel/test-nodeeventtarget.js
<ide> const {
<ide> deepStrictEqual,
<ide> ok,
<ide> strictEqual,
<add> throws,
<ide> } = require('assert');
<ide>
<ide> const { on } = require('events');
<ide> const { on } = require('events');
<ide> target.on('foo', () => {});
<ide> target.on('foo', () => {});
<ide> }
<add>{
<add> // Test NodeEventTarget emit
<add> const emitter = new NodeEventTarget();
<add> emitter.addEventListener('foo', common.mustCall((e) => {
<add> strictEqual(e.type, 'foo');
<add> strictEqual(e.detail, 'bar');
<add> ok(e instanceof Event);
<add> }), { once: true });
<add> emitter.once('foo', common.mustCall((e, droppedAdditionalArgument) => {
<add> strictEqual(e, 'bar');
<add> strictEqual(droppedAdditionalArgument, undefined);
<add> }));
<add> emitter.emit('foo', 'bar', 'baz');
<add>}
<add>{
<add> // Test NodeEventTarget emit unsupported usage
<add> const emitter = new NodeEventTarget();
<add> throws(() => {
<add> emitter.emit();
<add> }, /ERR_INVALID_ARG_TYPE/);
<add>}
<ide>
<ide> (async () => {
<ide> // test NodeEventTarget async-iterability
<ide><path>test/parallel/test-worker-message-port.js
<ide> const { MessageChannel, MessagePort } = require('worker_threads');
<ide> port2.close(common.mustCall());
<ide> }));
<ide> }
<del>
<add>{
<add> // Test emitting non-message events on a port
<add> const { port2 } = new MessageChannel();
<add> port2.addEventListener('foo', common.mustCall((received) => {
<add> assert.strictEqual(received.type, 'foo');
<add> assert.strictEqual(received.detail, 'bar');
<add> }));
<add> port2.on('foo', common.mustCall((received) => {
<add> assert.strictEqual(received, 'bar');
<add> }));
<add> port2.emit('foo', 'bar');
<add>}
<ide> {
<ide> const { port1, port2 } = new MessageChannel();
<ide> | 4 |
Ruby | Ruby | add `uname` method | c90e63b2998312e4c9aa029599df2c0b184faaa6 | <ide><path>Library/Homebrew/os.rb
<ide> def self.kernel_version
<ide> @kernel_version ||= Version.new(Utils.safe_popen_read("uname", "-r").chomp)
<ide> end
<ide>
<add> # Get the kernel name.
<add> #
<add> # @api public
<add> sig { returns(String) }
<add> def self.uname
<add> @uname ||= Utils.safe_popen_read("uname").chomp
<add> end
<add>
<ide> ::OS_VERSION = ENV["HOMEBREW_OS_VERSION"]
<ide>
<ide> CI_GLIBC_VERSION = "2.23" | 1 |
Java | Java | remove soft error when creating preallocated view | db21584ba01ee9eda7f36b0e7ba13839b5fb1a47 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/SurfaceMountingManager.java
<ide> import com.facebook.common.logging.FLog;
<ide> import com.facebook.infer.annotation.Assertions;
<ide> import com.facebook.infer.annotation.ThreadConfined;
<del>import com.facebook.react.bridge.ReactNoCrashSoftException;
<ide> import com.facebook.react.bridge.ReactSoftExceptionLogger;
<ide> import com.facebook.react.bridge.ReadableArray;
<ide> import com.facebook.react.bridge.ReadableMap;
<ide> public void createView(
<ide> }
<ide> // We treat this as a perf problem and not a logical error. View Preallocation or unexpected
<ide> // changes to Differ or C++ Binding could cause some redundant Create instructions.
<del> // This is a NoCrash soft exception because we know there are cases where preallocation happens
<del> // and a node is recreated: if a node is preallocated and then committed with revision 2+,
<del> // an extra CREATE instruction will be generated.
<add> // There are cases where preallocation happens and a node is recreated: if a node is
<add> // preallocated and then committed with revision 2+, an extra CREATE instruction will be
<add> // generated.
<ide> // This represents a perf issue only, not a correctness issue. In the future we need to
<ide> // refactor View preallocation to correct the currently incorrect assumptions.
<ide> if (getNullableViewState(reactTag) != null) {
<del> ReactSoftExceptionLogger.logSoftException(
<del> TAG,
<del> new ReactNoCrashSoftException(
<del> "Cannot CREATE view with tag [" + reactTag + "], already exists."));
<ide> return;
<ide> }
<ide> | 1 |
Javascript | Javascript | implement sampled functions based on the pdf spec | 59283bdf6d439fdcd1a0ab07b318b48031091b34 | <ide><path>src/function.js
<ide> var PDFFunction = (function PDFFunctionClosure() {
<ide> else
<ide> decode = toMultiArray(decode);
<ide>
<del> // Precalc the multipliers
<del> var inputMul = new Float64Array(inputSize);
<del> for (var i = 0; i < inputSize; ++i) {
<del> inputMul[i] = (encode[i][1] - encode[i][0]) /
<del> (domain[i][1] - domain[i][0]);
<del> }
<del>
<del> var idxMul = new Int32Array(inputSize);
<del> idxMul[0] = outputSize;
<del> for (i = 1; i < inputSize; ++i) {
<del> idxMul[i] = idxMul[i - 1] * size[i - 1];
<del> }
<del>
<del> var nSamples = outputSize;
<del> for (i = 0; i < inputSize; ++i)
<del> nSamples *= size[i];
<del>
<ide> var samples = this.getSampleArray(size, outputSize, bps, str);
<ide>
<ide> return [
<ide> CONSTRUCT_SAMPLED, inputSize, domain, encode, decode, samples, size,
<del> outputSize, bps, range, inputMul, idxMul, nSamples
<add> outputSize, Math.pow(2, bps) - 1, range
<ide> ];
<ide> },
<ide>
<ide> constructSampledFromIR: function pdfFunctionConstructSampledFromIR(IR) {
<del> var inputSize = IR[1];
<del> var domain = IR[2];
<del> var encode = IR[3];
<del> var decode = IR[4];
<del> var samples = IR[5];
<del> var size = IR[6];
<del> var outputSize = IR[7];
<del> var bps = IR[8];
<del> var range = IR[9];
<del> var inputMul = IR[10];
<del> var idxMul = IR[11];
<del> var nSamples = IR[12];
<add> // See chapter 3, page 109 of the PDF reference
<add> function interpolate(x, xmin, xmax, ymin, ymax) {
<add> return ymin + ((x - xmin) * ((ymax - ymin) / (xmax - xmin)));
<add> }
<ide>
<ide> return function constructSampledFromIRResult(args) {
<del> if (inputSize != args.length)
<add> // See chapter 3, page 110 of the PDF reference.
<add> var m = IR[1];
<add> var domain = IR[2];
<add> var encode = IR[3];
<add> var decode = IR[4];
<add> var samples = IR[5];
<add> var size = IR[6];
<add> var n = IR[7];
<add> var mask = IR[8];
<add> var range = IR[9];
<add>
<add> if (m != args.length)
<ide> error('Incorrect number of arguments: ' + inputSize + ' != ' +
<ide> args.length);
<del> // Most of the below is a port of Poppler's implementation.
<del> // TODO: There's a few other ways to do multilinear interpolation such
<del> // as piecewise, which is much faster but an approximation.
<del> var out = new Float64Array(outputSize);
<del> var x;
<del> var e = new Array(inputSize);
<del> var efrac0 = new Float64Array(inputSize);
<del> var efrac1 = new Float64Array(inputSize);
<del> var sBuf = new Float64Array(1 << inputSize);
<del> var i, j, k, idx, t;
<del>
<del> // map input values into sample array
<del> for (i = 0; i < inputSize; ++i) {
<del> x = (args[i] - domain[i][0]) * inputMul[i] + encode[i][0];
<del> if (x < 0) {
<del> x = 0;
<del> } else if (x > size[i] - 1) {
<del> x = size[i] - 1;
<del> }
<del> e[i] = [Math.floor(x), 0];
<del> if ((e[i][1] = e[i][0] + 1) >= size[i]) {
<del> // this happens if in[i] = domain[i][1]
<del> e[i][1] = e[i][0];
<del> }
<del> efrac1[i] = x - e[i][0];
<del> efrac0[i] = 1 - efrac1[i];
<del> }
<ide>
<del> // for each output, do m-linear interpolation
<del> for (i = 0; i < outputSize; ++i) {
<del>
<del> // pull 2^m values out of the sample array
<del> for (j = 0; j < (1 << inputSize); ++j) {
<del> idx = i;
<del> for (k = 0, t = j; k < inputSize; ++k, t >>= 1) {
<del> idx += idxMul[k] * (e[k][t & 1]);
<del> }
<del> if (idx >= 0 && idx < nSamples) {
<del> sBuf[j] = samples[idx];
<del> } else {
<del> sBuf[j] = 0; // TODO Investigate if this is what Adobe does
<del> }
<del> }
<add> var x = args;
<add> var y = new Float64Array(n * m);
<ide>
<del> // do m sets of interpolations
<del> for (j = 0, t = (1 << inputSize); j < inputSize; ++j, t >>= 1) {
<del> for (k = 0; k < t; k += 2) {
<del> sBuf[k >> 1] = efrac0[j] * sBuf[k] + efrac1[j] * sBuf[k + 1];
<del> }
<del> }
<add> // Map x_i to y_j for 0 <= i < m using the sampled function.
<add> for (var i = 0; i < m; ++i) {
<add> // x_i' = min(max(x_i, Domain_2i), Domain_2i+1)
<add> var domain_2i = domain[2 * i];
<add> var domain_2i_1 = domain[2 * i + 1];
<add> var xi = Math.min(Math.max(x[i], domain_2i), domain_2i_1);
<add>
<add> // e_i = Interpolate(x_i', Domain_2i, Domain_2i+1, Encode_2i, Encode_2i+1)
<add> var e = interpolate(xi, domain_2i, domain_2i_1, encode[2 * i], encode[2 * i + 1]);
<add>
<add> // e_i' = min(max(e_i, 0), Size_i - 1)
<add> e = Math.min(Math.max(e, 0), size[i] - 1);
<add>
<add>          var offset = i * n;
<ide>
<del> // map output value to range
<del> out[i] = (sBuf[0] * (decode[i][1] - decode[i][0]) + decode[i][0]);
<del> if (out[i] < range[i][0]) {
<del> out[i] = range[i][0];
<del> } else if (out[i] > range[i][1]) {
<del> out[i] = range[i][1];
<add> for (var j = 0; j < n; ++j) {
<add> // average the two nearest neighbors in the sampling table
<add> var rj = (samples[Math.floor(e) * n + j] + samples[Math.ceil(e) * n + j]) / 2;
<add>
<add> // r_j' = Interpolate(r_j, 0, 2^BitsPerSample - 1, Decode_2j, Decode_2j+1)
<add>            rj = interpolate(rj, 0, mask, decode[2 * j], decode[2 * j + 1]);
<add>
<add>            // y_j = min(max(r_j, range_2j), range_2j+1)
<add>            y[offset + j] = Math.min(Math.max(rj, range[2 * j]), range[2 * j + 1]);
<ide> }
<ide> }
<del> return out;
<add>
<add> return y;
<ide> }
<ide> },
<ide> | 1 |
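The hunk above implements Type 0 (sampled) functions following the recipe cited in its comments (PDF reference, chapter 3: clamp the input to Domain, map it through Encode, look up the sample table, map the result through Decode, clamp to Range). The Python sketch below illustrates that math only; it is not pdf.js code, it is simplified to a single input dimension, and every name in it is invented for illustration.

```python
def interpolate(x, xmin, xmax, ymin, ymax):
    # Linear interpolation as described in the PDF reference, chapter 3.
    return ymin + (x - xmin) * (ymax - ymin) / (xmax - xmin)


def eval_sampled(x, domain, encode, decode, size, samples, n, bps, rng):
    """Evaluate a one-input sampled function at x.

    domain/encode: (low, high) pairs for the single input dimension.
    decode/rng:    one (low, high) pair per output j.
    samples:       flat list with n outputs per sample point, size points.
    """
    mask = 2 ** bps - 1
    xi = min(max(x, domain[0]), domain[1])                           # x_i'
    e = interpolate(xi, domain[0], domain[1], encode[0], encode[1])  # e_i
    e = min(max(e, 0), size - 1)                                     # e_i'
    lo, hi = int(e), min(int(e) + 1, size - 1)
    out = []
    for j in range(n):
        # average the two nearest sample points, then decode and clamp
        rj = (samples[lo * n + j] + samples[hi * n + j]) / 2.0
        rj = interpolate(rj, 0, mask, decode[j][0], decode[j][1])
        out.append(min(max(rj, rng[j][0]), rng[j][1]))
    return out


# 8-bit table with two sample points (0 and 255): a linear ramp over [0, 1]
print(eval_sampled(0.5, (0, 1), (0, 1), [(0, 1)], 2, [0, 255], 1, 8, [(0, 1)]))  # [0.5]
```

Everything outside the table lookup is plain linear interpolation plus clamping, which mirrors why the hunk no longer needs the precomputed multiplier arrays it deletes.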
Java | Java | resolve ${} placeholders in @importresource | 1a8f0d6a9e37d12b31a2fe7bc1fbc2c91700a08e | <ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassParser.java
<ide> protected final SourceClass doProcessConfigurationClass(
<ide> String[] resources = importResource.getStringArray("value");
<ide> Class<? extends BeanDefinitionReader> readerClass = importResource.getClass("reader");
<ide> for (String resource : resources) {
<del> configClass.addImportedResource(resource, readerClass);
<add> String resolvedResource = this.environment.resolveRequiredPlaceholders(resource);
<add> configClass.addImportedResource(resolvedResource, readerClass);
<ide> }
<ide> }
<ide>
<ide><path>spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportResourceTests.java
<ide>
<ide> package org.springframework.context.annotation.configuration;
<ide>
<add>import java.util.Collections;
<add>
<ide> import org.aspectj.lang.annotation.Aspect;
<ide> import org.aspectj.lang.annotation.Before;
<add>
<ide> import static org.hamcrest.CoreMatchers.*;
<ide> import static org.junit.Assert.*;
<add>
<ide> import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.springframework.tests.sample.beans.TestBean;
<del>
<ide> import org.springframework.aop.support.AopUtils;
<ide> import org.springframework.beans.factory.annotation.Autowired;
<ide> import org.springframework.beans.factory.annotation.Value;
<ide> import org.springframework.context.annotation.Bean;
<ide> import org.springframework.context.annotation.Configuration;
<ide> import org.springframework.context.annotation.ImportResource;
<add>import org.springframework.core.env.MapPropertySource;
<add>import org.springframework.core.env.PropertySource;
<ide>
<ide> /**
<ide> * Integration tests for {@link ImportResource} support.
<ide> public void testImportDifferentResourceTypes() {
<ide> reader=XmlBeanDefinitionReader.class)
<ide> static class SubResourceConfig extends ImportNonXmlResourceConfig {
<ide> }
<add>
<add> @Test
<add> public void importWithPlaceHolder() throws Exception {
<add> AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
<add> PropertySource<?> propertySource = new MapPropertySource("test",
<add> Collections.<String, Object> singletonMap("test", "springframework"));
<add> ctx.getEnvironment().getPropertySources().addFirst(propertySource);
<add>		ctx.register(ImportWithPlaceHolder.class);
<add> ctx.refresh();
<add> assertTrue("did not contain xml-declared bean", ctx.containsBean("xmlDeclaredBean"));
<add> }
<add>
<add> @Configuration
<add> @ImportResource("classpath:org/${test}/context/annotation/configuration/ImportXmlConfig-context.xml")
<add> static class ImportWithPlaceHolder {
<add> }
<add>
<add>
<ide> } | 2 |
Ruby | Ruby | convert build_message to sprintf | 9699eeb496d6faff6f7768cbfe0e281ce7ce089c | <ide><path>actionpack/lib/action_controller/test_case.rb
<ide> def assert_template(options = {}, message = nil)
<ide> when NilClass, String, Symbol
<ide> options = options.to_s if Symbol === options
<ide> rendered = @templates
<del> msg = build_message(message,
<del> "expecting <?> but rendering with <?>",
<del> options, rendered.keys.join(', '))
<add> msg = message || sprintf("expecting <%s> but rendering with <%s>",
<add> options, rendered.keys)
<ide> assert_block(msg) do
<ide> if options
<ide> rendered.any? { |t,num| t.match(options) }
<ide> def assert_template(options = {}, message = nil)
<ide> end
<ide> when Hash
<ide> if expected_layout = options[:layout]
<del> msg = build_message(message,
<del> "expecting layout <?> but action rendered <?>",
<add> msg = message || sprintf("expecting layout <%s> but action rendered <%s>",
<ide> expected_layout, @layouts.keys)
<ide>
<ide> case expected_layout
<ide> def assert_template(options = {}, message = nil)
<ide> end
<ide> elsif expected_count = options[:count]
<ide> actual_count = @partials[expected_partial]
<del> msg = build_message(message,
<del> "expecting ? to be rendered ? time(s) but rendered ? time(s)",
<add> msg = message || sprintf("expecting %s to be rendered %s time(s) but rendered %s time(s)",
<ide> expected_partial, expected_count, actual_count)
<ide> assert(actual_count == expected_count.to_i, msg)
<ide> else
<del> msg = build_message(message,
<del> "expecting partial <?> but action rendered <?>",
<add> msg = message || sprintf("expecting partial <%s> but action rendered <%s>",
<ide> options[:partial], @partials.keys)
<ide> assert_includes @partials, expected_partial, msg
<ide> end | 1 |
Text | Text | add installation instructions for importmap-rails | 6a674db689320b6794952f1b3f79f5b083ba3454 | <ide><path>guides/source/working_with_javascript_in_rails.md
<ide> manage your JavaScript dependencies, there is no need to install Node.js or Yarn
<ide> When using import maps, no separate build process is required, just start your server with
<ide> `bin/rails server` and you are good to go.
<ide>
<add>### Installing importmap-rails
<add>
<add>Importmap for Rails is automatically included in Rails 7+ for new applications, but you can also install it manually in existing applications:
<add>
<add>```bash
<add>$ bin/bundle add importmap-rails
<add>```
<add>
<add>Run the install task:
<add>
<add>```bash
<add>$ bin/rails importmap:install
<add>```
<add>
<ide> ### Adding NPM Packages with importmap-rails
<ide>
<ide> To add new packages to your import map-powered application, run the `bin/importmap pin` command | 1 |
Text | Text | add vsemozhetbyt to collaborators | 4895e0c1f0ab49d01da18d6d5d1c7cd938020cad | <ide><path>README.md
<ide> more information about the governance of the Node.js project, see
<ide> **Mike Tunnicliffe** <[email protected]>
<ide> * [vkurchatkin](https://github.com/vkurchatkin) -
<ide> **Vladimir Kurchatkin** <[email protected]>
<add>* [vsemozhetbyt](https://github.com/vsemozhetbyt) -
<add>**Vse Mozhet Byt** <[email protected]> (he/him)
<ide> * [watilde](https://github.com/watilde) -
<ide> **Daijiro Wachi** <[email protected]> (he/him)
<ide> * [whitlockjc](https://github.com/whitlockjc) - | 1 |
Go | Go | add test for keeping same daemon id on upgrade | f923321aae7a961e94c00207cc80e51d410c676d | <ide><path>daemon/config/config.go
<ide> var flatOptions = map[string]bool{
<ide> var skipValidateOptions = map[string]bool{
<ide> "features": true,
<ide> "builder": true,
<add> // Corresponding flag has been removed because it was already unusable
<add> "deprecated-key-path": true,
<ide> }
<ide>
<ide> // skipDuplicates contains configuration keys that
<ide><path>integration/config/config_test.go
<ide> import (
<ide> "bytes"
<ide> "context"
<ide> "encoding/json"
<add> "io/ioutil"
<add> "path/filepath"
<ide> "sort"
<ide> "testing"
<ide> "time"
<ide> import (
<ide> swarmtypes "github.com/docker/docker/api/types/swarm"
<ide> "github.com/docker/docker/client"
<ide> "github.com/docker/docker/integration/internal/swarm"
<add> "github.com/docker/docker/internal/test/daemon"
<ide> "github.com/docker/docker/pkg/stdcopy"
<ide> "gotest.tools/assert"
<ide> is "gotest.tools/assert/cmp"
<ide> func TestConfigCreateResolve(t *testing.T) {
<ide> assert.Assert(t, is.Equal(0, len(entries)))
<ide> }
<ide>
<add>func TestConfigDaemonLibtrustID(t *testing.T) {
<add> skip.If(t, testEnv.DaemonInfo.OSType != "linux")
<add> defer setupTest(t)()
<add>
<add> d := daemon.New(t)
<add> defer d.Stop(t)
<add>
<add> trustKey := filepath.Join(d.RootDir(), "key.json")
<add> err := ioutil.WriteFile(trustKey, []byte(`{"crv":"P-256","d":"dm28PH4Z4EbyUN8L0bPonAciAQa1QJmmyYd876mnypY","kid":"WTJ3:YSIP:CE2E:G6KJ:PSBD:YX2Y:WEYD:M64G:NU2V:XPZV:H2CR:VLUB","kty":"EC","x":"Mh5-JINSjaa_EZdXDttri255Z5fbCEOTQIZjAcScFTk","y":"eUyuAjfxevb07hCCpvi4Zi334Dy4GDWQvEToGEX4exQ"}`), 0644)
<add> assert.NilError(t, err)
<add>
<add> config := filepath.Join(d.RootDir(), "daemon.json")
<add> err = ioutil.WriteFile(config, []byte(`{"deprecated-key-path": "`+trustKey+`"}`), 0644)
<add> assert.NilError(t, err)
<add>
<add> d.Start(t, "--config-file", config)
<add> info := d.Info(t)
<add> assert.Equal(t, info.ID, "WTJ3:YSIP:CE2E:G6KJ:PSBD:YX2Y:WEYD:M64G:NU2V:XPZV:H2CR:VLUB")
<add>}
<add>
<ide> func configNamesFromList(entries []swarmtypes.Config) []string {
<ide> var values []string
<ide> for _, entry := range entries { | 2 |
Python | Python | limit default for get_num_build_jobs() to 8 | 4c05fed01c68a305abf62135695bc61606746683 | <ide><path>numpy/distutils/misc_util.py
<ide> def get_num_build_jobs():
<ide> Get number of parallel build jobs set by the --parallel command line
<ide> argument of setup.py
<ide> If the command did not receive a setting the environment variable
<del> NPY_NUM_BUILD_JOBS checked and if that is unset it returns 1.
<add> NPY_NUM_BUILD_JOBS checked. If that is unset, return the number of
<add> processors on the system, with a maximum of 8 (to prevent
<add> overloading the system if there a lot of CPUs).
<ide>
<ide> Returns
<ide> -------
<ide> def get_num_build_jobs():
<ide> cpu_count = len(os.sched_getaffinity(0))
<ide> except AttributeError:
<ide> cpu_count = multiprocessing.cpu_count()
<add> cpu_count = min(cpu_count, 8)
<ide> envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", cpu_count))
<ide> dist = get_distribution()
<ide> # may be None during configuration | 1 |
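The docstring above spells out the new default: the detected CPU count, capped at 8, overridable through NPY_NUM_BUILD_JOBS, with an explicit --parallel value taking precedence. A rough standalone sketch of that precedence (the real function also consults the distutils distribution object, which is omitted here):

```python
import multiprocessing
import os


def num_build_jobs_sketch(cmd_line_parallel=None):
    # Detect available CPUs, honouring affinity masks where supported.
    try:
        cpu_count = len(os.sched_getaffinity(0))
    except AttributeError:          # platforms without sched_getaffinity
        cpu_count = multiprocessing.cpu_count()
    cpu_count = min(cpu_count, 8)   # cap the default to avoid overloading big machines
    envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", cpu_count))
    return cmd_line_parallel if cmd_line_parallel is not None else envjobs


print(num_build_jobs_sketch())    # capped default, e.g. 8 on a 16-core box
print(num_build_jobs_sketch(2))   # an explicit --parallel 2 wins
```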
Ruby | Ruby | fix references to url | d97daa7c1b2203be02c69124bc7b3ae5f2b50f99 | <ide><path>Library/Homebrew/download_strategy.rb
<ide> def repo_valid?
<ide> end
<ide>
<ide> def clone_repo
<del> safe_system hgpath, "clone", url, cached_location
<add> safe_system hgpath, "clone", @url, cached_location
<ide> end
<ide>
<ide> def update
<ide> def repo_valid?
<ide>
<ide> def clone_repo
<ide> # "lightweight" means history-less
<del> safe_system bzrpath, "checkout", "--lightweight", url, cached_location
<add> safe_system bzrpath, "checkout", "--lightweight", @url, cached_location
<ide> end
<ide>
<ide> def update
<ide> def cache_tag
<ide> end
<ide>
<ide> def clone_repo
<del> safe_system fossilpath, "clone", url, cached_location
<add> safe_system fossilpath, "clone", @url, cached_location
<ide> end
<ide>
<ide> def update | 1 |
Text | Text | add note regarding file structure in src/readme.md | da3626adccdbe1ff8d7abf496e2906a91e308de4 | <ide><path>src/README.md
<ide> the [event loop][] and other operation system abstractions to Node.js.
<ide>
<ide> There is a [reference documentation for the libuv API][].
<ide>
<add>## File structure
<add>
<add>The Node.js C++ files follow this structure:
<add>
<add>The `.h` header files contain declarations, and sometimes definitions that don’t
<add>require including other headers (e.g. getters, setters, etc.). They should only
<add>include other `.h` header files and nothing else.
<add>
<add>The `-inl.h` header files contain definitions of inline functions from the
<add>corresponding `.h` header file (e.g. functions marked `inline` in the
<add>declaration or `template` functions). They always include the corresponding
<add>`.h` header file, and can include other `.h` and `-inl.h` header files as
<add>needed. It is not mandatory to split out the definitions from the `.h` file
<add>into an `-inl.h` file, but it becomes necessary when there are multiple
<add>definitions and contents of other `-inl.h` files start being used. Therefore, it
<add>is recommended to split a `-inl.h` file when inline functions become longer than
<add>a few lines to keep the corresponding `.h` file readable and clean. All visible
<add>definitions from the `-inl.h` file should be declared in the corresponding `.h`
<add>header file.
<add>
<add>The `.cc` files contain definitions of non-inline functions from the
<add>corresponding `.h` header file. They always include the corresponding `.h`
<add>header file, and can include other `.h` and `-inl.h` header files as needed.
<add>
<ide> ## Helpful concepts
<ide>
<ide> A number of concepts are involved in putting together Node.js on top of V8 and | 1 |
PHP | PHP | update compiled classes | b622dc2182b246a4a3c1f4e65bff686e655ce46c | <ide><path>src/Illuminate/Foundation/Console/Optimize/config.php
<ide> $basePath.'/vendor/symfony/http-foundation/Symfony/Component/HttpFoundation/Session/Attribute/AttributeBag.php',
<ide> $basePath.'/vendor/symfony/http-foundation/Symfony/Component/HttpFoundation/Session/Flash/FlashBagInterface.php',
<ide> $basePath.'/vendor/symfony/http-foundation/Symfony/Component/HttpFoundation/Session/Flash/AutoExpireFlashBag.php',
<add> $basePath.'/vendor/laravel/framework/src/Illuminate/Session/FlashBag.php',
<ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Support/ServiceProvider.php',
<ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Exception/ExceptionServiceProvider.php',
<ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Routing/RoutingServiceProvider.php', | 1 |
Python | Python | apply emoticon exceptions to tokenizer | 0d07d7fc80b6b1a16a164326b1b144962c743dca | <ide><path>spacy/en/language_data.py
<ide> from __future__ import unicode_literals
<ide> import re
<ide> from ..symbols import *
<add>from ..language_data import EMOTICONS
<ide>
<ide>
<ide> PRON_LEMMA = "-PRON-"
<ide> "z."
<ide> ]
<ide>
<del>overlap = set(TOKENIZER_EXCEPTIONS.keys()).intersection(set(self_map))
<del>assert not overlap, overlap
<del>TOKENIZER_EXCEPTIONS.update({orth: [{ORTH: orth}] for orth in self_map})
<add>for orths in [self_map, EMOTICONS]:
<add> overlap = set(TOKENIZER_EXCEPTIONS.keys()).intersection(set(orths))
<add> assert not overlap, overlap
<add> TOKENIZER_EXCEPTIONS.update({orth: [{ORTH: orth}] for orth in orths})
<ide>
<ide>
<ide> TOKENIZER_PREFIXES = r'''
<ide><path>spacy/language_data/__init__.py
<add>from .emoticons import * | 2 |
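The loop above folds both the abbreviation list and the shared EMOTICONS list into TOKENIZER_EXCEPTIONS, asserting that no orth string collides with an existing key. A self-contained sketch of the same merge pattern, with made-up orth lists and a plain string standing in for spacy.symbols.ORTH:

```python
ORTH = "orth"  # stand-in for spacy.symbols.ORTH

TOKENIZER_EXCEPTIONS = {"e.g.": [{ORTH: "e.g."}]}
self_map = ["a.m.", "p.m."]
EMOTICONS = [":)", ":("]

for orths in [self_map, EMOTICONS]:
    overlap = set(TOKENIZER_EXCEPTIONS.keys()).intersection(set(orths))
    assert not overlap, overlap   # refuse to silently overwrite an existing exception
    TOKENIZER_EXCEPTIONS.update({orth: [{ORTH: orth}] for orth in orths})

print(sorted(TOKENIZER_EXCEPTIONS))  # [':(', ':)', 'a.m.', 'e.g.', 'p.m.']
```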
Go | Go | update restrictions for better handling of mounts | f5139233b930e436707a65cc032aa2952edd6e4a | <ide><path>daemon/execdriver/lxc/driver.go
<ide> package lxc
<ide>
<ide> import (
<ide> "fmt"
<del> "github.com/dotcloud/docker/daemon/execdriver"
<del> "github.com/dotcloud/docker/pkg/cgroups"
<del> "github.com/dotcloud/docker/pkg/label"
<del> "github.com/dotcloud/docker/pkg/libcontainer/security/restrict"
<del> "github.com/dotcloud/docker/pkg/system"
<del> "github.com/dotcloud/docker/utils"
<ide> "io/ioutil"
<ide> "log"
<ide> "os"
<ide> import (
<ide> "strings"
<ide> "syscall"
<ide> "time"
<add>
<add> "github.com/dotcloud/docker/daemon/execdriver"
<add> "github.com/dotcloud/docker/pkg/cgroups"
<add> "github.com/dotcloud/docker/pkg/label"
<add> "github.com/dotcloud/docker/pkg/libcontainer/security/restrict"
<add> "github.com/dotcloud/docker/pkg/system"
<add> "github.com/dotcloud/docker/utils"
<ide> )
<ide>
<ide> const DriverName = "lxc"
<ide> func init() {
<ide> if err := setupEnv(args); err != nil {
<ide> return err
<ide> }
<del>
<ide> if err := setupHostname(args); err != nil {
<ide> return err
<ide> }
<del>
<ide> if err := setupNetworking(args); err != nil {
<ide> return err
<ide> }
<del>
<del> if err := restrict.Restrict("/", "/empty"); err != nil {
<del> return err
<add> if !args.Privileged {
<add> if err := restrict.Restrict(); err != nil {
<add> return err
<add> }
<ide> }
<del>
<ide> if err := setupCapabilities(args); err != nil {
<ide> return err
<ide> }
<del>
<ide> if err := setupWorkingDirectory(args); err != nil {
<ide> return err
<ide> }
<del>
<ide> if err := system.CloseFdsFrom(3); err != nil {
<ide> return err
<ide> }
<del>
<ide> if err := changeUser(args); err != nil {
<ide> return err
<ide> }
<ide> func init() {
<ide> }
<ide>
<ide> type driver struct {
<del> root string // root path for the driver to use
<del> apparmor bool
<del> sharedRoot bool
<del> restrictionPath string
<add> root string // root path for the driver to use
<add> apparmor bool
<add> sharedRoot bool
<ide> }
<ide>
<ide> func NewDriver(root string, apparmor bool) (*driver, error) {
<ide> // setup unconfined symlink
<ide> if err := linkLxcStart(root); err != nil {
<ide> return nil, err
<ide> }
<del> restrictionPath := filepath.Join(root, "empty")
<del> if err := os.MkdirAll(restrictionPath, 0700); err != nil {
<del> return nil, err
<del> }
<ide> return &driver{
<del> apparmor: apparmor,
<del> root: root,
<del> sharedRoot: rootIsShared(),
<del> restrictionPath: restrictionPath,
<add> apparmor: apparmor,
<add> root: root,
<add> sharedRoot: rootIsShared(),
<ide> }, nil
<ide> }
<ide>
<ide> func (d *driver) generateLXCConfig(c *execdriver.Command) (string, error) {
<ide>
<ide> if err := LxcTemplateCompiled.Execute(fo, struct {
<ide> *execdriver.Command
<del> AppArmor bool
<del> ProcessLabel string
<del> MountLabel string
<del> RestrictionSource string
<add> AppArmor bool
<add> ProcessLabel string
<add> MountLabel string
<ide> }{
<del> Command: c,
<del> AppArmor: d.apparmor,
<del> ProcessLabel: process,
<del> MountLabel: mount,
<del> RestrictionSource: d.restrictionPath,
<add> Command: c,
<add> AppArmor: d.apparmor,
<add> ProcessLabel: process,
<add> MountLabel: mount,
<ide> }); err != nil {
<ide> return "", err
<ide> }
<ide><path>daemon/execdriver/lxc/lxc_template.go
<ide> package lxc
<ide>
<ide> import (
<del> "github.com/dotcloud/docker/daemon/execdriver"
<del> "github.com/dotcloud/docker/pkg/label"
<ide> "strings"
<ide> "text/template"
<add>
<add> "github.com/dotcloud/docker/daemon/execdriver"
<add> "github.com/dotcloud/docker/pkg/label"
<ide> )
<ide>
<ide> const LxcTemplate = `
<ide> lxc.aa_profile = unconfined
<ide> {{else}}
<ide> # Let AppArmor normal confinement take place (i.e., not unconfined)
<ide> {{end}}
<del>{{else}}
<del># Restrict access to some stuff in /proc. Note that /proc is already mounted
<del># read-only, so we don't need to bother about things that are just dangerous
<del># to write to (like sysrq-trigger). Also, recent kernels won't let a container
<del># peek into /proc/kcore, but let's cater for people who might run Docker on
<del># older kernels. Just in case.
<del>lxc.mount.entry = {{escapeFstabSpaces $ROOTFS}}/dev/null {{escapeFstabSpaces $ROOTFS}}/proc/kcore none bind,ro 0 0
<ide> {{end}}
<ide>
<ide> # limits
<ide><path>daemon/execdriver/native/create.go
<ide> func (d *driver) createContainer(c *execdriver.Command) (*libcontainer.Container
<ide> container.Cgroups.Name = c.ID
<ide> // check to see if we are running in ramdisk to disable pivot root
<ide> container.NoPivotRoot = os.Getenv("DOCKER_RAMDISK") != ""
<del> container.Context["restriction_path"] = d.restrictionPath
<add> container.Context["restrictions"] = "true"
<ide>
<ide> if err := d.createNetwork(container, c); err != nil {
<ide> return nil, err
<ide> func (d *driver) setPrivileged(container *libcontainer.Container) error {
<ide> }
<ide> container.Cgroups.DeviceAccess = true
<ide>
<del> delete(container.Context, "restriction_path")
<add> delete(container.Context, "restrictions")
<ide>
<ide> if apparmor.IsEnabled() {
<ide> container.Context["apparmor_profile"] = "unconfined"
<ide><path>daemon/execdriver/native/driver.go
<ide> type driver struct {
<ide> root string
<ide> initPath string
<ide> activeContainers map[string]*exec.Cmd
<del> restrictionPath string
<ide> }
<ide>
<ide> func NewDriver(root, initPath string) (*driver, error) {
<ide> func NewDriver(root, initPath string) (*driver, error) {
<ide> if err := apparmor.InstallDefaultProfile(filepath.Join(root, "../..", BackupApparmorProfilePath)); err != nil {
<ide> return nil, err
<ide> }
<del> restrictionPath := filepath.Join(root, "empty")
<del> if err := os.MkdirAll(restrictionPath, 0700); err != nil {
<del> return nil, err
<del> }
<del>
<ide> return &driver{
<ide> root: root,
<del> restrictionPath: restrictionPath,
<ide> initPath: initPath,
<ide> activeContainers: make(map[string]*exec.Cmd),
<ide> }, nil
<ide><path>pkg/libcontainer/mount/init.go
<ide> func newSystemMounts(rootfs, mountLabel string, mounts libcontainer.Mounts) []mo
<ide> systemMounts := []mount{
<ide> {source: "proc", path: filepath.Join(rootfs, "proc"), device: "proc", flags: defaultMountFlags},
<ide> {source: "sysfs", path: filepath.Join(rootfs, "sys"), device: "sysfs", flags: defaultMountFlags},
<add> {source: "shm", path: filepath.Join(rootfs, "dev", "shm"), device: "tmpfs", flags: defaultMountFlags, data: label.FormatMountLabel("mode=1777,size=65536k", mountLabel)},
<add> {source: "devpts", path: filepath.Join(rootfs, "dev", "pts"), device: "devpts", flags: syscall.MS_NOSUID | syscall.MS_NOEXEC, data: label.FormatMountLabel("newinstance,ptmxmode=0666,mode=620,gid=5", mountLabel)},
<ide> }
<ide>
<ide> if len(mounts.OfType("devtmpfs")) == 1 {
<ide> systemMounts = append(systemMounts, mount{source: "tmpfs", path: filepath.Join(rootfs, "dev"), device: "tmpfs", flags: syscall.MS_NOSUID | syscall.MS_STRICTATIME, data: label.FormatMountLabel("mode=755", mountLabel)})
<ide> }
<del> systemMounts = append(systemMounts,
<del> mount{source: "shm", path: filepath.Join(rootfs, "dev", "shm"), device: "tmpfs", flags: defaultMountFlags, data: label.FormatMountLabel("mode=1777,size=65536k", mountLabel)},
<del> mount{source: "devpts", path: filepath.Join(rootfs, "dev", "pts"), device: "devpts", flags: syscall.MS_NOSUID | syscall.MS_NOEXEC, data: label.FormatMountLabel("newinstance,ptmxmode=0666,mode=620,gid=5", mountLabel)},
<del> )
<del>
<ide> return systemMounts
<ide> }
<ide><path>pkg/libcontainer/nsinit/init.go
<ide> func Init(container *libcontainer.Container, uncleanRootfs, consolePath string,
<ide>
<ide> runtime.LockOSThread()
<ide>
<del> if restrictionPath := container.Context["restriction_path"]; restrictionPath != "" {
<del> if err := restrict.Restrict("/", restrictionPath); err != nil {
<add> if container.Context["restrictions"] != "" {
<add> if err := restrict.Restrict(); err != nil {
<ide> return err
<ide> }
<ide> }
<ide><path>pkg/libcontainer/security/restrict/restrict.go
<ide> import (
<ide> "github.com/dotcloud/docker/pkg/system"
<ide> )
<ide>
<del>// "restrictions" are container paths (files, directories, whatever) that have to be masked.
<del>// maskPath is a "safe" path to be mounted over maskedPath. It can take two special values:
<del>// - if it is "", then nothing is mounted;
<del>// - if it is "EMPTY", then an empty directory is mounted instead.
<del>// If remountRO is true then the maskedPath is remounted read-only (regardless of whether a maskPath was used).
<del>type restriction struct {
<del> maskedPath string
<del> maskPath string
<del> remountRO bool
<del>}
<del>
<del>var restrictions = []restriction{
<del> {"/proc", "", true},
<del> {"/sys", "", true},
<del> {"/proc/kcore", "/dev/null", false},
<del>}
<del>
<ide> // This has to be called while the container still has CAP_SYS_ADMIN (to be able to perform mounts).
<ide> // However, afterwards, CAP_SYS_ADMIN should be dropped (otherwise the user will be able to revert those changes).
<del>// "empty" should be the path to an empty directory.
<del>func Restrict(rootfs, empty string) error {
<del> for _, restriction := range restrictions {
<del> dest := filepath.Join(rootfs, restriction.maskedPath)
<del> if restriction.maskPath != "" {
<del> var source string
<del> if restriction.maskPath == "EMPTY" {
<del> source = empty
<del> } else {
<del> source = filepath.Join(rootfs, restriction.maskPath)
<del> }
<del> if err := system.Mount(source, dest, "", syscall.MS_BIND, ""); err != nil {
<del> return fmt.Errorf("unable to bind-mount %s over %s: %s", source, dest, err)
<del> }
<del> }
<del> if restriction.remountRO {
<del> if err := system.Mount("", dest, "", syscall.MS_REMOUNT|syscall.MS_RDONLY, ""); err != nil {
<del> return fmt.Errorf("unable to remount %s readonly: %s", dest, err)
<del> }
<add>func Restrict() error {
<add> // remount proc and sys as readonly
<add> for _, dest := range []string{"proc", "sys"} {
<add> if err := system.Mount("", dest, "", syscall.MS_REMOUNT|syscall.MS_RDONLY, ""); err != nil {
<add> return fmt.Errorf("unable to remount %s readonly: %s", dest, err)
<ide> }
<ide> }
<ide>
<add> if err := system.Mount("/proc/kcore", "/dev/null", "", syscall.MS_BIND, ""); err != nil {
<add> return fmt.Errorf("unable to bind-mount /dev/null over /proc/kcore")
<add> }
<add>
<ide> // This weird trick will allow us to mount /proc read-only, while being able to use AppArmor.
<ide> // This is because apparently, loading an AppArmor profile requires write access to /proc/1/attr.
<ide> // So we do another mount of procfs, ensure it's write-able, and bind-mount a subset of it.
<del> tmpProcPath := filepath.Join(rootfs, ".proc")
<del> if err := os.Mkdir(tmpProcPath, 0700); err != nil {
<del> return fmt.Errorf("unable to create temporary proc mountpoint %s: %s", tmpProcPath, err)
<add> var (
<add> rwAttrPath = filepath.Join(".proc", "1", "attr")
<add> roAttrPath = filepath.Join("proc", "1", "attr")
<add> )
<add>
<add> if err := os.Mkdir(".proc", 0700); err != nil {
<add> return fmt.Errorf("unable to create temporary proc mountpoint .proc: %s", err)
<ide> }
<del> if err := system.Mount("proc", tmpProcPath, "proc", 0, ""); err != nil {
<add> if err := system.Mount("proc", ".proc", "proc", 0, ""); err != nil {
<ide> return fmt.Errorf("unable to mount proc on temporary proc mountpoint: %s", err)
<ide> }
<del> if err := system.Mount("proc", tmpProcPath, "", syscall.MS_REMOUNT, ""); err != nil {
<add> if err := system.Mount("proc", ".proc", "", syscall.MS_REMOUNT, ""); err != nil {
<ide> return fmt.Errorf("unable to remount proc read-write: %s", err)
<ide> }
<del> rwAttrPath := filepath.Join(rootfs, ".proc", "1", "attr")
<del> roAttrPath := filepath.Join(rootfs, "proc", "1", "attr")
<ide> if err := system.Mount(rwAttrPath, roAttrPath, "", syscall.MS_BIND, ""); err != nil {
<ide> return fmt.Errorf("unable to bind-mount %s on %s: %s", rwAttrPath, roAttrPath, err)
<ide> }
<del> if err := system.Unmount(tmpProcPath, 0); err != nil {
<add> if err := system.Unmount(".proc", 0); err != nil {
<ide> return fmt.Errorf("unable to unmount temporary proc filesystem: %s", err)
<ide> }
<del> return nil
<add> return os.RemoveAll(".proc")
<ide> }
<ide><path>pkg/libcontainer/security/restrict/unsupported.go
<ide> package restrict
<ide>
<ide> import "fmt"
<ide>
<del>func Restrict(rootfs, empty string) error {
<add>func Restrict() error {
<ide> return fmt.Errorf("not supported")
<ide> } | 8 |
Javascript | Javascript | ignore limit when invalid | a3c3bf3332e5685dc319c46faef882cb6ac246e1 | <ide><path>src/ng/filter/limitTo.js
<ide> * @param {string|number} limit The length of the returned array or string. If the `limit` number
<ide> * is positive, `limit` number of items from the beginning of the source array/string are copied.
<ide> * If the number is negative, `limit` number of items from the end of the source array/string
<del> * are copied. The `limit` will be trimmed if it exceeds `array.length`
<add> * are copied. The `limit` will be trimmed if it exceeds `array.length`. If `limit` is undefined,
<add> * the input will be returned unchanged.
<ide> * @returns {Array|string} A new sub-array or substring of length `limit` or less if input array
<ide> * had less than `limit` elements.
<ide> *
<ide> */
<ide> function limitToFilter() {
<ide> return function(input, limit) {
<del> if (isNumber(input)) input = input.toString();
<del> if (!isArray(input) && !isString(input)) return input;
<del>
<ide> if (Math.abs(Number(limit)) === Infinity) {
<ide> limit = Number(limit);
<ide> } else {
<ide> limit = int(limit);
<ide> }
<add> if (isNaN(limit)) return input;
<ide>
<del> //NaN check on limit
<del> if (limit) {
<del> return limit > 0 ? input.slice(0, limit) : input.slice(limit);
<del> } else {
<del> return isString(input) ? "" : [];
<del> }
<add> if (isNumber(input)) input = input.toString();
<add> if (!isArray(input) && !isString(input)) return input;
<add>
<add> return limit >= 0 ? input.slice(0, limit) : input.slice(limit);
<ide> };
<ide> }
<ide><path>test/ng/filter/limitToSpec.js
<ide> describe('Filter: limitTo', function() {
<ide> });
<ide>
<ide>
<del> it('should return an empty array when X cannot be parsed', function() {
<del> expect(limitTo(items, 'bogus')).toEqual([]);
<del> expect(limitTo(items, 'null')).toEqual([]);
<del> expect(limitTo(items, 'undefined')).toEqual([]);
<del> expect(limitTo(items, null)).toEqual([]);
<del> expect(limitTo(items, undefined)).toEqual([]);
<add> it('should return an empty array when X = 0', function() {
<add> expect(limitTo(items, 0)).toEqual([]);
<add> expect(limitTo(items, '0')).toEqual([]);
<ide> });
<ide>
<del> it('should return an empty string when X cannot be parsed', function() {
<del> expect(limitTo(str, 'bogus')).toEqual("");
<del> expect(limitTo(str, 'null')).toEqual("");
<del> expect(limitTo(str, 'undefined')).toEqual("");
<del> expect(limitTo(str, null)).toEqual("");
<del> expect(limitTo(str, undefined)).toEqual("");
<add> it('should return entire array when X cannot be parsed', function() {
<add> expect(limitTo(items, 'bogus')).toEqual(items);
<add> expect(limitTo(items, 'null')).toEqual(items);
<add> expect(limitTo(items, 'undefined')).toEqual(items);
<add> expect(limitTo(items, null)).toEqual(items);
<add> expect(limitTo(items, undefined)).toEqual(items);
<add> });
<add>
<add> it('should return an empty string when X = 0', function() {
<add> expect(limitTo(str, 0)).toEqual("");
<add> expect(limitTo(str, '0')).toEqual("");
<add> });
<add>
<add> it('should return entire string when X cannot be parsed', function() {
<add> expect(limitTo(str, 'bogus')).toEqual(str);
<add> expect(limitTo(str, 'null')).toEqual(str);
<add> expect(limitTo(str, 'undefined')).toEqual(str);
<add> expect(limitTo(str, null)).toEqual(str);
<add> expect(limitTo(str, undefined)).toEqual(str);
<ide> });
<ide>
<ide> | 2 |
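The specs above pin down the new semantics: an unparseable limit leaves the input untouched, 0 empties it, and a negative limit keeps items from the end. A language-agnostic sketch of that logic in Python (the string coercion and Infinity handling of the real filter are omitted):

```python
def limit_to(items, limit):
    try:
        limit = int(limit)
    except (TypeError, ValueError):
        return items                  # invalid limit: input returned unchanged
    return items[:limit] if limit >= 0 else items[limit:]


print(limit_to([0, 1, 2, 3], "bogus"))  # [0, 1, 2, 3]
print(limit_to([0, 1, 2, 3], 0))        # []
print(limit_to([0, 1, 2, 3], -2))       # [2, 3]
```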
Ruby | Ruby | use helper methods | 7ac337181a3abbecc99c36d3702e7a0662117584 | <ide><path>Library/Homebrew/diagnostic.rb
<ide> def check_homebrew_prefix
<ide> end
<ide>
<ide> def check_deleted_formula
<del> formulae = Dir.children(HOMEBREW_CELLAR)
<del> formulae.delete(".keepme")
<add> kegs = Keg.all
<ide> deleted_formulae = []
<del> formulae.each do |f|
<del> Formula[f]
<add> kegs.each do |keg|
<add> keg.to_formula
<ide> rescue
<del> deleted_formulae << f
<add> deleted_formulae << keg.name
<ide> end
<ide> return if deleted_formulae.blank?
<ide>
<ide> message = <<~EOS
<del> Some installed formulae are deleted.
<add> Some installed formulae were deleted!
<ide> You should find replacements for the following formulae:
<del> #{deleted_formulae.*"\n "}
<add> #{deleted_formulae.join("\n ")}
<ide> EOS
<ide> message
<ide> end | 1 |
Mixed | Go | use the new error package | 628b9a41b09fde3ce1493f7d4f1495b9afaa506c | <ide><path>api/errors/README.md
<add>Docker 'errors' package
<add>=======================
<add>
<add>This package contains all of the error messages generated by the Docker
<add>engine that might be exposed via the Docker engine's REST API.
<add>
<add>Each top-level engine package will have its own file in this directory
<add>so that there's a clear grouping of errors, instead of just one big
<add>file. The errors for each package are defined here instead of within
<add>their respective package structure so that Docker CLI code that may need
<add>to import these error definition files will not need to know or understand
<add>the engine's package/directory structure. In other words, all they should
<add>need to do is import `.../docker/api/errors` and they will automatically
<add>pick up all Docker engine defined errors. This also gives the engine
<add>developers the freedom to change the engine packaging structure (e.g. to
<add>CRUD packages) without worrying about breaking existing clients.
<add>
<add>These errors are defined using the 'errcode' package. The `errcode` package
<add>allows for each error to be typed and include all information necessary to
<add>have further processing done on them if necessary. In particular, each error
<add>includes:
<add>
<add>* Value - a unique string (in all caps) associated with this error.
<add>Typically, this string is the same name as the variable name of the error
<add>(w/o the `ErrorCode` text) but in all caps.
<add>
<add>* Message - the human readable sentence that will be displayed for this
<add>error. It can contain '%s' substitutions that allows for the code generating
<add>the error to specify values that will be inserted in the string prior to
<add>being displayed to the end-user. The `WithArgs()` function can be used to
<add>specify the insertion strings. Note, the evaluation of the strings will be
<add>done at the time `WithArgs()` is called.
<add>
<add>* Description - additional human readable text to further explain the
<add>circumstances of the error situation.
<add>
<add>* HTTPStatusCode - when the error is returned back to a CLI, this value
<add>will be used to populate the HTTP status code. If not present the default
<add>value will be `StatusInternalServerError`, 500.
<add>
<add>Not all errors generated within the engine's executable will be propagated
<add>back to the engine's API layer. For example, it is expected that errors
<add>generated by vendored code (under `docker/vendor`) and packaged code
<add>(under `docker/pkg`) will be converted into errors defined by this package.
<add>
<add>When processing an errcode error, if you are looking for a particular
<add>error then you can do something like:
<add>
<add>```
<add>import derr "github.com/docker/docker/api/errors"
<add>
<add>...
<add>
<add>err := someFunc()
<add>if err.ErrorCode() == derr.ErrorCodeNoSuchContainer {
<add> ...
<add>}
<add>```
<ide><path>api/errors/builder.go
<add>package errors
<add>
<add>// This file contains all of the errors that can be generated from the
<add>// docker/builder component.
<add>
<add>import (
<add> "net/http"
<add>
<add> "github.com/docker/distribution/registry/api/errcode"
<add>)
<add>
<add>var (
<add> // ErrorCodeAtLeastOneArg is generated when the parser comes across a
<add> // Dockerfile command that doesn't have any args.
<add> ErrorCodeAtLeastOneArg = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "ATLEASTONEARG",
<add> Message: "%s requires at least one argument",
<add> Description: "The specified command requires at least one argument",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeExactlyOneArg is generated when the parser comes across a
<add> // Dockerfile command that requires exactly one arg but got less/more.
<add> ErrorCodeExactlyOneArg = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "EXACTLYONEARG",
<add> Message: "%s requires exactly one argument",
<add> Description: "The specified command requires exactly one argument",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeAtLeastTwoArgs is generated when the parser comes across a
<add> // Dockerfile command that requires at least two args but got less.
<add> ErrorCodeAtLeastTwoArgs = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "ATLEASTTWOARGS",
<add> Message: "%s requires at least two arguments",
<add> Description: "The specified command requires at least two arguments",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeTooManyArgs is generated when the parser comes across a
<add> // Dockerfile command that has more args than it should
<add> ErrorCodeTooManyArgs = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "TOOMANYARGS",
<add> Message: "Bad input to %s, too many args",
<add> Description: "The specified command was passed too many arguments",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeChainOnBuild is generated when the parser comes across a
<add> // Dockerfile command that is trying to chain ONBUILD commands.
<add> ErrorCodeChainOnBuild = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "CHAINONBUILD",
<add> Message: "Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed",
<add>		Description:    "ONBUILD Dockerfile commands aren't allowed on ONBUILD commands",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeBadOnBuildCmd is generated when the parser comes across a
<add> // an ONBUILD Dockerfile command with an invalid trigger/command.
<add> ErrorCodeBadOnBuildCmd = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "BADONBUILDCMD",
<add> Message: "%s isn't allowed as an ONBUILD trigger",
<add> Description: "The specified ONBUILD command isn't allowed",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeMissingFrom is generated when the Dockerfile is missing
<add> // a FROM command.
<add> ErrorCodeMissingFrom = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "MISSINGFROM",
<add> Message: "Please provide a source image with `from` prior to run",
<add> Description: "The Dockerfile is missing a FROM command",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeNotOnWindows is generated when the specified Dockerfile
<add> // command is not supported on Windows.
<add> ErrorCodeNotOnWindows = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "NOTONWINDOWS",
<add> Message: "%s is not supported on Windows",
<add> Description: "The specified Dockerfile command is not supported on Windows",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeVolumeEmpty is generated when the specified Volume string
<add> // is empty.
<add> ErrorCodeVolumeEmpty = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "VOLUMEEMPTY",
<add> Message: "Volume specified can not be an empty string",
<add> Description: "The specified volume can not be an empty string",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>)
<ide><path>api/errors/daemon.go
<add>package errors
<add>
<add>// This file contains all of the errors that can be generated from the
<add>// docker/daemon component.
<add>
<add>import (
<add> "net/http"
<add>
<add> "github.com/docker/distribution/registry/api/errcode"
<add>)
<add>
<add>var (
<add> // ErrorCodeNoSuchContainer is generated when we look for a container by
<add> // name or ID and we can't find it.
<add> ErrorCodeNoSuchContainer = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "NOSUCHCONTAINER",
<add> Message: "no such id: %s",
<add> Description: "The specified container can not be found",
<add> HTTPStatusCode: http.StatusNotFound,
<add> })
<add>)
<ide><path>api/errors/error.go
<add>package errors
<add>
<add>// This file contains all of the errors that can be generated from the
<add>// docker engine but are not tied to any specific top-level component.
<add>
<add>const errGroup = "engine"
<ide><path>api/server/image.go
<ide> package server
<ide> import (
<ide> "encoding/base64"
<ide> "encoding/json"
<add> "errors"
<ide> "fmt"
<ide> "io"
<ide> "net/http"
<ide> func (s *Server) postBuild(version version.Version, w http.ResponseWriter, r *ht
<ide> return err
<ide> }
<ide> sf := streamformatter.NewJSONStreamFormatter()
<del> w.Write(sf.FormatError(err))
<add> w.Write(sf.FormatError(errors.New(utils.GetErrorMessage(err))))
<ide> }
<ide> return nil
<ide> }
<ide><path>api/server/server.go
<ide> import (
<ide> "github.com/gorilla/mux"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> "github.com/docker/distribution/registry/api/errcode"
<ide> "github.com/docker/docker/api"
<ide> "github.com/docker/docker/autogen/dockerversion"
<ide> "github.com/docker/docker/daemon"
<ide> func httpError(w http.ResponseWriter, err error) {
<ide> logrus.WithFields(logrus.Fields{"error": err, "writer": w}).Error("unexpected HTTP error handling")
<ide> return
<ide> }
<add>
<ide> statusCode := http.StatusInternalServerError
<del> // FIXME: this is brittle and should not be necessary.
<del> // If we need to differentiate between different possible error types, we should
<del> // create appropriate error types with clearly defined meaning.
<del> errStr := strings.ToLower(err.Error())
<del> for keyword, status := range map[string]int{
<del> "not found": http.StatusNotFound,
<del> "no such": http.StatusNotFound,
<del> "bad parameter": http.StatusBadRequest,
<del> "conflict": http.StatusConflict,
<del> "impossible": http.StatusNotAcceptable,
<del> "wrong login/password": http.StatusUnauthorized,
<del> "hasn't been activated": http.StatusForbidden,
<del> } {
<del> if strings.Contains(errStr, keyword) {
<del> statusCode = status
<del> break
<add> errMsg := err.Error()
<add>
<add> // Based on the type of error we get we need to process things
<add> // slightly differently to extract the error message.
<add>	// In the 'errcode.*' cases there are two different types of
<add>	// error that could be returned. errcode.ErrorCode is the base
<add> // type of error object - it is just an 'int' that can then be
<add> // used as the look-up key to find the message. errorcode.Error
<add>	// extends errcode.ErrorCode by adding error-instance specific
<add> // data, like 'details' or variable strings to be inserted into
<add> // the message.
<add> //
<add> // Ideally, we should just be able to call err.Error() for all
<add> // cases but the errcode package doesn't support that yet.
<add> //
<add> // Additionally, in both errcode cases, there might be an http
<add> // status code associated with it, and if so use it.
<add> switch err.(type) {
<add> case errcode.ErrorCode:
<add> daError, _ := err.(errcode.ErrorCode)
<add> statusCode = daError.Descriptor().HTTPStatusCode
<add> errMsg = daError.Message()
<add>
<add> case errcode.Error:
<add> // For reference, if you're looking for a particular error
<add> // then you can do something like :
<add> // import ( derr "github.com/docker/docker/api/errors" )
<add> // if daError.ErrorCode() == derr.ErrorCodeNoSuchContainer { ... }
<add>
<add> daError, _ := err.(errcode.Error)
<add> statusCode = daError.ErrorCode().Descriptor().HTTPStatusCode
<add> errMsg = daError.Message
<add>
<add> default:
<add>		// This part will be removed once we've
<add> // converted everything over to use the errcode package
<add>
<add> // FIXME: this is brittle and should not be necessary.
<add> // If we need to differentiate between different possible error types,
<add> // we should create appropriate error types with clearly defined meaning
<add> errStr := strings.ToLower(err.Error())
<add> for keyword, status := range map[string]int{
<add> "not found": http.StatusNotFound,
<add> "no such": http.StatusNotFound,
<add> "bad parameter": http.StatusBadRequest,
<add> "conflict": http.StatusConflict,
<add> "impossible": http.StatusNotAcceptable,
<add> "wrong login/password": http.StatusUnauthorized,
<add> "hasn't been activated": http.StatusForbidden,
<add> } {
<add> if strings.Contains(errStr, keyword) {
<add> statusCode = status
<add> break
<add> }
<ide> }
<ide> }
<ide>
<add> if statusCode == 0 {
<add> statusCode = http.StatusInternalServerError
<add> }
<add>
<ide> logrus.WithFields(logrus.Fields{"statusCode": statusCode, "err": err}).Error("HTTP Error")
<del> http.Error(w, err.Error(), statusCode)
<add> http.Error(w, errMsg, statusCode)
<ide> }
<ide>
<ide> // writeJSON writes the value v to the http response stream as json with standard
<ide><path>builder/dispatchers.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> derr "github.com/docker/docker/api/errors"
<ide> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/nat"
<ide> "github.com/docker/docker/runconfig"
<ide> func nullDispatch(b *builder, args []string, attributes map[string]bool, origina
<ide> //
<ide> func env(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) == 0 {
<del> return fmt.Errorf("ENV requires at least one argument")
<add> return derr.ErrorCodeAtLeastOneArg.WithArgs("ENV")
<ide> }
<ide>
<ide> if len(args)%2 != 0 {
<ide> // should never get here, but just in case
<del> return fmt.Errorf("Bad input to ENV, too many args")
<add> return derr.ErrorCodeTooManyArgs.WithArgs("ENV")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func env(b *builder, args []string, attributes map[string]bool, original string)
<ide> // Sets the maintainer metadata.
<ide> func maintainer(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) != 1 {
<del> return fmt.Errorf("MAINTAINER requires exactly one argument")
<add> return derr.ErrorCodeExactlyOneArg.WithArgs("MAINTAINER")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func maintainer(b *builder, args []string, attributes map[string]bool, original
<ide> //
<ide> func label(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) == 0 {
<del> return fmt.Errorf("LABEL requires at least one argument")
<add> return derr.ErrorCodeAtLeastOneArg.WithArgs("LABEL")
<ide> }
<ide> if len(args)%2 != 0 {
<ide> // should never get here, but just in case
<del> return fmt.Errorf("Bad input to LABEL, too many args")
<add> return derr.ErrorCodeTooManyArgs.WithArgs("LABEL")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func label(b *builder, args []string, attributes map[string]bool, original strin
<ide> //
<ide> func add(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) < 2 {
<del> return fmt.Errorf("ADD requires at least two arguments")
<add> return derr.ErrorCodeAtLeastTwoArgs.WithArgs("ADD")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func add(b *builder, args []string, attributes map[string]bool, original string)
<ide> //
<ide> func dispatchCopy(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) < 2 {
<del> return fmt.Errorf("COPY requires at least two arguments")
<add> return derr.ErrorCodeAtLeastTwoArgs.WithArgs("COPY")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func dispatchCopy(b *builder, args []string, attributes map[string]bool, origina
<ide> //
<ide> func from(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) != 1 {
<del> return fmt.Errorf("FROM requires one argument")
<add> return derr.ErrorCodeExactlyOneArg.WithArgs("FROM")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func from(b *builder, args []string, attributes map[string]bool, original string
<ide> //
<ide> func onbuild(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) == 0 {
<del> return fmt.Errorf("ONBUILD requires at least one argument")
<add> return derr.ErrorCodeAtLeastOneArg.WithArgs("ONBUILD")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func onbuild(b *builder, args []string, attributes map[string]bool, original str
<ide> triggerInstruction := strings.ToUpper(strings.TrimSpace(args[0]))
<ide> switch triggerInstruction {
<ide> case "ONBUILD":
<del> return fmt.Errorf("Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed")
<add> return derr.ErrorCodeChainOnBuild
<ide> case "MAINTAINER", "FROM":
<del> return fmt.Errorf("%s isn't allowed as an ONBUILD trigger", triggerInstruction)
<add> return derr.ErrorCodeBadOnBuildCmd.WithArgs(triggerInstruction)
<ide> }
<ide>
<ide> original = regexp.MustCompile(`(?i)^\s*ONBUILD\s*`).ReplaceAllString(original, "")
<ide> func onbuild(b *builder, args []string, attributes map[string]bool, original str
<ide> //
<ide> func workdir(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if len(args) != 1 {
<del> return fmt.Errorf("WORKDIR requires exactly one argument")
<add> return derr.ErrorCodeExactlyOneArg.WithArgs("WORKDIR")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func workdir(b *builder, args []string, attributes map[string]bool, original str
<ide> //
<ide> func run(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if b.image == "" && !b.noBaseImage {
<del> return fmt.Errorf("Please provide a source image with `from` prior to run")
<add> return derr.ErrorCodeMissingFrom
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func expose(b *builder, args []string, attributes map[string]bool, original stri
<ide> portsTab := args
<ide>
<ide> if len(args) == 0 {
<del> return fmt.Errorf("EXPOSE requires at least one argument")
<add> return derr.ErrorCodeAtLeastOneArg.WithArgs("EXPOSE")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func expose(b *builder, args []string, attributes map[string]bool, original stri
<ide> //
<ide> func user(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if runtime.GOOS == "windows" {
<del> return fmt.Errorf("USER is not supported on Windows")
<add> return derr.ErrorCodeNotOnWindows.WithArgs("USER")
<ide> }
<ide>
<ide> if len(args) != 1 {
<del> return fmt.Errorf("USER requires exactly one argument")
<add> return derr.ErrorCodeExactlyOneArg.WithArgs("USER")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func user(b *builder, args []string, attributes map[string]bool, original string
<ide> //
<ide> func volume(b *builder, args []string, attributes map[string]bool, original string) error {
<ide> if runtime.GOOS == "windows" {
<del> return fmt.Errorf("VOLUME is not supported on Windows")
<add> return derr.ErrorCodeNotOnWindows.WithArgs("VOLUME")
<ide> }
<ide> if len(args) == 0 {
<del> return fmt.Errorf("VOLUME requires at least one argument")
<add> return derr.ErrorCodeAtLeastOneArg.WithArgs("VOLUME")
<ide> }
<ide>
<ide> if err := b.BuilderFlags.Parse(); err != nil {
<ide> func volume(b *builder, args []string, attributes map[string]bool, original stri
<ide> for _, v := range args {
<ide> v = strings.TrimSpace(v)
<ide> if v == "" {
<del> return fmt.Errorf("Volume specified can not be an empty string")
<add> return derr.ErrorCodeVolumeEmpty
<ide> }
<ide> b.Config.Volumes[v] = struct{}{}
<ide> }
<ide><path>daemon/daemon.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/api"
<add> derr "github.com/docker/docker/api/errors"
<ide> "github.com/docker/docker/daemon/events"
<ide> "github.com/docker/docker/daemon/execdriver"
<ide> "github.com/docker/docker/daemon/execdriver/execdrivers"
<ide> func (daemon *Daemon) Get(prefixOrName string) (*Container, error) {
<ide>
<ide> containerId, indexError := daemon.idIndex.Get(prefixOrName)
<ide> if indexError != nil {
<add> // When truncindex defines an error type, use that instead
<add> if strings.Contains(indexError.Error(), "no such id") {
<add> return nil, derr.ErrorCodeNoSuchContainer.WithArgs(prefixOrName)
<add> }
<ide> return nil, indexError
<ide> }
<ide> return daemon.containers.Get(containerId), nil
<ide><path>utils/utils.go
<ide> import (
<ide> "runtime"
<ide> "strings"
<ide>
<add> "github.com/docker/distribution/registry/api/errcode"
<ide> "github.com/docker/docker/autogen/dockerversion"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/fileutils"
<ide> func ImageReference(repo, ref string) string {
<ide> func DigestReference(ref string) bool {
<ide> return strings.Contains(ref, ":")
<ide> }
<add>
<add>// GetErrorMessage returns the human readable message associated with
<add>// the passed-in error. In some cases the default Error() func returns
<add>// something that is less than useful so based on its types this func
<add>// will go and get a better piece of text.
<add>func GetErrorMessage(err error) string {
<add> switch err.(type) {
<add> case errcode.Error:
<add> e, _ := err.(errcode.Error)
<add> return e.Message
<add>
<add> case errcode.ErrorCode:
<add> ec, _ := err.(errcode.ErrorCode)
<add> return ec.Message()
<add>
<add> default:
<add> return err.Error()
<add> }
<add>} | 9 |
PHP | PHP | add methods to get plugin's template path | b31228ca6102a067956c85b1df21ca5c38053031 | <ide><path>src/Core/BasePlugin.php
<ide> class BasePlugin implements PluginInterface
<ide> */
<ide> protected $configPath;
<ide>
<add> /**
<add> * The templates path for this plugin.
<add> *
<add> * @var string
<add> */
<add> protected $templatePath;
<add>
<ide> /**
<ide> * The name of this plugin
<ide> *
<ide> public function getClassPath(): string
<ide> return $path . 'src' . DIRECTORY_SEPARATOR;
<ide> }
<ide>
<add> /**
<add> * {@inheritDoc}
<add> */
<add> public function getTemplatePath(): string
<add> {
<add> if ($this->templatePath) {
<add> return $this->templatePath;
<add> }
<add> $path = $this->getPath();
<add>
<add> return $path . 'templates' . DIRECTORY_SEPARATOR;
<add> }
<add>
<ide> /**
<ide> * {@inheritdoc}
<ide> */
<ide><path>src/Core/Plugin.php
<ide> public static function configPath(string $name): string
<ide> return $plugin->getConfigPath();
<ide> }
<ide>
<add> /**
<add> * Returns the filesystem path for plugin's folder containing template files.
<add> *
<add> * @param string $name name of the plugin in CamelCase format.
<add> * @return string Path to the plugin folder container config files.
<add> * @throws \Cake\Core\Exception\MissingPluginException If plugin has not been loaded.
<add> */
<add> public static function templatePath(string $name): string
<add> {
<add> $plugin = static::getCollection()->get($name);
<add>
<add> return $plugin->getTemplatePath();
<add> }
<add>
<ide> /**
<ide> * Returns true if the plugin $plugin is already loaded.
<ide> *
<ide><path>src/Core/PluginInterface.php
<ide> public function getConfigPath(): string;
<ide> */
<ide> public function getClassPath(): string;
<ide>
<add> /**
<add> * Get the filesystem path to templates for this plugin
<add> *
<add> * @return string
<add> */
<add> public function getTemplatePath(): string;
<add>
<ide> /**
<ide> * Load all the application configuration and bootstrap logic.
<ide> *
<ide><path>tests/TestCase/Core/BasePluginTest.php
<ide> public function testGetPathBaseClass()
<ide> $this->assertSame($expected, $plugin->getPath());
<ide> $this->assertSame($expected . 'config' . DS, $plugin->getConfigPath());
<ide> $this->assertSame($expected . 'src' . DS, $plugin->getClassPath());
<add> $this->assertSame($expected . 'templates' . DS, $plugin->getTemplatePath());
<ide> }
<ide>
<ide> public function testGetPathOptionValue()
<ide> public function testGetPathOptionValue()
<ide> $this->assertSame($expected, $plugin->getPath());
<ide> $this->assertSame($expected . 'config' . DS, $plugin->getConfigPath());
<ide> $this->assertSame($expected . 'src' . DS, $plugin->getClassPath());
<add> $this->assertSame($expected . 'templates' . DS, $plugin->getTemplatePath());
<ide> }
<ide>
<ide> public function testGetPathSubclass()
<ide> public function testGetPathSubclass()
<ide> $this->assertSame($expected, $plugin->getPath());
<ide> $this->assertSame($expected . 'config' . DS, $plugin->getConfigPath());
<ide> $this->assertSame($expected . 'src' . DS, $plugin->getClassPath());
<add> $this->assertSame($expected . 'templates' . DS, $plugin->getTemplatePath());
<ide> }
<ide> }
<ide><path>tests/TestCase/Core/PluginTest.php
<ide> public function testClassPath()
<ide> $this->assertPathEquals(Plugin::classPath('Company/TestPluginThree'), $expected);
<ide> }
<ide>
<add> /**
<add> * Tests that Plugin::templatePath() returns the correct path for the loaded plugins
<add> *
<add> * @return void
<add> */
<add> public function testTemplatePath()
<add> {
<add> $this->loadPlugins(['TestPlugin', 'TestPluginTwo', 'Company/TestPluginThree']);
<add> $expected = TEST_APP . 'Plugin' . DS . 'TestPlugin' . DS . 'templates' . DS;
<add> $this->assertPathEquals(Plugin::templatePath('TestPlugin'), $expected);
<add>
<add> $expected = TEST_APP . 'Plugin' . DS . 'TestPluginTwo' . DS . 'templates' . DS;
<add> $this->assertPathEquals(Plugin::templatePath('TestPluginTwo'), $expected);
<add>
<add> $expected = TEST_APP . 'Plugin' . DS . 'Company' . DS . 'TestPluginThree' . DS . 'templates' . DS;
<add> $this->assertPathEquals(Plugin::templatePath('Company/TestPluginThree'), $expected);
<add> }
<add>
<ide> /**
<ide> * Tests that Plugin::classPath() throws an exception on unknown plugin
<ide> * | 5 |
Javascript | Javascript | reduce android 2.3 support | ce3b4a62427c5a3a6669dcb8bf8e27a6287990d5 | <ide><path>src/ajax/parseJSON.js
<ide> define( [
<ide> "../core"
<ide> ], function( jQuery ) {
<ide>
<del>// Support: Android 2.3
<del>// Workaround failure to string-cast null input
<del>jQuery.parseJSON = function( data ) {
<del> return JSON.parse( data + "" );
<del>};
<add>jQuery.parseJSON = JSON.parse;
<ide>
<ide> return jQuery.parseJSON;
<ide>
<ide><path>src/attributes/support.js
<ide> define( [
<ide> // Must access selectedIndex to make default options select
<ide> support.optSelected = opt.selected;
<ide>
<del> // Support: Android<=2.3
<del> // Options inside disabled selects are incorrectly marked as disabled
<del> select.disabled = true;
<del> support.optDisabled = !opt.disabled;
<del>
<ide> // Support: IE<=11+
<ide> // An input loses its value after becoming a radio
<ide> input = document.createElement( "input" );
<ide><path>src/attributes/val.js
<ide> jQuery.extend( {
<ide> if ( ( option.selected || i === index ) &&
<ide>
<ide> // Don't return options that are disabled or in a disabled optgroup
<del> ( support.optDisabled ?
<del> !option.disabled : option.getAttribute( "disabled" ) === null ) &&
<add> !option.disabled &&
<ide> ( !option.parentNode.disabled ||
<ide> !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) {
<ide>
<ide><path>src/css.js
<ide> jQuery.each( [ "height", "width" ], function( i, name ) {
<ide> };
<ide> } );
<ide>
<del>// Support: Android 2.3
<del>jQuery.cssHooks.marginRight = addGetHookIf( support.reliableMarginRight,
<del> function( elem, computed ) {
<del> if ( computed ) {
<del> return swap( elem, { "display": "inline-block" },
<del> curCSS, [ elem, "marginRight" ] );
<del> }
<del> }
<del>);
<del>
<ide> // These hooks are used by animate to expand properties
<ide> jQuery.each( {
<ide> margin: "",
<ide><path>src/css/support.js
<ide> define( [
<ide> // so they're executed at the same time to save the second computation.
<ide> function computeStyleTests() {
<ide> div.style.cssText =
<del>
<del> // Support: Android 2.3
<del> // Vendor-prefix box-sizing
<del> "-webkit-box-sizing:border-box;box-sizing:border-box;" +
<add> "box-sizing:border-box;" +
<ide> "display:block;position:absolute;" +
<ide> "margin:0;margin-top:1%;margin-right:50%;" +
<ide> "border:1px;padding:1px;" +
<ide> define( [
<ide> computeStyleTests();
<ide> }
<ide> return pixelMarginRightVal;
<del> },
<del> reliableMarginRight: function() {
<del>
<del> // Support: Android 2.3
<del> // Check if div with explicit width and no margin-right incorrectly
<del> // gets computed margin-right based on width of container. (#3333)
<del> // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right
<del> // This support function is only executed once so no memoizing is needed.
<del> var ret,
<del> marginDiv = div.appendChild( document.createElement( "div" ) );
<del>
<del> // Reset CSS: box-sizing; display; margin; border; padding
<del> marginDiv.style.cssText = div.style.cssText =
<del>
<del> // Support: Android 2.3
<del> // Vendor-prefix box-sizing
<del> "-webkit-box-sizing:content-box;box-sizing:content-box;" +
<del> "display:block;margin:0;border:0;padding:0";
<del> marginDiv.style.marginRight = marginDiv.style.width = "0";
<del> div.style.width = "1px";
<del> documentElement.appendChild( container );
<del>
<del> ret = !parseFloat( window.getComputedStyle( marginDiv ).marginRight );
<del>
<del> documentElement.removeChild( container );
<del> div.removeChild( marginDiv );
<del>
<del> return ret;
<ide> }
<ide> } );
<ide> } )();
<ide><path>src/manipulation/wrapMap.js
<ide> var wrapMap = {
<ide> // their parent elements (except for "table" element) could be omitted
<ide> // since browser parsers are smart enough to auto-insert them
<ide>
<del> // Support: Android 2.3
<del> // Android browser doesn't auto-insert colgroup
<del> col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
<add> // Auto-insert "colgroup" element
<add> col: [ 2, "<table>", "</table>" ],
<ide>
<ide> // Auto-insert "tbody" element
<ide> tr: [ 2, "<table>", "</table>" ],
<ide><path>test/unit/support.js
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": true,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /(msie 10\.0|trident\/7\.0)/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": true,
<ide> "noCloneChecked": false,
<del> "optDisabled": true,
<ide> "optSelected": false,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": true,
<del> "radioValue": false,
<del> "reliableMarginRight": true
<add> "radioValue": false
<ide> };
<ide> } else if ( /msie 9\.0/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": true,
<ide> "noCloneChecked": false,
<del> "optDisabled": true,
<ide> "optSelected": false,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": true,
<del> "radioValue": false,
<del> "reliableMarginRight": true
<add> "radioValue": false
<ide> };
<ide> } else if ( /chrome/i.test( userAgent ) ) {
<ide>
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": true,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /8\.0(\.\d+|) safari/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": false,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /7\.0(\.\d+|) safari/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /firefox/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": true,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /iphone os 8/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": false,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /iphone os (6|7)/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": true,
<ide> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<add> "radioValue": true
<ide> };
<ide> } else if ( /android 4\.[0-3]/i.test( userAgent ) ) {
<ide> expected = {
<ide> testIframeWithCallback(
<ide> "createHTMLDocument": true,
<ide> "focusin": false,
<ide> "noCloneChecked": true,
<del> "optDisabled": true,
<ide> "optSelected": true,
<ide> "pixelMarginRight": false,
<ide> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": true
<del> };
<del> } else if ( /android 2\.3/i.test( userAgent ) ) {
<del> expected = {
<del> "ajax": true,
<del> "boxSizingReliable": true,
<del> "checkClone": true,
<del> "checkOn": false,
<del> "clearCloneStyle": false,
<del> "cors": true,
<del> "createHTMLDocument": true,
<del> "focusin": false,
<del> "noCloneChecked": true,
<del> "optDisabled": false,
<del> "optSelected": true,
<del> "pixelMarginRight": true,
<del> "pixelPosition": false,
<del> "radioValue": true,
<del> "reliableMarginRight": false
<add> "radioValue": true
<ide> };
<ide> }
<ide> | 7 |
Javascript | Javascript | use runtime globals in shared plugin | 9fa45137d87ad18647c2d51126fe5cc80255e52e | <ide><path>lib/sharing/ConsumeSharedPlugin.js
<ide> class ConsumeSharedPlugin {
<ide> PLUGIN_NAME,
<ide> (chunk, set) => {
<ide> set.add(RuntimeGlobals.module);
<add> set.add(RuntimeGlobals.moduleCache);
<ide> set.add(RuntimeGlobals.moduleFactoriesAddOnly);
<ide> set.add(RuntimeGlobals.shareScopeMap);
<ide> set.add(RuntimeGlobals.initializeSharing);
<ide><path>lib/sharing/ConsumeSharedRuntimeModule.js
<ide> class ConsumeSharedRuntimeModule extends RuntimeModule {
<ide> ? Template.asString([
<ide> `var initialConsumes = ${JSON.stringify(initialConsumes)};`,
<ide> `initialConsumes.forEach(${runtimeTemplate.basicFunction("id", [
<del> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction(
<del> "module",
<del> [
<del> "// Handle case when module is used sync",
<del> "installedModules[id] = 0;",
<del> "delete __webpack_module_cache__[id];",
<del> "var factory = moduleToHandlerMapping[id]();",
<del> 'if(typeof factory !== "function") throw new Error("Shared module is not available for eager consumption: " + id);',
<del> `module.exports = factory();`
<del> ]
<del> )}`
<add> `${
<add> RuntimeGlobals.moduleFactories
<add> }[id] = ${runtimeTemplate.basicFunction("module", [
<add> "// Handle case when module is used sync",
<add> "installedModules[id] = 0;",
<add> `delete ${RuntimeGlobals.moduleCache}[id];`,
<add> "var factory = moduleToHandlerMapping[id]();",
<add> 'if(typeof factory !== "function") throw new Error("Shared module is not available for eager consumption: " + id);',
<add> `module.exports = factory();`
<add> ])}`
<ide> ])});`
<ide> ])
<ide> : "// no consumes in initial chunks",
<ide> class ConsumeSharedRuntimeModule extends RuntimeModule {
<ide> "factory",
<ide> [
<ide> "installedModules[id] = 0;",
<del> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction(
<del> "module",
<del> [
<del> "delete __webpack_module_cache__[id];",
<del> "module.exports = factory();"
<del> ]
<del> )}`
<add> `${
<add> RuntimeGlobals.moduleFactories
<add> }[id] = ${runtimeTemplate.basicFunction("module", [
<add> `delete ${RuntimeGlobals.moduleCache}[id];`,
<add> "module.exports = factory();"
<add> ])}`
<ide> ]
<ide> )};`,
<ide> `var onError = ${runtimeTemplate.basicFunction("error", [
<ide> "delete installedModules[id];",
<del> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction(
<del> "module",
<del> ["delete __webpack_module_cache__[id];", "throw error;"]
<del> )}`
<add> `${
<add> RuntimeGlobals.moduleFactories
<add> }[id] = ${runtimeTemplate.basicFunction("module", [
<add> `delete ${RuntimeGlobals.moduleCache}[id];`,
<add> "throw error;"
<add> ])}`
<ide> ])};`,
<ide> "try {",
<ide> Template.indent([ | 2 |
Text | Text | apply suggestions from code review | 08e3995add441edb5dc23362b05f1c725f3bb024 | <ide><path>docs/recipes/WritingTests.md
<ide> As such, the Redux code can be treated as an implementation detail of the app, w
<ide> The general advice for testing an app using Redux is as follows:
<ide>
<ide> - Use integration tests for everything working together. I.e. for a React app using Redux, render a `<Provider>` with a real store instance wrapping the component/s being tested. Interactions with the page being tested should use real Redux logic, with API calls mocked out so app code doesn't have to change, and assert that the UI is updated appropriately.
<del>- Use basic unit tests _where fitting_ for pure functions that deserve it. I.e. particularly complex reducers or selectors. However, in many cases, these are just implementation details that are covered by integration tests instead.
<add>- If needed, use basic unit tests for pure functions such as particularly complex reducers or selectors. However, in many cases, these are just implementation details that are covered by integration tests instead.
<ide>
<ide> :::tip
<ide>
<ide> Our recommendation is to mock async requests at the `fetch/xhr` level using tool
<ide>
<ide> ### Reducers
<ide>
<del>A reducer should be a pure function that returns the new state after applying the action to the previous state. In the majority of cases, the reducer is an implementation detail that does not need explicit tests. However, if your reducer contains particularly complex logic that you would like the confidence of having unit tests for, reducers can be easily tested. As a reducer should always be a pure function, it is easy to write tests for - a given input should always provide a particular output.
<add>Reducers pure functions that return the new state after applying the action to the previous state. In the majority of cases, the reducer is an implementation detail that does not need explicit tests. However, if your reducer contains particularly complex logic that you would like the confidence of having unit tests for, reducers can be easily tested.
<add>
<add>Because reducers are pure functions, testing them should be straightforward. Call the reducer with a specific input `state` and `action`, and assert that the result state matches expectations.
<ide>
<ide> #### Example
<ide>
<ide> export default function App() {
<ide> }
<ide> ```
<ide>
<del>This app involves async action creators, reducers and selectors. All of these can be tested by writing an integration test with the following in mind:
<add>This app involves thunks, reducers and selectors. All of these can be tested by writing an integration test with the following in mind:
<ide>
<ide> - Upon first loading the app, there should be no user yet - we should see 'No user' on the screen.
<ide> - After clicking the button that says 'Fetch user', we expect it to start fetching the user. We should see 'Fetching user...' displayed on the screen. | 1 |
Python | Python | handle large negative np.int64 args in binary_repr | dd9051c78476be2bc2cbf03ab895e8dde5ca14ca | <ide><path>numpy/core/numeric.py
<ide> def warn_if_insufficient(width, binwidth):
<ide> "will raise an error in the future.", DeprecationWarning,
<ide> stacklevel=3)
<ide>
<add> # Ensure that num is a Python integer to avoid overflow or unwanted
<add> # casts to floating point.
<add> num = operator.index(num)
<add>
<ide> if num == 0:
<ide> return '0' * (width or 1)
<ide>
<ide><path>numpy/core/tests/test_numeric.py
<ide> def test_neg_width_boundaries(self):
<ide> exp = '1' + (width - 1) * '0'
<ide> assert_equal(np.binary_repr(num, width=width), exp)
<ide>
<add> def test_large_neg_int64(self):
<add> # See gh-14289.
<add> assert_equal(np.binary_repr(np.int64(-2**62), width=64),
<add> '11' + '0'*62)
<add>
<ide>
<ide> class TestBaseRepr(object):
<ide> def test_base3(self): | 2 |
Javascript | Javascript | add strict equalities in src/core/bidi.js | 84503c656d7549b0a59f05e2e12ec7e67b47d171 | <ide><path>src/core/bidi.js
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide>
<ide> function findUnequal(arr, start, value) {
<ide> for (var j = start, jj = arr.length; j < jj; ++j) {
<del> if (arr[j] != value) {
<add> if (arr[j] !== value) {
<ide> return j;
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> } else if (0x0700 <= charCode && charCode <= 0x08AC) {
<ide> charType = 'AL';
<ide> }
<del> if (charType == 'R' || charType == 'AL' || charType == 'AN') {
<add> if (charType === 'R' || charType === 'AL' || charType === 'AN') {
<ide> numBidi++;
<ide> }
<ide> types[i] = charType;
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> return createBidiText(str, isLTR);
<ide> }
<ide>
<del> if (startLevel == -1) {
<add> if (startLevel === -1) {
<ide> if ((strLength / numBidi) < 0.3) {
<ide> isLTR = true;
<ide> startLevel = 0;
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> */
<ide> var lastType = sor;
<ide> for (i = 0; i < strLength; ++i) {
<del> if (types[i] == 'NSM') {
<add> if (types[i] === 'NSM') {
<ide> types[i] = lastType;
<ide> } else {
<ide> lastType = types[i];
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> var t;
<ide> for (i = 0; i < strLength; ++i) {
<ide> t = types[i];
<del> if (t == 'EN') {
<del> types[i] = (lastType == 'AL') ? 'AN' : 'EN';
<del> } else if (t == 'R' || t == 'L' || t == 'AL') {
<add> if (t === 'EN') {
<add> types[i] = (lastType === 'AL') ? 'AN' : 'EN';
<add> } else if (t === 'R' || t === 'L' || t === 'AL') {
<ide> lastType = t;
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> */
<ide> for (i = 0; i < strLength; ++i) {
<ide> t = types[i];
<del> if (t == 'AL') {
<add> if (t === 'AL') {
<ide> types[i] = 'R';
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> type changes to that type:
<ide> */
<ide> for (i = 1; i < strLength - 1; ++i) {
<del> if (types[i] == 'ES' && types[i - 1] == 'EN' && types[i + 1] == 'EN') {
<add> if (types[i] === 'ES' && types[i - 1] === 'EN' && types[i + 1] === 'EN') {
<ide> types[i] = 'EN';
<ide> }
<del> if (types[i] == 'CS' && (types[i - 1] == 'EN' || types[i - 1] == 'AN') &&
<del> types[i + 1] == types[i - 1]) {
<add> if (types[i] === 'CS' &&
<add> (types[i - 1] === 'EN' || types[i - 1] === 'AN') &&
<add> types[i + 1] === types[i - 1]) {
<ide> types[i] = types[i - 1];
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> to all European numbers:
<ide> */
<ide> for (i = 0; i < strLength; ++i) {
<del> if (types[i] == 'EN') {
<add> if (types[i] === 'EN') {
<ide> // do before
<ide> var j;
<ide> for (j = i - 1; j >= 0; --j) {
<del> if (types[j] != 'ET') {
<add> if (types[j] !== 'ET') {
<ide> break;
<ide> }
<ide> types[j] = 'EN';
<ide> }
<ide> // do after
<ide> for (j = i + 1; j < strLength; --j) {
<del> if (types[j] != 'ET') {
<add> if (types[j] !== 'ET') {
<ide> break;
<ide> }
<ide> types[j] = 'EN';
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> */
<ide> for (i = 0; i < strLength; ++i) {
<ide> t = types[i];
<del> if (t == 'WS' || t == 'ES' || t == 'ET' || t == 'CS') {
<add> if (t === 'WS' || t === 'ES' || t === 'ET' || t === 'CS') {
<ide> types[i] = 'ON';
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> lastType = sor;
<ide> for (i = 0; i < strLength; ++i) {
<ide> t = types[i];
<del> if (t == 'EN') {
<del> types[i] = ((lastType == 'L') ? 'L' : 'EN');
<del> } else if (t == 'R' || t == 'L') {
<add> if (t === 'EN') {
<add> types[i] = ((lastType === 'L') ? 'L' : 'EN');
<add> } else if (t === 'R' || t === 'L') {
<ide> lastType = t;
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> end-of-level-run (eor) are used at level run boundaries.
<ide> */
<ide> for (i = 0; i < strLength; ++i) {
<del> if (types[i] == 'ON') {
<add> if (types[i] === 'ON') {
<ide> var end = findUnequal(types, i + 1, 'ON');
<ide> var before = sor;
<ide> if (i > 0) {
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> if (end + 1 < strLength) {
<ide> after = types[end + 1];
<ide> }
<del> if (before != 'L') {
<add> if (before !== 'L') {
<ide> before = 'R';
<ide> }
<del> if (after != 'L') {
<add> if (after !== 'L') {
<ide> after = 'R';
<ide> }
<del> if (before == after) {
<add> if (before === after) {
<ide> setValues(types, i, end, before);
<ide> }
<ide> i = end - 1; // reset to end (-1 so next iteration is ok)
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> N2. Any remaining neutrals take the embedding direction.
<ide> */
<ide> for (i = 0; i < strLength; ++i) {
<del> if (types[i] == 'ON') {
<add> if (types[i] === 'ON') {
<ide> types[i] = e;
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> for (i = 0; i < strLength; ++i) {
<ide> t = types[i];
<ide> if (isEven(levels[i])) {
<del> if (t == 'R') {
<add> if (t === 'R') {
<ide> levels[i] += 1;
<del> } else if (t == 'AN' || t == 'EN') {
<add> } else if (t === 'AN' || t === 'EN') {
<ide> levels[i] += 2;
<ide> }
<ide> } else { // isOdd
<del> if (t == 'L' || t == 'AN' || t == 'EN') {
<add> if (t === 'L' || t === 'AN' || t === 'EN') {
<ide> levels[i] += 1;
<ide> }
<ide> }
<ide> var bidi = PDFJS.bidi = (function bidiClosure() {
<ide> var result = '';
<ide> for (i = 0, ii = chars.length; i < ii; ++i) {
<ide> var ch = chars[i];
<del> if (ch != '<' && ch != '>') {
<add> if (ch !== '<' && ch !== '>') {
<ide> result += ch;
<ide> }
<ide> } | 1 |
Ruby | Ruby | fix tests on 1.9 | b2d6fdae353be4fca41d7ac1839f30d9737162fd | <ide><path>actionpack/lib/action_view/helpers/number_helper.rb
<add>require 'active_support/core_ext/float/rounding'
<add>
<ide> module ActionView
<ide> module Helpers #:nodoc:
<ide> # Provides methods for converting numbers into formatted strings.
<ide><path>activemodel/lib/active_model/core.rb
<ide> begin
<ide> require 'active_support'
<ide> rescue LoadError
<del> activesupport_path = "#{File.dirname(__FILE__)}/../../activesupport/lib"
<add> activesupport_path = "#{File.dirname(__FILE__)}/../../../activesupport/lib"
<ide> if File.directory?(activesupport_path)
<ide> $:.unshift activesupport_path
<ide> require 'active_support'
<ide><path>activesupport/lib/active_support/core_ext/class/attribute_accessors.rb
<add>require 'active_support/core_ext/array/extract_options'
<add>
<ide> # Extends the class object with class and instance accessors for class attributes,
<ide> # just like the native attr* accessors for instance attributes.
<ide> #
<ide><path>activesupport/lib/active_support/core_ext/class/delegating_attributes.rb
<del>require 'active_support/core_ext/blank'
<add>require 'active_support/core_ext/object/blank'
<ide>
<ide> class Class
<ide> def superclass_delegating_reader(*names) | 4 |
Python | Python | determine complex types using typecodes | f59e22561e02a185e57d67d07b423fd429ea35bb | <ide><path>numpy/ma/tests/test_core.py
<ide> def test_inplace_multiplication_array_type(self):
<ide> def test_inplace_floor_division_scalar_type(self):
<ide> # Test of inplace division
<ide> # Check for TypeError in case of unsupported types
<del> unsupported = {np.complex64, np.complex128, np.complex256}
<add> unsupported = {np.dtype(t).type for t in np.typecodes["Complex"]}
<ide> for t in self.othertypes:
<ide> with warnings.catch_warnings(record=True) as w:
<ide> warnings.filterwarnings("always")
<ide> def test_inplace_floor_division_scalar_type(self):
<ide> def test_inplace_floor_division_array_type(self):
<ide> # Test of inplace division
<ide> # Check for TypeError in case of unsupported types
<del> unsupported = {np.complex64, np.complex128, np.complex256}
<add> unsupported = {np.dtype(t).type for t in np.typecodes["Complex"]}
<ide> for t in self.othertypes:
<ide> with warnings.catch_warnings(record=True) as w:
<ide> warnings.filterwarnings("always") | 1 |
Mixed | Javascript | use function declarations | a180259e428fbc3e220abdceb74d941482b87d40 | <ide><path>doc/api/util.md
<ide> Returns `true` if the given `object` is a `Function`. Otherwise, returns
<ide> const util = require('util');
<ide>
<ide> function Foo() {}
<del>const Bar = function() {};
<add>const Bar = () => {};
<ide>
<ide> util.isFunction({});
<ide> // Returns: false
<ide><path>lib/_tls_legacy.js
<ide> CryptoStream.prototype.destroySoon = function(err) {
<ide> // was written on this side was read from the other side.
<ide> var self = this;
<ide> var waiting = 1;
<del> var finish = function() {
<add> function finish() {
<ide> if (--waiting === 0) self.destroy();
<del> };
<add> }
<ide> this._opposite.once('end', finish);
<ide> if (!this._finished) {
<ide> this.once('finish', finish);
<ide><path>lib/crypto.js
<ide> function pbkdf2(password, salt, iterations, keylen, digest, callback) {
<ide> // at this point, we need to handle encodings.
<ide> var encoding = exports.DEFAULT_ENCODING;
<ide> if (callback) {
<del> var next = function(er, ret) {
<add> function next(er, ret) {
<ide> if (ret)
<ide> ret = ret.toString(encoding);
<ide> callback(er, ret);
<del> };
<add> }
<ide> binding.PBKDF2(password, salt, iterations, keylen, digest, next);
<ide> } else {
<ide> var ret = binding.PBKDF2(password, salt, iterations, keylen, digest);
<ide><path>lib/internal/util.js
<ide> function cachedResult(fn) {
<ide> // B() instanceof A // true
<ide> // B() instanceof B // true
<ide> function createClassWrapper(type) {
<del> const fn = function(...args) {
<add> function fn(...args) {
<ide> return Reflect.construct(type, args, new.target || type);
<del> };
<add> }
<ide> // Mask the wrapper function name and length values
<ide> Object.defineProperties(fn, {
<ide> name: {value: type.name},
<ide><path>test/addons-napi/test_async/test.js
<ide> const test_async = require(`./build/${common.buildType}/test_async`);
<ide> test_async.Test(5, common.mustCall(function(err, val) {
<ide> assert.strictEqual(err, null);
<ide> assert.strictEqual(val, 10);
<del> process.nextTick(common.mustCall(function() {}));
<add> process.nextTick(common.mustCall());
<ide> }));
<ide>
<del>const cancelSuceeded = function() {};
<del>test_async.TestCancel(common.mustCall(cancelSuceeded));
<add>test_async.TestCancel(common.mustCall());
<ide><path>test/addons-napi/test_exception/test.js
<ide> const common = require('../../common');
<ide> const test_exception = require(`./build/${common.buildType}/test_exception`);
<ide> const assert = require('assert');
<ide> const theError = new Error('Some error');
<del>const throwTheError = function() {
<add>function throwTheError() {
<ide> throw theError;
<del>};
<add>}
<ide> let caughtError;
<ide>
<del>const throwNoError = function() {};
<add>const throwNoError = common.noop;
<ide>
<ide> // Test that the native side successfully captures the exception
<ide> let returnedError = test_exception.returnException(throwTheError);
<ide><path>test/addons-napi/test_instanceof/test.js
<ide> if (typeof Symbol !== 'undefined' && 'hasInstance' in Symbol &&
<ide> (theObject instanceof theConstructor));
<ide> }
<ide>
<del> const MyClass = function MyClass() {};
<add> function MyClass() {}
<ide> Object.defineProperty(MyClass, Symbol.hasInstance, {
<ide> value: function(candidate) {
<ide> return 'mark' in candidate;
<ide> }
<ide> });
<ide>
<del> const MySubClass = function MySubClass() {};
<add> function MySubClass() {}
<ide> MySubClass.prototype = new MyClass();
<ide>
<ide> let x = new MySubClass();
<ide><path>test/addons/make-callback/test.js
<ide> const forward = vm.runInNewContext(`
<ide> })
<ide> `);
<ide> // Runs in outer context.
<del>const endpoint = function($Object) {
<add>function endpoint($Object) {
<ide> if (Object === $Object)
<ide> throw new Error('bad');
<ide> return Object;
<del>};
<add>}
<ide> assert.strictEqual(Object, makeCallback(process, forward, endpoint));
<ide><path>test/inspector/inspector-helper.js
<ide> function timeout(message, multiplicator) {
<ide> TIMEOUT * (multiplicator || 1));
<ide> }
<ide>
<del>const TestSession = function(socket, harness) {
<add>function TestSession(socket, harness) {
<ide> this.mainScriptPath = harness.mainScriptPath;
<ide> this.mainScriptId = null;
<ide>
<ide> const TestSession = function(socket, harness) {
<ide> buffer = buffer.slice(consumed);
<ide> } while (consumed);
<ide> }).on('close', () => assert(this.expectClose_, 'Socket closed prematurely'));
<del>};
<add>}
<ide>
<ide> TestSession.prototype.scriptUrlForId = function(id) {
<ide> return this.scripts_[id];
<ide> TestSession.prototype.testHttpResponse = function(path, check) {
<ide> };
<ide>
<ide>
<del>const Harness = function(port, childProcess) {
<add>function Harness(port, childProcess) {
<ide> this.port = port;
<ide> this.mainScriptPath = mainScript;
<ide> this.stderrFilters_ = [];
<ide> const Harness = function(port, childProcess) {
<ide> this.returnCode_ = code;
<ide> this.running_ = false;
<ide> });
<del>};
<add>}
<ide>
<ide> Harness.prototype.addStderrFilter = function(regexp, callback) {
<ide> this.stderrFilters_.push((message) => {
<ide><path>test/parallel/test-assert.js
<ide> a.throws(makeBlock(thrower, TypeError), function(err) {
<ide>
<ide> AnotherErrorType = class extends Error {};
<ide>
<del> const functionThatThrows = function() {
<add> const functionThatThrows = () => {
<ide> throw new AnotherErrorType('foo');
<ide> };
<ide>
<ide> a.throws(makeBlock(a.deepEqual, args, []));
<ide>
<ide> // more checking that arguments objects are handled correctly
<ide> {
<add> // eslint-disable-next-line func-style
<ide> const returnArguments = function() { return arguments; };
<ide>
<ide> const someArgs = returnArguments('a');
<ide><path>test/parallel/test-child-process-fork-dgram.js
<ide> if (process.argv[2] === 'child') {
<ide> });
<ide> });
<ide>
<del> const sendMessages = function() {
<add> function sendMessages() {
<ide> const serverPort = parentServer.address().port;
<ide>
<ide> const timer = setInterval(function() {
<ide> if (process.argv[2] === 'child') {
<ide> );
<ide> }
<ide> }, 1);
<del> };
<add> }
<ide>
<ide> parentServer.bind(0, '127.0.0.1');
<ide>
<ide><path>test/parallel/test-child-process-fork-net.js
<ide> if (process.argv[2] === 'child') {
<ide> }));
<ide>
<ide> // send net.Server to child and test by connecting
<del> const testServer = function(callback) {
<add> function testServer(callback) {
<ide>
<ide> // destroy server execute callback when done
<ide> const progress = new ProgressTracker(2, function() {
<ide> if (process.argv[2] === 'child') {
<ide> server.listen(0);
<ide>
<ide> // handle client messages
<del> const messageHandlers = function(msg) {
<add> function messageHandlers(msg) {
<ide>
<ide> if (msg.what === 'listening') {
<ide> // make connections
<ide> if (process.argv[2] === 'child') {
<ide> child.removeListener('message', messageHandlers);
<ide> callback();
<ide> }
<del> };
<add> }
<ide>
<ide> child.on('message', messageHandlers);
<del> };
<add> }
<ide>
<ide> // send net.Socket to child
<del> const testSocket = function(callback) {
<add> function testSocket(callback) {
<ide>
<ide> // create a new server and connect to it,
<ide> // but the socket will be handled by the child
<ide> if (process.argv[2] === 'child') {
<ide> server.close();
<ide> });
<ide> });
<del> };
<add> }
<ide>
<ide> // create server and send it to child
<ide> let serverSuccess = false;
<ide><path>test/parallel/test-child-process-fork-net2.js
<ide> if (process.argv[2] === 'child') {
<ide>
<ide> server.listen(0, '127.0.0.1');
<ide>
<del> const closeServer = function() {
<add> function closeServer() {
<ide> server.close();
<ide>
<ide> setTimeout(function() {
<ide> if (process.argv[2] === 'child') {
<ide> child2.send('close');
<ide> child3.disconnect();
<ide> }, 200);
<del> };
<add> }
<ide>
<ide> process.on('exit', function() {
<ide> assert.strictEqual(disconnected, count);
<ide><path>test/parallel/test-child-process-spawn-typeerror.js
<ide> assert.throws(function() {
<ide> // Argument types for combinatorics
<ide> const a = [];
<ide> const o = {};
<del>const c = function c() {};
<add>function c() {}
<ide> const s = 'string';
<ide> const u = undefined;
<ide> const n = null;
<ide><path>test/parallel/test-cluster-disconnect.js
<ide> if (cluster.isWorker) {
<ide> const servers = 2;
<ide>
<ide> // test a single TCP server
<del> const testConnection = function(port, cb) {
<add> const testConnection = (port, cb) => {
<ide> const socket = net.connect(port, '127.0.0.1', () => {
<ide> // buffer result
<ide> let result = '';
<ide> if (cluster.isWorker) {
<ide> };
<ide>
<ide> // test both servers created in the cluster
<del> const testCluster = function(cb) {
<add> const testCluster = (cb) => {
<ide> let done = 0;
<ide>
<ide> for (let i = 0; i < servers; i++) {
<ide> if (cluster.isWorker) {
<ide> };
<ide>
<ide> // start two workers and execute callback when both is listening
<del> const startCluster = function(cb) {
<add> const startCluster = (cb) => {
<ide> const workers = 8;
<ide> let online = 0;
<ide>
<ide> if (cluster.isWorker) {
<ide> }
<ide> };
<ide>
<del> const test = function(again) {
<add> const test = (again) => {
<ide> //1. start cluster
<ide> startCluster(common.mustCall(() => {
<ide> //2. test cluster
<ide><path>test/parallel/test-cluster-master-error.js
<ide> if (cluster.isWorker) {
<ide> // Check that the cluster died accidentally (non-zero exit code)
<ide> masterExited = !!code;
<ide>
<del> const pollWorkers = function() {
<add> const pollWorkers = () => {
<ide> // When master is dead all workers should be dead too
<ide> let alive = false;
<ide> workers.forEach((pid) => alive = common.isAlive(pid));
<ide><path>test/parallel/test-cluster-master-kill.js
<ide> if (cluster.isWorker) {
<ide> assert.strictEqual(code, 0);
<ide>
<ide> // check worker process status
<del> const pollWorker = function() {
<add> const pollWorker = () => {
<ide> alive = common.isAlive(pid);
<ide> if (alive) {
<ide> setTimeout(pollWorker, 50);
<ide><path>test/parallel/test-cluster-message.js
<ide> if (cluster.isWorker) {
<ide>
<ide>
<ide> let client;
<del> const check = function(type, result) {
<add> const check = (type, result) => {
<ide> checks[type].receive = true;
<ide> checks[type].correct = result;
<ide> console.error('check', checks);
<ide><path>test/parallel/test-cluster-worker-exit.js
<ide> if (cluster.isWorker) {
<ide> }
<ide> }));
<ide>
<del> const finish_test = function() {
<add> const finish_test = () => {
<ide> try {
<ide> checkResults(expected_results, results);
<ide> } catch (exc) {
<ide><path>test/parallel/test-console-not-call-toString.js
<ide> require('../common');
<ide> const assert = require('assert');
<ide>
<del>const func = function() {};
<add>function func() {}
<ide> let toStringCalled = false;
<ide> func.toString = function() {
<ide> toStringCalled = true;
<ide><path>test/parallel/test-event-emitter-add-listeners.js
<ide> const EventEmitter = require('events');
<ide> }
<ide>
<ide> {
<del> const listen1 = function listen1() {};
<del> const listen2 = function listen2() {};
<add> const listen1 = () => {};
<add> const listen2 = () => {};
<ide> const ee = new EventEmitter();
<ide>
<ide> ee.once('newListener', function() {
<ide><path>test/parallel/test-event-emitter-once.js
<ide> e.emit('hello', 'a', 'b');
<ide> e.emit('hello', 'a', 'b');
<ide> e.emit('hello', 'a', 'b');
<ide>
<del>const remove = function() {
<add>function remove() {
<ide> assert.fail('once->foo should not be emitted');
<del>};
<add>}
<ide>
<ide> e.once('foo', remove);
<ide> e.removeListener('foo', remove);
<ide><path>test/parallel/test-fs-access.js
<ide> const doesNotExist = path.join(common.tmpDir, '__this_should_not_exist');
<ide> const readOnlyFile = path.join(common.tmpDir, 'read_only_file');
<ide> const readWriteFile = path.join(common.tmpDir, 'read_write_file');
<ide>
<del>const createFileWithPerms = function(file, mode) {
<add>function createFileWithPerms(file, mode) {
<ide> fs.writeFileSync(file, '');
<ide> fs.chmodSync(file, mode);
<del>};
<add>}
<ide>
<ide> common.refreshTmpDir();
<ide> createFileWithPerms(readOnlyFile, 0o444);
<ide><path>test/parallel/test-fs-link.js
<ide> const srcPath = path.join(common.tmpDir, 'hardlink-target.txt');
<ide> const dstPath = path.join(common.tmpDir, 'link1.js');
<ide> fs.writeFileSync(srcPath, 'hello world');
<ide>
<del>const callback = function(err) {
<add>function callback(err) {
<ide> assert.ifError(err);
<ide> const dstContent = fs.readFileSync(dstPath, 'utf8');
<ide> assert.strictEqual('hello world', dstContent);
<del>};
<add>}
<ide>
<ide> fs.link(srcPath, dstPath, common.mustCall(callback));
<ide>
<ide><path>test/parallel/test-fs-read-stream-fd-leak.js
<ide> function testLeak(endFn, callback) {
<ide> let i = 0;
<ide> let check = 0;
<ide>
<del> const checkFunction = function() {
<add> function checkFunction() {
<ide> if (openCount !== 0 && check < totalCheck) {
<ide> check++;
<ide> setTimeout(checkFunction, 100);
<ide> function testLeak(endFn, callback) {
<ide>
<ide> openCount = 0;
<ide> callback && setTimeout(callback, 100);
<del> };
<add> }
<ide>
<ide> setInterval(function() {
<ide> const s = fs.createReadStream(emptyTxt);
<ide><path>test/parallel/test-http-parser.js
<ide> function expectBody(expected) {
<ide> 'GET /hello HTTP/1.1' + CRLF +
<ide> CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(versionMajor, 1);
<ide> assert.strictEqual(versionMinor, 1);
<ide> assert.strictEqual(method, methods.indexOf('GET'));
<ide> function expectBody(expected) {
<ide> CRLF +
<ide> 'pong');
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, undefined);
<ide> assert.strictEqual(versionMajor, 1);
<ide> assert.strictEqual(versionMinor, 1);
<ide> assert.strictEqual(statusCode, 200);
<ide> assert.strictEqual(statusMessage, 'OK');
<ide> };
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const body = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(body, 'pong');
<ide> };
<ide> function expectBody(expected) {
<ide> 'HTTP/1.0 200 Connection established' + CRLF +
<ide> CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(versionMajor, 1);
<ide> assert.strictEqual(versionMinor, 0);
<ide> assert.strictEqual(method, undefined);
<ide> function expectBody(expected) {
<ide>
<ide> let seen_body = false;
<ide>
<del> const onHeaders = function(headers, url) {
<add> const onHeaders = (headers, url) => {
<ide> assert.ok(seen_body); // trailers should come after the body
<ide> assert.deepStrictEqual(headers,
<ide> ['Vary', '*', 'Content-Type', 'text/plain']);
<ide> };
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/it');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide> parser[kOnHeaders] = mustCall(onHeaders);
<ide> };
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const body = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(body, 'ping');
<ide> seen_body = true;
<ide> function expectBody(expected) {
<ide> 'X-Filler2: 42' + CRLF +
<ide> CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('GET'));
<ide> assert.strictEqual(versionMajor, 1);
<ide> assert.strictEqual(versionMinor, 0);
<ide> function expectBody(expected) {
<ide> lots_of_headers +
<ide> CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('GET'));
<ide> assert.strictEqual(url || parser.url, '/foo/bar/baz?quux=42#1337');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide> CRLF +
<ide> 'foo=42&bar=1337');
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/it');
<ide> assert.strictEqual(versionMajor, 1);
<ide> assert.strictEqual(versionMinor, 1);
<ide> };
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const body = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(body, 'foo=42&bar=1337');
<ide> };
<ide> function expectBody(expected) {
<ide> '1234567890' + CRLF +
<ide> '0' + CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/it');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide> let body_part = 0;
<ide> const body_parts = ['123', '123456', '1234567890'];
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const body = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(body, body_parts[body_part++]);
<ide> };
<ide> function expectBody(expected) {
<ide> '6' + CRLF +
<ide> '123456' + CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/it');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide> const body_parts =
<ide> ['123', '123456', '123456789', '123456789ABC', '123456789ABCDEF'];
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const body = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(body, body_parts[body_part++]);
<ide> };
<ide> function expectBody(expected) {
<ide> '0' + CRLF);
<ide>
<ide> function test(a, b) {
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/helpme');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide>
<ide> let expected_body = '123123456123456789123456789ABC123456789ABCDEF';
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const chunk = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(expected_body.indexOf(chunk), 0);
<ide> expected_body = expected_body.slice(chunk.length);
<ide> function expectBody(expected) {
<ide> '123456789ABCDEF' + CRLF +
<ide> '0' + CRLF);
<ide>
<del> const onHeadersComplete = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url || parser.url, '/it');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide>
<ide> let expected_body = '123123456123456789123456789ABC123456789ABCDEF';
<ide>
<del> const onBody = function(buf, start, len) {
<add> const onBody = (buf, start, len) => {
<ide> const chunk = '' + buf.slice(start, start + len);
<ide> assert.strictEqual(expected_body.indexOf(chunk), 0);
<ide> expected_body = expected_body.slice(chunk.length);
<ide> function expectBody(expected) {
<ide> CRLF +
<ide> 'pong');
<ide>
<del> const onHeadersComplete1 = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete1 = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('PUT'));
<ide> assert.strictEqual(url, '/this');
<ide> assert.strictEqual(versionMajor, 1);
<ide> function expectBody(expected) {
<ide> ['Content-Type', 'text/plain', 'Transfer-Encoding', 'chunked']);
<ide> };
<ide>
<del> const onHeadersComplete2 = function(versionMajor, versionMinor, headers,
<del> method, url, statusCode, statusMessage,
<del> upgrade, shouldKeepAlive) {
<add> const onHeadersComplete2 = (versionMajor, versionMinor, headers,
<add> method, url, statusCode, statusMessage,
<add> upgrade, shouldKeepAlive) => {
<ide> assert.strictEqual(method, methods.indexOf('POST'));
<ide> assert.strictEqual(url, '/that');
<ide> assert.strictEqual(versionMajor, 1);
<ide><path>test/parallel/test-http-response-status-message.js
<ide> const server = net.createServer(function(connection) {
<ide> });
<ide> });
<ide>
<del>const runTest = function(testCaseIndex) {
<add>function runTest(testCaseIndex) {
<ide> const testCase = testCases[testCaseIndex];
<ide>
<ide> http.get({
<ide> const runTest = function(testCaseIndex) {
<ide>
<ide> response.resume();
<ide> });
<del>};
<add>}
<ide>
<ide> server.listen(0, function() { runTest(0); });
<ide>
<ide><path>test/parallel/test-https-simple.js
<ide> const options = {
<ide> const tests = 2;
<ide> let successful = 0;
<ide>
<del>const testSucceeded = function() {
<add>const testSucceeded = () => {
<ide> successful = successful + 1;
<ide> if (successful === tests) {
<ide> server.close();
<ide><path>test/parallel/test-net-server-max-connections-close-makes-more-available.js
<ide> const connections = [];
<ide> const received = [];
<ide> const sent = [];
<ide>
<del>const createConnection = function(index) {
<add>function createConnection(index) {
<ide> console.error('creating connection ' + index);
<ide>
<ide> return new Promise(function(resolve, reject) {
<ide> const createConnection = function(index) {
<ide>
<ide> connections[index] = connection;
<ide> });
<del>};
<add>}
<ide>
<del>const closeConnection = function(index) {
<add>function closeConnection(index) {
<ide> console.error('closing connection ' + index);
<ide> return new Promise(function(resolve, reject) {
<ide> connections[index].on('end', function() {
<ide> resolve();
<ide> });
<ide> connections[index].end();
<ide> });
<del>};
<add>}
<ide>
<ide> const server = net.createServer(function(socket) {
<ide> socket.on('data', function(data) {
<ide><path>test/parallel/test-net-server-pause-on-connect.js
<ide> let stopped = true;
<ide> let server1Sock;
<ide>
<ide>
<del>const server1ConnHandler = function(socket) {
<add>const server1ConnHandler = (socket) => {
<ide> socket.on('data', function(data) {
<ide> if (stopped) {
<ide> assert.fail('data event should not have happened yet');
<ide> const server1ConnHandler = function(socket) {
<ide>
<ide> const server1 = net.createServer({pauseOnConnect: true}, server1ConnHandler);
<ide>
<del>const server2ConnHandler = function(socket) {
<add>const server2ConnHandler = (socket) => {
<ide> socket.on('data', function(data) {
<ide> assert.strictEqual(data.toString(), msg, 'invalid data received');
<ide> socket.end();
<ide><path>test/parallel/test-os.js
<ide> const interfaces = os.networkInterfaces();
<ide> switch (platform) {
<ide> case 'linux':
<ide> {
<del> const filter = function(e) { return e.address === '127.0.0.1'; };
<add> const filter = (e) => e.address === '127.0.0.1';
<ide> const actual = interfaces.lo.filter(filter);
<ide> const expected = [{ address: '127.0.0.1', netmask: '255.0.0.0',
<ide> mac: '00:00:00:00:00:00', family: 'IPv4',
<ide> switch (platform) {
<ide> }
<ide> case 'win32':
<ide> {
<del> const filter = function(e) { return e.address === '127.0.0.1'; };
<add> const filter = (e) => e.address === '127.0.0.1';
<ide> const actual = interfaces['Loopback Pseudo-Interface 1'].filter(filter);
<ide> const expected = [{ address: '127.0.0.1', netmask: '255.0.0.0',
<ide> mac: '00:00:00:00:00:00', family: 'IPv4',
<ide><path>test/parallel/test-preload.js
<ide> if (common.isSunOS) {
<ide>
<ide> const nodeBinary = process.argv[0];
<ide>
<del>const preloadOption = function(preloads) {
<add>const preloadOption = (preloads) => {
<ide> let option = '';
<ide> preloads.forEach(function(preload, index) {
<ide> option += '-r ' + preload + ' ';
<ide> });
<ide> return option;
<ide> };
<ide>
<del>const fixture = function(name) {
<del> return path.join(common.fixturesDir, name);
<del>};
<add>const fixture = (name) => path.join(common.fixturesDir, name);
<ide>
<ide> const fixtureA = fixture('printA.js');
<ide> const fixtureB = fixture('printB.js');
<ide><path>test/parallel/test-querystring.js
<ide> const qsColonTestCases = [
<ide> ];
<ide>
<ide> // [wonkyObj, qs, canonicalObj]
<del>const extendedFunction = function() {};
<add>function extendedFunction() {}
<ide> extendedFunction.prototype = {a: 'b'};
<ide> const qsWeirdObjects = [
<ide> // eslint-disable-next-line no-unescaped-regexp-dot
<ide><path>test/parallel/test-readline-interface.js
<ide> function isWarned(emitter) {
<ide>
<ide> // \t does not become part of the input when there is a completer function
<ide> fi = new FakeInput();
<del> const completer = function(line) {
<del> return [[], line];
<del> };
<add> const completer = (line) => [[], line];
<ide> rli = new readline.Interface({
<ide> input: fi,
<ide> output: fi,
<ide><path>test/parallel/test-stream2-pipe-error-once-listener.js
<ide> const util = require('util');
<ide> const stream = require('stream');
<ide>
<ide>
<del>const Read = function() {
<add>function Read() {
<ide> stream.Readable.call(this);
<del>};
<add>}
<ide> util.inherits(Read, stream.Readable);
<ide>
<ide> Read.prototype._read = function(size) {
<ide> Read.prototype._read = function(size) {
<ide> };
<ide>
<ide>
<del>const Write = function() {
<add>function Write() {
<ide> stream.Writable.call(this);
<del>};
<add>}
<ide> util.inherits(Write, stream.Writable);
<ide>
<ide> Write.prototype._write = function(buffer, encoding, cb) {
<ide><path>test/parallel/test-timers-ordering.js
<ide> const N = 30;
<ide> let last_i = 0;
<ide> let last_ts = 0;
<ide>
<del>const f = function(i) {
<add>function f(i) {
<ide> if (i <= N) {
<ide> // check order
<ide> assert.strictEqual(i, last_i + 1, 'order is broken: ' + i + ' != ' +
<ide> const f = function(i) {
<ide> // schedule next iteration
<ide> setTimeout(f, 1, i + 1);
<ide> }
<del>};
<add>}
<ide> f(1);
<ide><path>test/parallel/test-util-inspect.js
<ide> assert.strictEqual(
<ide>
<ide> // Function with properties
<ide> {
<del> const value = function() {};
<add> const value = () => {};
<ide> value.aprop = 42;
<ide> assert.strictEqual(util.inspect(value), '{ [Function: value] aprop: 42 }');
<ide> }
<ide><path>test/pummel/test-dtrace-jsstack.js
<ide> if (!common.isSunOS) {
<ide> */
<ide> const frames = [ 'stalloogle', 'bagnoogle', 'doogle' ];
<ide>
<del>const stalloogle = function(str) {
<add>const stalloogle = (str) => {
<ide> global.expected = str;
<ide> os.loadavg();
<ide> };
<ide>
<del>const bagnoogle = function(arg0, arg1) {
<add>const bagnoogle = (arg0, arg1) => {
<ide> stalloogle(arg0 + ' is ' + arg1 + ' except that it is read-only');
<ide> };
<ide>
<ide> let done = false;
<ide>
<del>const doogle = function() {
<add>const doogle = () => {
<ide> if (!done)
<ide> setTimeout(doogle, 10);
<ide>
<ide><path>test/pummel/test-tls-session-timeout.js
<ide> function doTest() {
<ide>
<ide> // Expects a callback -- cb(connectionType : enum ['New'|'Reused'])
<ide>
<del> const Client = function(cb) {
<add> function Client(cb) {
<ide> const flags = [
<ide> 's_client',
<ide> '-connect', 'localhost:' + common.PORT,
<ide> function doTest() {
<ide> });
<ide> client.on('exit', function(code) {
<ide> let connectionType;
<del> const grepConnectionType = function(line) {
<add> const grepConnectionType = (line) => {
<ide> const matches = line.match(/(New|Reused), /);
<ide> if (matches) {
<ide> connectionType = matches[1];
<ide> function doTest() {
<ide> }
<ide> cb(connectionType);
<ide> });
<del> };
<add> }
<ide>
<ide> const server = tls.createServer(options, function(cleartext) {
<ide> cleartext.on('error', function(er) { | 39 |
PHP | PHP | apply styleci fixes | 9f3b5e822b963433b400f07a1e9db52ae525660f | <ide><path>tests/Database/DatabaseEloquentModelTest.php
<ide> public function testModelObserversCanBeAttachedToModelsThroughCallingObserveMeth
<ide> $events->shouldReceive('forget');
<ide> EloquentModelStub::observe([
<ide> 'Illuminate\Tests\Database\EloquentTestObserverStub',
<del> 'Illuminate\Tests\Database\EloquentTestAnotherObserverStub'
<add> 'Illuminate\Tests\Database\EloquentTestAnotherObserverStub',
<ide> ]);
<ide> EloquentModelStub::flushEventListeners();
<ide> } | 1 |
Ruby | Ruby | require only minitest | 73c6ca87a36336c360db66a751c66e11f7886788 | <ide><path>actionpack/lib/action_dispatch/testing/integration.rb
<ide> require 'active_support/core_ext/kernel/singleton_class'
<ide> require 'active_support/core_ext/object/try'
<ide> require 'rack/test'
<del>require 'minitest/autorun'
<add>require 'minitest'
<ide>
<ide> module ActionDispatch
<ide> module Integration #:nodoc:
<ide><path>activesupport/lib/active_support/test_case.rb
<ide> gem 'minitest' # make sure we get the gem, not stdlib
<del>require 'minitest/autorun'
<add>require 'minitest'
<ide> require 'active_support/testing/tagged_logging'
<ide> require 'active_support/testing/setup_and_teardown'
<ide> require 'active_support/testing/assertions' | 2 |
Text | Text | add information about where application is located | 5316d586d12518e1ce8707f3715c6e64acdff102 | <ide><path>docs/build-instructions/windows.md
<ide> cd C:\
<ide> git clone https://github.com/atom/atom/
<ide> cd atom
<del> script\build
<add> script\build # Creates application in the `Program Files` directory
<ide> ```
<ide>
<ide> ## Why do I have to use GitHub for Windows? | 1 |
PHP | PHP | use array_pull for session pull | 76e4be8628d695aa3e1e683fb32c06addf0fa28d | <ide><path>src/Illuminate/Session/Store.php
<ide> public function get($name, $default = null)
<ide> */
<ide> public function pull($key, $default = null)
<ide> {
<del> $value = $this->get($key, $default);
<del>
<del> $this->forget($key);
<del>
<del> return $value;
<add> return array_pull($this->attributes, $key, $default);
<ide> }
<ide>
<ide> /** | 1 |
Text | Text | add history for url.parse | fafd5b07588733ef87767cf9e156f62bcb8f09c0 | <ide><path>doc/api/url.md
<ide> The formatting process operates as follows:
<ide> ### url.parse(urlString[, parseQueryString[, slashesDenoteHost]])
<ide> <!-- YAML
<ide> added: v0.1.25
<add>changes:
<add> - version: v9.0.0
<add> pr-url: https://github.com/nodejs/node/pull/13606
<add> description: The `search` property on the returned URL object is now `null`
<add> when no query string is present.
<ide> -->
<ide>
<ide> * `urlString` {string} The URL string to parse. | 1 |
Python | Python | detect invalid user/password correctly | 0de209ece9d375eeb6227fc386c5933823ea05ef | <ide><path>libcloud/drivers/gogrid.py
<ide>
<ide> class GoGridResponse(Response):
<ide> def success(self):
<add> if self.status == 403:
<add> raise InvalidCredsException()
<ide> if not self.body:
<ide> return None
<ide> return json.loads(self.body)['status'] == 'success' | 1 |
Ruby | Ruby | remove special case filtering for procs | d5ac941ddc3de7ad1aaff80ed67aa04fb626a263 | <ide><path>actionpack/test/controller/filters_test.rb
<ide> class << self
<ide>
<ide> def before_actions
<ide> filters = _process_action_callbacks.select { |c| c.kind == :before }
<del> filters.map!(&:raw_filter)
<add> filters.map!(&:filter)
<ide> end
<ide> end
<ide> end
<ide><path>activesupport/lib/active_support/callbacks.rb
<ide> def self.build(chain, filter, kind, options)
<ide> end
<ide>
<ide> attr_accessor :kind, :name
<del> attr_reader :chain_config
<add> attr_reader :chain_config, :filter
<ide>
<ide> def initialize(name, filter, kind, options, chain_config)
<ide> @chain_config = chain_config
<ide> @name = name
<ide> @kind = kind
<ide> @filter = filter
<del> @key = compute_identifier filter
<ide> @if = check_conditionals(options[:if])
<ide> @unless = check_conditionals(options[:unless])
<ide> end
<ide>
<del> def filter; @key; end
<del> def raw_filter; @filter; end
<del>
<ide> def merge_conditional_options(chain, if_option:, unless_option:)
<ide> options = {
<ide> if: @if.dup,
<ide> def check_conditionals(conditionals)
<ide> conditionals.freeze
<ide> end
<ide>
<del> def compute_identifier(filter)
<del> case filter
<del> when ::Proc
<del> filter.object_id
<del> else
<del> filter
<del> end
<del> end
<del>
<ide> def conditions_lambdas
<ide> @if.map { |c| CallTemplate.build(c, self).make_lambda } +
<ide> @unless.map { |c| CallTemplate.build(c, self).inverted_lambda }
<ide><path>activesupport/test/callbacks_test.rb
<ide> def test_skip_class # removes one at a time
<ide> }
<ide> end
<ide>
<del> def test_skip_lambda # raises error
<del> calls = []
<del> callback = ->(o) { calls << o }
<del> klass = build_class(callback)
<del> assert_raises(ArgumentError) { klass.skip callback }
<del> klass.new.run
<del> assert_equal 10, calls.length
<del> end
<del>
<ide> def test_skip_symbol # removes all
<ide> calls = []
<ide> klass = build_class(:bar)
<ide><path>activesupport/test/test_case_test.rb
<ide> class SetupAndTeardownTest < ActiveSupport::TestCase
<ide> teardown :foo, :sentinel
<ide>
<ide> def test_inherited_setup_callbacks
<del> assert_equal [:reset_callback_record, :foo], self.class._setup_callbacks.map(&:raw_filter)
<add> assert_equal [:reset_callback_record, :foo], self.class._setup_callbacks.map(&:filter)
<ide> assert_equal [:foo], @called_back
<del> assert_equal [:foo, :sentinel], self.class._teardown_callbacks.map(&:raw_filter)
<add> assert_equal [:foo, :sentinel], self.class._teardown_callbacks.map(&:filter)
<ide> end
<ide>
<ide> def setup
<ide> class SubclassSetupAndTeardownTest < SetupAndTeardownTest
<ide> teardown :bar
<ide>
<ide> def test_inherited_setup_callbacks
<del> assert_equal [:reset_callback_record, :foo, :bar], self.class._setup_callbacks.map(&:raw_filter)
<add> assert_equal [:reset_callback_record, :foo, :bar], self.class._setup_callbacks.map(&:filter)
<ide> assert_equal [:foo, :bar], @called_back
<del> assert_equal [:foo, :sentinel, :bar], self.class._teardown_callbacks.map(&:raw_filter)
<add> assert_equal [:foo, :sentinel, :bar], self.class._teardown_callbacks.map(&:filter)
<ide> end
<ide>
<ide> private | 4 |
Javascript | Javascript | remove missing link from modal doc | dbc853246d956a0d8dfc6e5b95436f59ea069bbc | <ide><path>Libraries/Modal/Modal.js
<ide> const RCTModalHostView = requireNativeComponent('RCTModalHostView', null);
<ide> * The Modal component is a simple way to present content above an enclosing view.
<ide> *
<ide> * _Note: If you need more control over how to present modals over the rest of your app,
<del> * then consider using a top-level Navigator. Go [here](/react-native/docs/navigator-comparison.html) to compare navigation options._
<add> * then consider using a top-level Navigator._
<ide> *
<ide> * ```javascript
<ide> * import React, { Component } from 'react'; | 1 |
Javascript | Javascript | add regex check in test-url-parse-invalid-input | 7906ed50fab6d3e8f6bd259484c465f2df1bc754 | <ide><path>test/parallel/test-url-parse-invalid-input.js
<ide> const url = require('url');
<ide> 0.0,
<ide> 0,
<ide> [],
<del> {}
<del>].forEach(function(val) {
<del> assert.throws(function() { url.parse(val); }, TypeError);
<add> {},
<add> () => {},
<add> Symbol('foo')
<add>].forEach((val) => {
<add> assert.throws(() => { url.parse(val); },
<add> /^TypeError: Parameter "url" must be a string, not (undefined|boolean|number|object|function|symbol)$/);
<ide> });
<ide>
<del>assert.throws(function() { url.parse('http://%E0%A4%A@fail'); }, /^URIError: URI malformed$/);
<add>assert.throws(() => { url.parse('http://%E0%A4%A@fail'); },
<add> /^URIError: URI malformed$/); | 1 |
Java | Java | expose id from clienthttpresponse | 55aa8e914ed2e6808a7599842414152248cbe00c | <ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ClientHttpResponse.java
<ide> /*
<del> * Copyright 2002-2019 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.springframework.http.ReactiveHttpInputMessage;
<ide> import org.springframework.http.ResponseCookie;
<ide> import org.springframework.util.MultiValueMap;
<add>import org.springframework.util.ObjectUtils;
<ide>
<ide> /**
<ide> * Represents a client-side reactive HTTP response.
<ide> */
<ide> public interface ClientHttpResponse extends ReactiveHttpInputMessage {
<ide>
<add> /**
<add> * Return an id that represents the underlying connection, if available,
<add> * or the request for the purpose of correlating log messages.
<add> * @since 5.3.5
<add> */
<add> default String getId() {
<add> return ObjectUtils.getIdentityHexString(this);
<add> }
<add>
<ide> /**
<ide> * Return the HTTP status code as an {@link HttpStatus} enum value.
<ide> * @return the HTTP status as an HttpStatus enum value (never {@code null})
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ClientHttpResponseDecorator.java
<ide> /*
<del> * Copyright 2002-2018 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public ClientHttpResponse getDelegate() {
<ide>
<ide> // ClientHttpResponse delegation methods...
<ide>
<add> @Override
<add> public String getId() {
<add> return this.delegate.getId();
<add> }
<add>
<ide> @Override
<ide> public HttpStatus getStatusCode() {
<ide> return this.delegate.getStatusCode();
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ReactorClientHttpResponse.java
<ide> /*
<del> * Copyright 2002-2020 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.springframework.http.HttpStatus;
<ide> import org.springframework.http.ResponseCookie;
<ide> import org.springframework.lang.Nullable;
<add>import org.springframework.util.ClassUtils;
<ide> import org.springframework.util.CollectionUtils;
<ide> import org.springframework.util.LinkedMultiValueMap;
<ide> import org.springframework.util.MultiValueMap;
<add>import org.springframework.util.ObjectUtils;
<ide>
<ide> /**
<ide> * {@link ClientHttpResponse} implementation for the Reactor-Netty HTTP client.
<ide> */
<ide> class ReactorClientHttpResponse implements ClientHttpResponse {
<ide>
<add> /** Reactor Netty 1.0.5+. */
<add> static final boolean reactorNettyRequestChannelOperationsIdPresent = ClassUtils.isPresent(
<add> "reactor.netty.ChannelOperationsId", ReactorClientHttpResponse.class.getClassLoader());
<add>
<add>
<ide> private static final Log logger = LogFactory.getLog(ReactorClientHttpResponse.class);
<ide>
<ide> private final HttpClientResponse response;
<ide> class ReactorClientHttpResponse implements ClientHttpResponse {
<ide> // 0 - not subscribed, 1 - subscribed, 2 - cancelled via connector (before subscribe)
<ide> private final AtomicInteger state = new AtomicInteger();
<ide>
<del> private final String logPrefix;
<del>
<ide>
<ide> /**
<ide> * Constructor that matches the inputs from
<ide> public ReactorClientHttpResponse(HttpClientResponse response, Connection connect
<ide> this.headers = HttpHeaders.readOnlyHttpHeaders(adapter);
<ide> this.inbound = connection.inbound();
<ide> this.bufferFactory = new NettyDataBufferFactory(connection.outbound().alloc());
<del> this.logPrefix = (logger.isDebugEnabled() ? "[" + connection.channel().id().asShortText() + "] " : "");
<ide> }
<ide>
<ide> /**
<ide> public ReactorClientHttpResponse(HttpClientResponse response, NettyInbound inbou
<ide> this.headers = HttpHeaders.readOnlyHttpHeaders(adapter);
<ide> this.inbound = inbound;
<ide> this.bufferFactory = new NettyDataBufferFactory(alloc);
<del> this.logPrefix = "";
<ide> }
<ide>
<ide>
<add> @Override
<add> public String getId() {
<add> String id = null;
<add> if (reactorNettyRequestChannelOperationsIdPresent) {
<add> id = ChannelOperationsIdHelper.getId(this.response);
<add> }
<add> if (id == null && this.response instanceof Connection) {
<add> id = ((Connection) this.response).channel().id().asShortText();
<add> }
<add> return (id != null ? id : ObjectUtils.getIdentityHexString(this));
<add> }
<add>
<ide> @Override
<ide> public Flux<DataBuffer> getBody() {
<ide> return this.inbound.receive()
<ide> private static String getSameSite(Cookie cookie) {
<ide> void releaseAfterCancel(HttpMethod method) {
<ide> if (mayHaveBody(method) && this.state.compareAndSet(0, 2)) {
<ide> if (logger.isDebugEnabled()) {
<del> logger.debug(this.logPrefix + "Releasing body, not yet subscribed.");
<add> logger.debug("[" + getId() + "]" + "Releasing body, not yet subscribed.");
<ide> }
<ide> this.inbound.receive().doOnNext(byteBuf -> {}).subscribe(byteBuf -> {}, ex -> {});
<ide> }
<ide> public String toString() {
<ide> "status=" + getRawStatusCode() + '}';
<ide> }
<ide>
<add>
<add> private static class ChannelOperationsIdHelper {
<add>
<add> @Nullable
<add> public static String getId(HttpClientResponse response) {
<add> if (response instanceof reactor.netty.ChannelOperationsId) {
<add> return (logger.isDebugEnabled() ?
<add> ((reactor.netty.ChannelOperationsId) response).asLongText() :
<add> ((reactor.netty.ChannelOperationsId) response).asShortText());
<add> }
<add> return null;
<add> }
<add> }
<add>
<ide> }
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/ExchangeFunctions.java
<ide> /*
<del> * Copyright 2002-2019 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public Mono<ClientResponse> exchange(ClientRequest clientRequest) {
<ide> Assert.notNull(clientRequest, "ClientRequest must not be null");
<ide> HttpMethod httpMethod = clientRequest.method();
<ide> URI url = clientRequest.url();
<del> String logPrefix = clientRequest.logPrefix();
<ide>
<ide> return this.connector
<ide> .connect(httpMethod, url, httpRequest -> clientRequest.writeTo(httpRequest, this.strategies))
<ide> .doOnRequest(n -> logRequest(clientRequest))
<del> .doOnCancel(() -> logger.debug(logPrefix + "Cancel signal (to close connection)"))
<add> .doOnCancel(() -> logger.debug(clientRequest.logPrefix() + "Cancel signal (to close connection)"))
<ide> .onErrorResume(WebClientUtils.WRAP_EXCEPTION_PREDICATE, t -> wrapException(t, clientRequest))
<ide> .map(httpResponse -> {
<add> String logPrefix = getLogPrefix(clientRequest, httpResponse);
<ide> logResponse(httpResponse, logPrefix);
<ide> return new DefaultClientResponse(
<ide> httpResponse, this.strategies, logPrefix, httpMethod.name() + " " + url,
<ide> private void logRequest(ClientRequest request) {
<ide> );
<ide> }
<ide>
<add> private String getLogPrefix(ClientRequest request, ClientHttpResponse response) {
<add> return request.logPrefix() + "[" + response.getId() + "] ";
<add> }
<add>
<ide> private void logResponse(ClientHttpResponse response, String logPrefix) {
<ide> LogFormatUtils.traceDebug(logger, traceOn -> {
<ide> int code = response.getRawStatusCode(); | 4 |
Python | Python | add examples to numpy.char.replace | 9b94c6fed78d8aaf358f7552c17754037b3bf320 | <ide><path>numpy/core/defchararray.py
<ide> def replace(a, old, new, count=None):
<ide> See Also
<ide> --------
<ide> str.replace
<add>
<add> Examples
<add> --------
<add> >>> a = np.array(["That is a mango", "Monkeys eat mangos"])
<add> >>> np.char.replace(a, 'mango', 'banana')
<add> array(['That is a banana', 'Monkeys eat bananas'], dtype='<U19')
<ide>
<add> >>> a = np.array(["The dish is fresh", "This is it"])
<add> >>> np.char.replace(a, 'is', 'was')
<add> array(['The dwash was fresh', 'Thwas was it'], dtype='<U19')
<ide> """
<ide> return _to_string_or_unicode_array(
<ide> _vec_string( | 1 |
Java | Java | reduce logcat spew when checking packager status | 05c79059aefdd60e813223319abd9da564137253 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/devsupport/DevServerHelper.java
<ide> public void isPackagerRunning(final PackagerStatusCallback callback) {
<ide> new Callback() {
<ide> @Override
<ide> public void onFailure(Request request, IOException e) {
<del> FLog.e(ReactConstants.TAG, "IOException requesting status from packager", e);
<add> FLog.w(
<add> ReactConstants.TAG,
<add> "The packager does not seem to be running as we got an IOException requesting " +
<add> "its status: " + e.getMessage());
<ide> callback.onPackagerStatusFetched(false);
<ide> }
<ide> | 1 |
Javascript | Javascript | document the `multiple` attribute | 865f6065e745c1143c3de8aa97d90147e549845e | <ide><path>src/ng/directive/select.js
<ide> var SelectController =
<ide> *
<ide> * @param {string} ngModel Assignable angular expression to data-bind to.
<ide> * @param {string=} name Property name of the form under which the control is published.
<add> * @param {string=} multiple Allows multiple options to be selected. The selected values will be
<add> * bound to the model as an array.
<ide> * @param {string=} required Sets `required` validation error key if the value is not entered.
<ide> * @param {string=} ngRequired Adds required attribute and required validation constraint to
<ide> * the element when the ngRequired expression evaluates to true. Use ngRequired instead of required | 1 |
PHP | PHP | replace custom implementation | 394f0fecfa84764f66c49c3b025dd7eb26aa61cd | <ide><path>src/Illuminate/Console/ConfirmableTrait.php
<ide> public function confirmToProceed($warning = 'Application In Production!', $callb
<ide> return true;
<ide> }
<ide>
<del> $this->comment(str_repeat('*', strlen($warning) + 12));
<del> $this->comment('* '.$warning.' *');
<del> $this->comment(str_repeat('*', strlen($warning) + 12));
<add> $this->alert($warning);
<ide> $this->output->writeln('');
<ide>
<ide> $confirmed = $this->confirm('Do you really wish to run this command?'); | 1 |
Text | Text | update pr-url for dep0016 eol | cd0f5a239e5c0d1f6fd374741173d62d2df64340 | <ide><path>doc/api/deprecations.md
<ide> The [`fs.readSync()`][] legacy `String` interface is deprecated. Use the
<ide> <!-- YAML
<ide> changes:
<ide> - version: REPLACEME
<del> pr-url: REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/31167
<ide> description: End-of-Life
<ide> - version: v6.12.0
<ide> pr-url: https://github.com/nodejs/node/pull/10116 | 1 |
Python | Python | pass request to schema generation | b50d8950eeeac03843b9da1cb96ba52b5b98c8bc | <ide><path>rest_framework/schemas.py
<ide> def __init__(self, title=None, url=None, patterns=None, urlconf=None):
<ide> urls = import_module(urlconf)
<ide> else:
<ide> urls = urlconf
<del> patterns = urls.urlpatterns
<add> self.patterns = urls.urlpatterns
<ide> elif patterns is None and urlconf is None:
<ide> urls = import_module(settings.ROOT_URLCONF)
<del> patterns = urls.urlpatterns
<add> self.patterns = urls.urlpatterns
<add> else:
<add> self.patterns = patterns
<ide>
<ide> if url and not url.endswith('/'):
<ide> url += '/'
<ide>
<ide> self.title = title
<ide> self.url = url
<del> self.endpoints = self.get_api_endpoints(patterns)
<add> self.endpoints = None
<ide>
<ide> def get_schema(self, request=None):
<del> if request is None:
<del> endpoints = self.endpoints
<del> else:
<del> # Filter the list of endpoints to only include those that
<del> # the user has permission on.
<del> endpoints = []
<del> for key, link, callback in self.endpoints:
<del> method = link.action.upper()
<del> view = callback.cls()
<add> if self.endpoints is None:
<add> self.endpoints = self.get_api_endpoints(self.patterns)
<add>
<add> links = []
<add> for key, path, method, callback in self.endpoints:
<add> view = callback.cls()
<add> for attr, val in getattr(callback, 'initkwargs', {}).items():
<add> setattr(view, attr, val)
<add> view.args = ()
<add> view.kwargs = {}
<add> view.format_kwarg = None
<add>
<add> if request is not None:
<ide> view.request = clone_request(request, method)
<del> view.format_kwarg = None
<ide> try:
<ide> view.check_permissions(view.request)
<ide> except exceptions.APIException:
<del> pass
<del> else:
<del> endpoints.append((key, link, callback))
<add> continue
<add> else:
<add> view.request = None
<add>
<add> link = self.get_link(path, method, callback, view)
<add> links.append((key, link))
<ide>
<del> if not endpoints:
<add> if not link:
<ide> return None
<ide>
<ide> # Generate the schema content structure, from the endpoints.
<ide> # ('users', 'list'), Link -> {'users': {'list': Link()}}
<ide> content = {}
<del> for key, link, callback in endpoints:
<add> for key, link in links:
<ide> insert_into(content, key, link)
<ide>
<ide> # Return the schema document.
<ide> def get_api_endpoints(self, patterns, prefix=''):
<ide> if self.should_include_endpoint(path, callback):
<ide> for method in self.get_allowed_methods(callback):
<ide> key = self.get_key(path, method, callback)
<del> link = self.get_link(path, method, callback)
<del> endpoint = (key, link, callback)
<add> endpoint = (key, path, method, callback)
<ide> api_endpoints.append(endpoint)
<ide>
<ide> elif isinstance(pattern, RegexURLResolver):
<ide> def get_key(self, path, method, callback):
<ide>
<ide> # Methods for generating each individual `Link` instance...
<ide>
<del> def get_link(self, path, method, callback):
<add> def get_link(self, path, method, callback, view):
<ide> """
<ide> Return a `coreapi.Link` instance for the given endpoint.
<ide> """
<del> view = callback.cls()
<del> for attr, val in getattr(callback, 'initkwargs', {}).items():
<del> setattr(view, attr, val)
<del>
<ide> fields = self.get_path_fields(path, method, callback, view)
<ide> fields += self.get_serializer_fields(path, method, callback, view)
<ide> fields += self.get_pagination_fields(path, method, callback, view)
<ide> def get_serializer_fields(self, path, method, callback, view):
<ide> if method not in ('PUT', 'PATCH', 'POST'):
<ide> return []
<ide>
<del> if not hasattr(view, 'get_serializer_class'):
<add> if not hasattr(view, 'get_serializer'):
<ide> return []
<ide>
<del> fields = []
<del>
<del> serializer_class = view.get_serializer_class()
<del> serializer = serializer_class()
<add> serializer = view.get_serializer()
<ide>
<ide> if isinstance(serializer, serializers.ListSerializer):
<del> return coreapi.Field(name='data', location='body', required=True)
<add> return [coreapi.Field(name='data', location='body', required=True)]
<ide>
<ide> if not isinstance(serializer, serializers.Serializer):
<ide> return []
<ide>
<add> fields = []
<ide> for field in serializer.fields.values():
<ide> if field.read_only:
<ide> continue
<ide><path>tests/test_schemas.py
<ide> class ExampleViewSet(ModelViewSet):
<ide> def custom_action(self, request, pk):
<ide> return super(ExampleSerializer, self).retrieve(self, request)
<ide>
<add> def get_serializer(self, *args, **kwargs):
<add> assert self.request
<add> return super(ExampleViewSet, self).get_serializer(*args, **kwargs)
<add>
<ide>
<ide> class ExampleView(APIView):
<ide> permission_classes = [permissions.IsAuthenticatedOrReadOnly] | 2 |
Javascript | Javascript | use same example code for async effect warning | f0621fe232f31cb0fcd63992c3440ec1b4ce5813 | <ide><path>packages/eslint-plugin-react-hooks/__tests__/ESLintRuleExhaustiveDeps-test.js
<ide> const tests = {
<ide> errors: [
<ide> `Effect callbacks are synchronous to prevent race conditions. ` +
<ide> `Put the async function inside:\n\n` +
<del> `useEffect(() => {\n` +
<del> ` let ignore = false;\n` +
<del> ` fetchSomething();\n` +
<del> `\n` +
<del> ` async function fetchSomething() {\n` +
<del> ` const result = await ...\n` +
<del> ` if (!ignore) setState(result);\n` +
<del> ` }\n` +
<del> `\n` +
<del> ` return () => { ignore = true; };\n` +
<del> `}, ...);\n` +
<del> `\n` +
<del> `This lets you handle multiple requests without bugs.`,
<add> 'useEffect(() => {\n' +
<add> ' async function fetchData() {\n' +
<add> ' // You can await here\n' +
<add> ' const response = await MyAPI.getData(someId);\n' +
<add> ' // ...\n' +
<add> ' }\n' +
<add> ' fetchData();\n' +
<add> `}, [someId]); // Or [] if effect doesn't need props or state\n\n` +
<add> 'Learn more about data fetching with Hooks: https://fb.me/react-hooks-data-fetching',
<ide> ],
<ide> },
<ide> {
<ide><path>packages/eslint-plugin-react-hooks/src/ExhaustiveDeps.js
<ide> export default {
<ide> message:
<ide> `Effect callbacks are synchronous to prevent race conditions. ` +
<ide> `Put the async function inside:\n\n` +
<del> `useEffect(() => {\n` +
<del> ` let ignore = false;\n` +
<del> ` fetchSomething();\n` +
<del> `\n` +
<del> ` async function fetchSomething() {\n` +
<del> ` const result = await ...\n` +
<del> ` if (!ignore) setState(result);\n` +
<del> ` }\n` +
<del> `\n` +
<del> ` return () => { ignore = true; };\n` +
<del> `}, ...);\n` +
<del> `\n` +
<del> `This lets you handle multiple requests without bugs.`,
<add> 'useEffect(() => {\n' +
<add> ' async function fetchData() {\n' +
<add> ' // You can await here\n' +
<add> ' const response = await MyAPI.getData(someId);\n' +
<add> ' // ...\n' +
<add> ' }\n' +
<add> ' fetchData();\n' +
<add> `}, [someId]); // Or [] if effect doesn't need props or state\n\n` +
<add> 'Learn more about data fetching with Hooks: https://fb.me/react-hooks-data-fetching',
<ide> });
<ide> }
<ide>
<ide><path>packages/react-reconciler/src/ReactFiberCommitWork.js
<ide> function commitHookEffectList(
<ide> ' // ...\n' +
<ide> ' }\n' +
<ide> ' fetchData();\n' +
<del> '}, [someId]);\n\n' +
<add> `}, [someId]); // Or [] if effect doesn't need props or state\n\n` +
<ide> 'Learn more about data fetching with Hooks: https://fb.me/react-hooks-data-fetching';
<ide> } else {
<ide> addendum = ' You returned: ' + destroy; | 3 |
PHP | PHP | apply fixes from styleci | c53166e55270dca0d19db280f970d9ba86089ee3 | <ide><path>tests/Queue/RedisQueueIntegrationTest.php
<ide> <?php
<ide>
<ide> use Mockery as m;
<add>use Carbon\Carbon;
<ide> use Illuminate\Redis\Database;
<ide> use Illuminate\Queue\RedisQueue;
<ide> use Illuminate\Container\Container; | 1 |
Python | Python | fix bug in `learning_rate` config | 177e2d71760a1207ddc7bb2dc737a3720bc63036 | <ide><path>official/vision/beta/projects/panoptic_maskrcnn/configs/panoptic_deeplab.py
<ide> def panoptic_deeplab_coco() -> cfg.ExperimentConfig:
<ide>
<ide> config = cfg.ExperimentConfig(
<ide> runtime=cfg.RuntimeConfig(
<del> mixed_precision_dtype='float32', enable_xla=True),
<add> mixed_precision_dtype='bfloat16', enable_xla=True),
<ide> task=PanopticDeeplabTask(
<ide> init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/deeplab/deeplab_resnet101_imagenet/ckpt-62400', # pylint: disable=line-too-long
<ide> init_checkpoint_modules=['backbone'],
<ide> def panoptic_deeplab_coco() -> cfg.ExperimentConfig:
<ide> 'type': 'adam',
<ide> },
<ide> 'learning_rate': {
<add> 'type': 'polynomial',
<ide> 'polynomial': {
<ide> 'initial_learning_rate': 0.0005,
<ide> 'decay_steps': train_steps, | 1 |
Javascript | Javascript | avoid side-effects when calling jquery.hasdata | 332a490573bbbd9e7df1381bde8f590240cb8679 | <ide><path>src/data.js
<ide> function Data() {
<ide> Data.uid = 1;
<ide>
<ide> Data.prototype = {
<del> key: function( owner ) {
<add> key: function( owner, options ) {
<ide> var descriptor = {},
<ide> // Check if the owner object already has a cache key
<ide> unlock = owner[ this.expando ];
<ide>
<add> // `readonly` calls from hasData, on owners with no key
<add> // should not create new/empty cache records
<add> if ( !unlock && (options && options.readonly) ) {
<add> return null;
<add> }
<add>
<ide> // If not, create one
<ide> if ( !unlock ) {
<ide> unlock = Data.uid++;
<ide> Data.prototype = {
<ide> },
<ide> hasData: function( owner ) {
<ide> return !jQuery.isEmptyObject(
<del> this.cache[ this.key( owner ) ]
<add> this.cache[ this.key( owner, { readonly: true }) ] || {}
<ide> );
<ide> },
<ide> discard: function( owner ) {
<ide><path>test/unit/data.js
<ide> test( "jQuery._data & _removeData, expected returns", function() {
<ide> );
<ide> });
<ide>
<add>test( "jQuery.hasData no side effects", function() {
<add> expect(1);
<add> var obj = {};
<add>
<add> jQuery.hasData( obj );
<add>
<add> equal( Object.getOwnPropertyNames( obj ).length, 0,
<add> "No data expandos where added when calling jQuery.hasData(o)"
<add> );
<add>});
<add>
<ide> function dataTests (elem) {
<ide> var oldCacheLength, dataObj, internalDataObj, expected, actual;
<ide> | 2 |
Ruby | Ruby | add link to pypi downloads page to problem message | 70dfaf3b42f263a83edf35934f6b360a6bf927cd | <ide><path>Library/Homebrew/rubocops/urls.rb
<ide> def audit_formula(_node, _class_node, _parent_class_node, body_node)
<ide>
<ide> # Check pypi urls
<ide> pypi_pattern = %r{^https?://pypi.python.org/}
<del> audit_urls(urls, pypi_pattern) do
<del> problem "use the `files.pythonhosted.org` url found on the pypi downloads page"
<add> audit_urls(urls, pypi_pattern) do |_, url|
<add> problem "use the `Source` url found on PyPI downloads page (`#{get_pypi_url(url)}`)"
<ide> end
<ide>
<ide> # Require long files.pythonhosted.org urls
<ide> pythonhosted_pattern = %r{^https?://files.pythonhosted.org/packages/source/}
<del> audit_urls(urls, pythonhosted_pattern) do
<del> problem "use the url found on the pypi downloads page"
<add> audit_urls(urls, pythonhosted_pattern) do |_, url|
<add> problem "use the `Source` url found on PyPI downloads page (`#{get_pypi_url(url)}`)"
<ide> end
<ide> end
<add>
<add> def get_pypi_url(url)
<add> package_file = File.basename(url)
<add> package_name = package_file.match(/^(.+)-[a-z0-9.]+$/)[1]
<add> "https://pypi.org/project/#{package_name}/#files"
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/test/rubocops/urls_spec.rb
<ide> class Foo < Formula
<ide> class Foo < Formula
<ide> desc "foo"
<ide> url "https://pypi.python.org/packages/source/foo/foo-0.1.tar.gz"
<del> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use the `files.pythonhosted.org` url found on the pypi downloads page
<add> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use the `Source` url found on PyPI downloads page (`https://pypi.org/project/foo/#files`)
<ide> end
<ide> RUBY
<ide> end
<ide> class Foo < Formula
<ide> class Foo < Formula
<ide> desc "foo"
<ide> url "https://files.pythonhosted.org/packages/source/f/foo/foo-0.1.tar.gz"
<del> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use the url found on the pypi downloads page
<add> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use the `Source` url found on PyPI downloads page (`https://pypi.org/project/foo/#files`)
<ide> end
<ide> RUBY
<ide> end | 2 |