content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40-40) | patch (stringlengths 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---
Text | Text | describe apm linking when creating a theme | dbef7a26ce1d3b4252897e339e5f4f6bee78b488 | <ide><path>docs/creating-a-theme.md
<ide> a few things before starting:
<ide> that have already been defined.
<ide> 1. When you're ready update the `README.md` and include an example screenshot
<ide> of your new theme in action.
<del>1. Open a terminal, find your new theme's directory, initialize the git
<del> repository and push it to repository on GitHub.
<del>1. Once you're ready for others to use your theme, run `apm publish` from within
<del> that directory to make it available.
<add>1. Open a terminal to your new theme directory
<add> 1. Run `apm link` to install it locally.
<add> 1. Reload Atom (`cmd-r`) and your theme should now be applied.
<add> 1. To publish, initialize a git repository, push to GitHub, and run
<add> `apm publish`.
<ide>
<ide> ## Interface Themes
<ide> | 1 |
Javascript | Javascript | add hoc component filter | a173f4eb85a81b86f1457d901be6fc198e36b728 | <ide><path>src/backend/renderer.js
<ide> import { gte } from 'semver';
<ide> import {
<ide> ComponentFilterDisplayName,
<ide> ComponentFilterElementType,
<add> ComponentFilterHOC,
<ide> ComponentFilterLocation,
<ide> ElementTypeClass,
<ide> ElementTypeContext,
<ide> export function attach(
<ide> hideElementsWithPaths.add(new RegExp(componentFilter.value, 'i'));
<ide> }
<ide> break;
<add> case ComponentFilterHOC:
<add> hideElementsWithDisplayNames.add(new RegExp('\\('));
<add> break;
<ide> default:
<ide> console.warn(
<ide> `Invalid component filter type "${componentFilter.type}"`
<ide><path>src/devtools/views/Components/ComponentFiltersModal.js
<ide> import ButtonIcon from '../ButtonIcon';
<ide> import Toggle from '../Toggle';
<ide> import Store from 'src/devtools/store';
<ide> import {
<del> ComponentFilterElementType,
<ide> ComponentFilterDisplayName,
<add> ComponentFilterElementType,
<add> ComponentFilterHOC,
<ide> ComponentFilterLocation,
<ide> ElementTypeClass,
<ide> ElementTypeContext,
<ide> import {
<ide> import styles from './ComponentFiltersModal.css';
<ide>
<ide> import type {
<add> BooleanComponentFilter,
<ide> ComponentFilter,
<ide> ComponentFilterType,
<ide> ElementType,
<ide> function ComponentFiltersModal({ store, setIsModalShowing }: Props) {
<ide> <option value={ComponentFilterLocation}>location</option>
<ide> <option value={ComponentFilterDisplayName}>name</option>
<ide> <option value={ComponentFilterElementType}>type</option>
<add> <option value={ComponentFilterHOC}>hoc</option>
<ide> </select>
<ide> </td>
<ide> <td className={styles.TableCell}>
<del> {componentFilter.type === ComponentFilterElementType
<del> ? 'equals'
<del> : 'matches'}
<add> {componentFilter.type === ComponentFilterElementType &&
<add> 'equals'}
<add> {(componentFilter.type === ComponentFilterLocation ||
<add> componentFilter.type === ComponentFilterDisplayName) &&
<add> 'matches'}
<ide> </td>
<ide> <td className={styles.TableCell}>
<del> {componentFilter.type === ComponentFilterElementType ? (
<add> {componentFilter.type === ComponentFilterElementType && (
<ide> <select
<ide> className={styles.Select}
<ide> value={componentFilter.value}
<ide> function ComponentFiltersModal({ store, setIsModalShowing }: Props) {
<ide> <option value={ElementTypeProfiler}>profiler</option>
<ide> <option value={ElementTypeSuspense}>suspense</option>
<ide> </select>
<del> ) : (
<add> )}
<add> {(componentFilter.type === ComponentFilterLocation ||
<add> componentFilter.type === ComponentFilterDisplayName) && (
<ide> <input
<ide> className={styles.Input}
<ide> type="text"
<ide> function useComponentFilters() {
<ide> isValid: true,
<ide> value: '',
<ide> };
<add> } else if (type === ComponentFilterHOC) {
<add> cloned[index] = {
<add> type: ComponentFilterHOC,
<add> isEnabled: componentFilter.isEnabled,
<add> isValid: true,
<add> };
<ide> }
<ide> }
<ide> return cloned;
<ide> function useComponentFilters() {
<ide> ...((cloned[index]: any): RegExpComponentFilter),
<ide> isEnabled,
<ide> };
<add> } else if (componentFilter.type === ComponentFilterHOC) {
<add> cloned[index] = {
<add> ...((cloned[index]: any): BooleanComponentFilter),
<add> isEnabled,
<add> };
<ide> }
<ide> }
<ide> return cloned;
<ide><path>src/types.js
<ide> export type ElementType = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12;
<ide> export const ComponentFilterElementType = 1;
<ide> export const ComponentFilterDisplayName = 2;
<ide> export const ComponentFilterLocation = 3;
<add>export const ComponentFilterHOC = 4;
<ide>
<del>export type ComponentFilterType = 1 | 2 | 3;
<add>export type ComponentFilterType = 1 | 2 | 3 | 4;
<ide>
<ide> // Hide all elements of types in this Set.
<ide> // We hide host components only by default.
<ide> export type RegExpComponentFilter = {|
<ide> value: string,
<ide> |};
<ide>
<add>export type BooleanComponentFilter = {|
<add> isEnabled: boolean,
<add> isValid: boolean,
<add> type: 4,
<add>|};
<add>
<ide> export type ComponentFilter =
<add> | BooleanComponentFilter
<ide> | ElementTypeComponentFilter
<ide> | RegExpComponentFilter; | 3 |
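The patch above hides higher-order components by adding a single regular expression, `new RegExp('\\(')`, to the display-name filters: HOC-style display names conventionally wrap the inner component's name in parentheses, so matching a literal `(` is enough. A minimal plain-JavaScript sketch of that idea (not DevTools source; the display names are made up):

```js
// HOC display names conventionally look like "withSomething(Inner)",
// so a regex that matches a literal "(" flags them.
const hideElementsWithDisplayNames = new Set([new RegExp('\\(')]);

const shouldHide = displayName =>
  [...hideElementsWithDisplayNames].some(regExp => regExp.test(displayName));

console.log(shouldHide('withRouter(Profile)')); // true  -> hidden as a HOC
console.log(shouldHide('Connect(TodoList)'));   // true  -> hidden as a HOC
console.log(shouldHide('TodoList'));            // false -> kept
```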
Ruby | Ruby | add wrapper for github api method | 6d948bf6ab26b0f1d074e5cedb1c401b1af9f483 | <ide><path>Library/Homebrew/utils/github.rb
<ide> module GitHub
<ide>
<ide> module_function
<ide>
<add> def open_api(url, data: nil, data_binary_path: nil, request_method: nil, scopes: [].freeze, parse_json: true)
<add> odeprecated "GitHub.open_api", "GitHub::API.open_api"
<add> API.open_api(url, data: data, data_binary_path: data_binary_path, request_method: request_method,
<add> scopes: scopes, parse_json: parse_json)
<add> end
<add>
<ide> def check_runs(repo: nil, commit: nil, pr: nil)
<ide> if pr
<ide> repo = pr.fetch("base").fetch("repo").fetch("full_name") | 1 |
Text | Text | add immutable data faq | 00e9aa014fa4c1ebfd0ae7a1e85b4381f0eaa797 | <ide><path>docs/faq/ImmutableData.md
<add># Redux FAQ: Immutable Data
<add>
<add>## Table of Contents
<add>- [What are the benefits of Immutability?](#benefits-of-immutability)
<add>- [Why is immutability required in Redux?](#why-is-immutability-required)
<add>- [Do I have to use Immutable.JS?](#do-i-have-to-use-immutable-js)
<add>- [What are the issues with using ES6 for immutable operations?](#issues-with-es6-for-immutable-ops)
<add>- [Why should I use an immutable-focused library such as Immutable.JS?](#why-use-immutable-library)
<add>- [Why should I choose Immutable.JS as an immutable library?](#why-choose-immutable-js)
<add>- [What are the issues with using Immutable.JS?](#issues-with-immutable-js)
<add>- [Is Immutable.JS worth the effort?](#is-immutable-js-worth-effort)
<add>- [What are the Recommended Best Practices for using Immutable.JS with Redux?](#immutable-js-best-practices)
<add>
<add><a id="benefits-of-immutability"></a>
<add>## What are the benefits of immutability?
<add>
<add>Immutability can bring increased performance to your app, and leads to simpler programming and debugging, as data that never changes is easier to reason about than data that is free to be changed arbitrarily throughout your app.
<add>
<add>In particular, immutability in the context of a Web app enables sophisticated change detection techniques to be implemented simply and cheaply, ensuring the computationally expensive process of updating the DOM occurs only when it absolutely has to (a cornerstone of React’s performance improvements over other libraries).
<add><a id="why-is-immutability-required"></a>
<add>## Why is immutability required in Redux?
<add>
<add>In the context of Redux, if your reducers mutate their arguments and directly modify the state tree, you will cause React Redux’s *connect* function to break, as it will be unable to detect changes made to objects and arrays. You MUST therefore use immutable operations in your reducers when using Redux.
<add><a id="do-i-have-to-use-immutable-js"></a>
<add>## Do I have to use Immutable.JS?
<add>
<add>You do not need to use Immutable.JS with Redux. ES6, if written correctly, is perfectly capable of providing immutability without having to use an immutable-focused library. However, guaranteeing immutability with ES6 is difficult, and it can be easy to mutate an object accidentally, causing bugs in your app that are extremely difficult to locate. For this reason, using a library such as Immutable.JS can significantly improve the reliability of your app, and make your app’s development much easier.
<add><a id="issues-with-es6-for-immutable-ops"></a>
<add>## What are the issues with using ES6 for immutable operations?
<add>### Accidental Object Mutation
<add>With ES6, you can accidentally mutate an object (such as the Redux state tree) quite easily without realising it. For example, updating deeply nested properties, creating a new *reference* to an object instead of a new object, or performing a shallow copy rather than a deep copy, can all lead to inadvertent object mutations, and can trip up even the most experienced JavaScript coder.
<add>
<add>To avoid these issues, ensure you follow the recommended [Immutable Update Patterns for ES6](http://redux.js.org/docs/recipes/reducers/ImmutableUpdatePatterns.html).
<add>
<add>### Verbose Code
<add>Updating complex nested state trees can lead to verbose code that is tedious to write and difficult to debug.
<add>
<add>### Poor Performance
<add>Operating on ES6 objects and arrays in an immutable way can be slow, particularly as your state tree grows larger.
<add><a id="why-use-immutable-library"></a>
<add>## Why should I use an immutable-focused library such as Immutable.JS?
<add>
<add>Immutable-focused libraries such as Immutable.js have been designed to overcome the issues with immutability inherent within ES6, providing all the benefits of immutability with the performance your app requires. Whether you choose to use such a library, or stick with ES6, depends on how comfortable you are with adding another dependency to your app, or how sure you are that you can avoid the pitfalls inherent within ES6’s approach to immutability.
<add>
<add>Whichever option you choose, make sure you’re familiar with the concepts of [immutability, side effects and mutation](http://redux.js.org/docs/recipes/reducers/PrerequisiteConcepts.html#note-on-immutability-side-effects-and-mutation). In particular, ensure you have a deep understanding of what ES6 does when updating and copying values in order to guard against accidental mutations that will degrade your app’s performance, or break it altogether.
<add><a id="why-choose-immutable-js"></a>
<add>## Why should I choose Immutable.JS as an immutable library?
<add>
<add>Immutable.JS was designed to provide immutability in a performant manner. Its three key advantages are:
<add>
<add>### Guaranteed immutability
<add>
<add>Data encapsulated in an Immutable.JS object is never mutated. A new copy is always returned. This contrasts with ES6, in which some operations do not mutate your data (e.g. some Array methods, including map, filter, forEach, etc.), but some do (Array’s pop, push, concat, splice, etc.).
<add>
<add>### Rich API
<add>
<add>Immutable.JS provides a rich set of immutable objects to encapsulate your data (e.g. Maps, Lists, Sets, Records, etc.), and an extensive set of methods to manipulate it, including methods to sort, filter, and group the data, reverse it, flatten it, and create subsets.
<add>
<add>### Performance
<add>
<add>Immutable.JS does a lot of work behind the scenes to optimize performance. This is the key to its power, as using immutable data structures can involve a lot of expensive copying. In particular, immutably manipulating large, complex data sets, such as a nested Redux state tree, can generate many intermediate copies of objects, which consume memory and slow down performance as the browser’s garbage collector fights to clean things up.
<add>
<add>Immutable.JS avoids this by cleverly sharing data structures under the surface, minimizing the need to copy data. It also enables complex chains of operations to be carried out without creating unnecessary (and costly) cloned intermediate data that will quickly be thrown away.
<add>
<add>You never see this, of course - the data you give to an Immutable.JS object is never mutated. Rather, it’s the *intermediate* data generated within Immutable.JS from a chained sequence of method calls that is free to be mutated. You therefore get all the benefits of immutable data structures with none (or very little) of the potential performance hits.
<add><a id="issues-with-immutable-js"></a>
<add>## What are the issues with using Immutable.JS?
<add>
<add>Although powerful, Immutable.JS needs to be used carefully, as it comes with issues of its own. Note, however, that all of these issues can be overcome quite easily with careful coding.
<add>
<add>### Difficult to interoperate with
<add>
<add>JavaScript does not provide immutable data structures. As such, for Immutable.JS to provide its immutable guarantees, your data must be encapsulated within an Immutable.JS object (such as a Map or a List, etc.). Once it’s contained in this way, it’s hard for that data to then interoperate with other, plain JavaScript objects.
<add>
<add>For example, you will no longer be able to reference an object’s properties through standard JavaScript dot or bracket notation. Instead, you must reference them via Immutable.JS’s get() or getIn() methods, which use an awkward syntax that accesses properties via an array of strings, each of which represents a property key.
<add>
<add>For example, instead of `myObj.prop1.prop2.prop3`, you would use `myImmutableMap.getIn(['prop1', 'prop2', 'prop3'])`.
<add>
<add>This makes it awkward to interoperate not just with your own code, but also with other libraries, such as lodash or ramda, that expect plain JavaScript objects.
<add>
<add>Note that Immutable.JS objects do have a `toJS()` method, which returns the data as a plain JavaScript data structure, but this method is extremely slow, and using it extensively will negate the performance benefits that Immutable.JS provides.
<add>
<add>### Once used, Immutable.JS will spread throughout your codebase
<add>
<add>Once you encapsulate your data with Immutable.JS, you have to use Immutable.JS’s `get()` or `getIn()` property accessors to access it. This has the effect of spreading Immutable.JS across your entire codebase, including potentially your components, where you may prefer not to have such external dependencies. Your entire codebase must know what is, and what is not, an Immutable.JS object. It also makes removing Immutable.JS from your app difficult in the future, should you ever need to.
<add>
<add>This issue can be avoided with careful coding techniques, as outlined in the best practices section below.
<add>
<add>### No ES6 Destructuring or Spread Operators
<add>
<add>Because you have to access your data via Immutable.JS’s own `get()` and `getIn()` methods, you can no longer use ES6’s destructuring and spread operators, making your code more verbose.
<add>
<add>### Not suitable for small values that change often
<add>
<add>Immutable.JS works best for collections of data, and the larger the better. It can be slow when your data comprises lots of small, simple JavaScript objects, with each comprising a few keys of primitive values. Note, however, that this does not apply to the Redux state tree, which is represented as a large collection of data.
<add>
<add>### Difficult to Debug
<add>
<add>Immutable.JS objects, such as Map, List, etc., can be difficult to debug, as inspecting such an object will reveal an entire nested hierarchy of Immutable.JS-specific properties that you don’t care about, while your actual data that you do care about is encapsulated several layers deep.
<add>
<add>Fortunately, this can be easily overcome with the use of a browser extension such as the [Immutable.js Object Formatter](https://chrome.google.com/webstore/detail/immutablejs-object-format/hgldghadipiblonfkkicmgcbbijnpeog), which surfaces your data in Chrome Dev Tools, and hides Immutable.JS’s properties when inspecting your data.
<add>
<add>### Breaks object references, causing poor performance
<add>
<add>One of the key advantages of immutability is that it enables shallow equality checking, which dramatically improves performance. If two different variables reference the same immutable object, then a simple equality check of the two variables is enough to determine that they are equal, and that the object they both reference is unchanged. The equality check never has to check the values of any of the object’s properties, as it is, of course, immutable.
<add>
<add>Unfortunately, Immutable.JS’s `toJS()` method, which returns the data contained within an Immutable.js object as a pure JavaScript object, will create a new object every time it’s used, and so break the reference with the encapsulated data. Accordingly, calling `.toJS()` twice, for example, and assigning the result to two different variables will cause an equality check on those two variables to fail, even though the object values themselves haven’t changed.
<add>
<add>This, in turn, causes React’s `shouldComponentUpdate` method (and hence Redux’s `connect` function) to return true every time, causing an unchanged component to be re-rendered every time a change elsewhere in the app is detected, and so severely degrading performance.
<add>
<add>Again, though, this can be prevented quite easily with careful coding, as described in the Best Practices section below.
<add><a id="is-immutable-js-worth-effort"></a>
<add>## Is Using Immutable.JS worth the effort?
<add>
<add>Yes. Do not underestimate the difficulty of trying to track down a property of your state tree that has been inadvertently mutated. Components will both re-render when they shouldn’t, and refuse to render when they should, and tracking down the bug causing the rendering issue is hard, as the component rendering incorrectly is not necessarily the one whose properties are being accidentally mutated.
<add>
<add>This problem is caused predominantly by bugs in a Redux reducer, which requires immutability. If you mutate your state in one reducer, it can affect a completely different part of your app in seemingly arbitrary ways.
<add>
<add>With Immutable.JS, this problem simply does not exist, thereby removing a whole class of bugs from your app. This, together with its performance and rich API for data manipulation, is why Immutable.JS is worth the effort.
<add><a id="immutable-js-best-practices"></a>
<add>## What are the Recommended Best Practices for using Immutable.JS with Redux?
<add>
<add>Immutable.JS can provide significant reliability and performance improvements to your app, but it must be used correctly. Follow these best practices, and you’ll be able to get the most out of it, without tripping up on any of the issues it can potentially cause.
<add>
<add>### Never mix plain JavaScript objects with Immutable.JS
<add>
<add>Never let a plain JavaScript object contain Immutable.JS properties. Equally, never let an Immutable.JS object contain a plain JavaScript object.
<add>
<add>### Make your entire Redux state tree an Immutable.js object
<add>
<add>For a Redux app, your entire state tree should be an Immutable.JS object, with no plain JavaScript objects used at all.
<add>
<add>* Create the tree using Immutable.JS’s `fromJS()` function.
<add>
<add>* Use an Immutable.js-aware version of the `combineReducers` function, such as the one in [redux-immutable](https://www.npmjs.com/package/redux-immutable), as Redux itself expects the state tree to be a plain JavaScript object.
<add>
<add>* When adding JavaScript objects to an Immutable.JS Map or List using Immutable.JS’s `update`, `merge` or `set` methods, ensure that the object being added is first converted to an Immutable object using `fromJS()`.
<add>
<add>E.g.
<add>
<add>```
<add>
<add>// avoid
<add>const newObj = { key: value };
<add>const newState = state.setIn(['prop1'], newObj); // <-- newObj has been added as a plain JavaScript object - NOT as an Immutable.JS Map
<add>
<add>// recommend
<add>const newObj = { key: value };
<add>const newState = state.setIn(['prop1'], Immutable.fromJS(newObj)); // <-- newObj is now an Immutable.JS Map
<add>
<add>```
<add>
<add>### Use Immutable.JS everywhere except your dumb components
<add>
<add>Using Immutable.JS everywhere keeps your code performant. Use it in your smart components, your selectors, your sagas or thunks, action creators, and especially your reducers.
<add>
<add>Do NOT, however, use Immutable.JS in your dumb components.
<add>
<add>### Limit your use of toJS()
<add>
<add>`toJS()` is an expensive function and negates the purpose of using Immutable.JS. Avoid its use.
<add>
<add>### Your selectors should return Immutable.JS objects
<add>
<add>Always.
<add>
<add>### Use Immutable.JS objects in your Smart Components
<add>
<add>Smart components that access the store via React Redux’s `connect` function must use the Immutable.JS values returned by your selectors.
<add>
<add>### Never use toJS() in mapStateToProps
<add>
<add>Converting an Immutable.JS object to a JavaScript object using `toJS()` will return a new object every time. If you do this in `mapStateToProps`, you will cause the component to believe that the object has changed every time the state tree changes, and so trigger an unnecessary re-render.
<add>
<add>### Never use Immutable.JS in your Dumb Components
<add>
<add>Your dumb components should rely solely on JavaScript. Using Immutable.JS in your components adds an extra dependency and stops them from being portable.
<add>
<add>### Use a Higher Order Component to convert your Smart Component’s Immutable.JS props to your Dumb Component’s JavaScript props
<add>
<add>Something needs to map the Immutable.JS props in your Smart Component to the pure JavaScript props used in your Dumb Component. That something is a Higher Order Component (HOC) that simply takes the Immutable.JS props from your Smart Component, and converts them using toJS() to JavaScript props, which are then passed to your Dumb Component.
<add>
<add>Here is an example of such a HOC:
<add>
<add>```
<add>import { React } from 'react';
<add>import { Iterable } from 'immutable';
<add>
<add>export const toJS = (WrappedComponent) =>
<add> (wrappedComponentProps) => {
<add> const KEY = 0;
<add> const VALUE = 1;
<add>
<add> const propsJS = Object.entries(wrappedComponentProps).reduce((newProps, wrappedComponentProp) => {
<add> newProps[wrappedComponentProp[KEY]] = Iterable.isIterable(wrappedComponentProp[VALUE])
<add> ? wrappedComponentProp[VALUE].toJS()
<add> : wrappedComponentProp[VALUE];
<add>
<add> return newProps;
<add> }, {});
<add>
<add> return <WrappedComponent {...propsJS} />
<add> };
<add>
<add>```
<add>
<add>And this is how you would use it in your Smart Component:
<add>
<add>```
<add>import { connect } from 'react-redux';
<add>
<add>import { toJS } from './to-js';
<add>
<add>import DumbComponent from './dumb.component';
<add>
<add>const mapStateToProps = (state) => {
<add> return {
<add> /**
<add> obj is an Immutable object in Smart Component, but it’s converted to a plain
<add> JavaScript object by toJS, and so passed to DumbComponent as a pure JavaScript
<add> object. Because it’s still an Immutable.JS object here in mapStateToProps, though,
<add> there is no issue with errant re-renderings.
<add> */
<add> obj: getImmutableObjectFromStateTree(state)
<add> }
<add>};
<add>
<add>export default connect(mapStateToProps)(toJS(DumbComponent));
<add>```
<add>By converting Immutable.JS objects in a HOC, we achieve Dumb Component portability by converting Immutable.JS objects to JavaScript, but without the performance hits of using `toJS()` in the Smart Component.
<add>
<add>### Use the Immutable Object Formatter Chrome Extension to Aid Debugging
<add>
<add>Install the [Immutable Object Formatter](https://chrome.google.com/webstore/detail/immutablejs-object-format/hgldghadipiblonfkkicmgcbbijnpeog) , and inspect your Immutable.JS data without seeing the noise of Immutable.JS's own object properties.
<add> | 1 |
PHP | PHP | name command | c1e751f5f4d9dcd4af879b76bff706cb8d9d9090 | <ide><path>src/Illuminate/Foundation/Console/AppNameCommand.php
<ide> public function fire()
<ide> {
<ide> $this->currentRoot = trim($this->getAppNamespace(), '\\');
<ide>
<add> $this->setBootstrapNamespaces();
<add>
<ide> $this->setAppDirectoryNamespace();
<ide>
<ide> $this->setConfigNamespaces();
<ide> public function fire()
<ide> protected function setAppDirectoryNamespace()
<ide> {
<ide> $files = Finder::create()
<del> ->in($this->laravel['path'])
<del> ->name('*.php');
<add> ->in($this->laravel['path'])
<add> ->name('*.php');
<ide>
<ide> foreach ($files as $file)
<ide> {
<ide> $this->replaceNamespace($file->getRealPath());
<ide> }
<del>
<del> $this->setServiceProviderNamespaceReferences();
<ide> }
<ide>
<ide> /**
<ide> protected function replaceNamespace($path)
<ide> }
<ide>
<ide> /**
<del> * Set the referenced namespaces in various service providers.
<add> * Set the bootstrap namespaces.
<ide> *
<ide> * @return void
<ide> */
<del> protected function setServiceProviderNamespaceReferences()
<del> {
<del> $this->setReferencedMiddlewareNamespaces();
<del>
<del> $this->setReferencedConsoleNamespaces();
<del>
<del> $this->setReferencedRouteNamespaces();
<del> }
<del>
<del> /**
<del> * Set the namespace on the referenced middleware.
<del> *
<del> * @return void
<del> */
<del> protected function setReferencedMiddlewareNamespaces()
<add> protected function setBootstrapNamespaces()
<ide> {
<ide> $this->replaceIn(
<del> $this->laravel['path'].'/Providers/AppServiceProvider.php',
<del> $this->currentRoot.'\\Http\\Middleware', $this->argument('name').'\\Http\\Middleware'
<add> $this->getBootstrapPath(), $this->currentRoot.'\\Http', $this->argument('name').'\\Http'
<ide> );
<del> }
<ide>
<del> /**
<del> * Set the namespace on the referenced commands in the Artisan service provider.
<del> *
<del> * @return void
<del> */
<del> protected function setReferencedConsoleNamespaces()
<del> {
<ide> $this->replaceIn(
<del> $this->laravel['path'].'/Providers/ArtisanServiceProvider.php',
<del> $this->currentRoot.'\\Console', $this->argument('name').'\\Console'
<del> );
<del> }
<del>
<del> /**
<del> * Set the namespace on the referenced commands in the Routes service provider.
<del> *
<del> * @return void
<del> */
<del> protected function setReferencedRouteNamespaces()
<del> {
<del> $this->replaceIn(
<del> $this->laravel['path'].'/Providers/RouteServiceProvider.php',
<del> $this->currentRoot.'\\Http', $this->argument('name').'\\Http'
<add> $this->getBootstrapPath(), $this->currentRoot.'\\Console', $this->argument('name').'\\Console'
<ide> );
<ide> }
<ide>
<ide> protected function getUserClassPath()
<ide> return $this->laravel['path'].'/Core/User.php';
<ide> }
<ide>
<add> /**
<add> * Get the path to the bootstrap/app.php file.
<add> *
<add> * @return string
<add> */
<add> protected function getBootstrapPath()
<add> {
<add> return $this->laravel['path.base'].'/bootstrap/app.php';
<add> }
<add>
<ide> /**
<ide> * Get the path to the Composer.json file.
<ide> * | 1 |
Javascript | Javascript | clarify policy of track by and select as | ea1897606f15442ee0d734dcdf4848130b6bbd4e | <ide><path>src/ng/directive/select.js
<ide> var ngOptionsMinErr = minErr('ngOptions');
<ide> * <div class="alert alert-info">
<ide> * **Note:** Using `select as` will bind the result of the `select as` expression to the model, but
<ide> * the value of the `<select>` and `<option>` html elements will be either the index (for array data sources)
<del> * or property name (for object data sources) of the value within the collection.
<add> * or property name (for object data sources) of the value within the collection.
<ide> * </div>
<ide> *
<del> * **Note:** Using `select as` together with `trackexpr` is not possible (and will throw an error).
<add> * **Note:** Using `select as` together with `trackexpr` is not recommended.
<ide> * Reasoning:
<ide> * - Example: <select ng-options="item.subItem as item.label for item in values track by item.id" ng-model="selected">
<del> * values: [{id: 1, label: 'aLabel', subItem: {name: 'aSubItem'}}, {id: 2, label: 'bLabel', subItem: {name: 'bSubItemß'}}],
<add> * values: [{id: 1, label: 'aLabel', subItem: {name: 'aSubItem'}}, {id: 2, label: 'bLabel', subItem: {name: 'bSubItem'}}],
<ide> * $scope.selected = {name: 'aSubItem'};
<del> * - track by is always applied to `value`, with purpose to preserve the selection,
<add> * - track by is always applied to `value`, with the purpose of preserving the selection,
<ide> * (to `item` in this case)
<ide> * - to calculate whether an item is selected we do the following:
<ide> * 1. apply `track by` to the values in the array, e.g. | 1 |
Javascript | Javascript | add deprecation notice to imagestore | 62599fa8ff7f308259fe178fa37b7bcf3c1a408c | <ide><path>Libraries/Image/ImageStore.js
<ide>
<ide> const RCTImageStoreManager = require('NativeModules').ImageStoreManager;
<ide>
<add>const Platform = require('Platform');
<add>
<add>const warnOnce = require('warnOnce');
<add>
<add>function warnUnimplementedMethod(methodName: string): void {
<add> warnOnce(
<add> `imagestore-${methodName}`,
<add> `react-native: ImageStore.${methodName}() is not implemented on ${
<add> Platform.OS
<add> }`,
<add> );
<add>}
<add>
<ide> class ImageStore {
<ide> /**
<ide> * Check if the ImageStore contains image data for the specified URI.
<ide> class ImageStore {
<ide> if (RCTImageStoreManager.hasImageForTag) {
<ide> RCTImageStoreManager.hasImageForTag(uri, callback);
<ide> } else {
<del> console.warn('hasImageForTag() not implemented');
<add> warnUnimplementedMethod('hasImageForTag');
<ide> }
<ide> }
<ide>
<ide> class ImageStore {
<ide> if (RCTImageStoreManager.removeImageForTag) {
<ide> RCTImageStoreManager.removeImageForTag(uri);
<ide> } else {
<del> console.warn('removeImageForTag() not implemented');
<add> warnUnimplementedMethod('removeImageForTag');
<ide> }
<ide> }
<ide>
<ide> class ImageStore {
<ide> success: (uri: string) => void,
<ide> failure: (error: any) => void,
<ide> ) {
<del> RCTImageStoreManager.addImageFromBase64(base64ImageData, success, failure);
<add> if (RCTImageStoreManager.addImageFromBase64) {
<add> RCTImageStoreManager.addImageFromBase64(
<add> base64ImageData,
<add> success,
<add> failure,
<add> );
<add> } else {
<add> warnUnimplementedMethod('addImageFromBase64');
<add> }
<ide> }
<ide>
<ide> /**
<ide> class ImageStore {
<ide> success: (base64ImageData: string) => void,
<ide> failure: (error: any) => void,
<ide> ) {
<del> RCTImageStoreManager.getBase64ForTag(uri, success, failure);
<add> if (RCTImageStoreManager.getBase64ForTag) {
<add> RCTImageStoreManager.getBase64ForTag(uri, success, failure);
<add> } else {
<add> warnUnimplementedMethod('getBase64ForTag');
<add> }
<ide> }
<ide> }
<ide>
<ide><path>Libraries/react-native/react-native-implementation.js
<ide> module.exports = {
<ide> return require('ImageEditor');
<ide> },
<ide> get ImageStore() {
<add> warnOnce(
<add> 'imagestore-deprecation',
<add> 'ImageStore is deprecated and will be removed in a future release. ' +
<add> 'To get a base64-encoded string from a local image use either of the following third-party libraries:' +
<add> "* expo-file-system: `readAsStringAsync(filepath, 'base64')`" +
<add> "* react-native-fs: `readFile(filepath, 'base64')`",
<add> );
<ide> return require('ImageStore');
<ide> },
<ide> get InputAccessoryView() { | 2 |
Python | Python | use labels to remove deprecation warnings | f1fe18465d8c4ee3f5710cdfd7de387a1d136f6b | <ide><path>tests/test_modeling_albert.py
<ide> def create_and_check_albert_for_pretraining(
<ide> input_ids,
<ide> attention_mask=input_mask,
<ide> token_type_ids=token_type_ids,
<del> masked_lm_labels=token_labels,
<add> labels=token_labels,
<ide> sentence_order_label=sequence_labels,
<ide> )
<ide> result = {
<ide> def create_and_check_albert_for_masked_lm(
<ide> model.to(torch_device)
<ide> model.eval()
<ide> loss, prediction_scores = model(
<del> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, masked_lm_labels=token_labels
<add> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels
<ide> )
<ide> result = {
<ide> "loss": loss,
<ide><path>tests/test_modeling_bart.py
<ide> def test_mbart_fast_forward(self):
<ide> lm_model = BartForConditionalGeneration(config).to(torch_device)
<ide> context = torch.Tensor([[71, 82, 18, 33, 46, 91, 2], [68, 34, 26, 58, 30, 2, 1]]).long().to(torch_device)
<ide> summary = torch.Tensor([[82, 71, 82, 18, 2], [58, 68, 2, 1, 1]]).long().to(torch_device)
<del> loss, logits, enc_features = lm_model(input_ids=context, decoder_input_ids=summary, lm_labels=summary)
<add> loss, logits, enc_features = lm_model(input_ids=context, decoder_input_ids=summary, labels=summary)
<ide> expected_shape = (*summary.shape, config.vocab_size)
<ide> self.assertEqual(logits.shape, expected_shape)
<ide>
<ide> def test_lm_forward(self):
<ide> lm_labels = ids_tensor([batch_size, input_ids.shape[1]], self.vocab_size).to(torch_device)
<ide> lm_model = BartForConditionalGeneration(config)
<ide> lm_model.to(torch_device)
<del> loss, logits, enc_features = lm_model(input_ids=input_ids, lm_labels=lm_labels)
<add> loss, logits, enc_features = lm_model(input_ids=input_ids, labels=lm_labels)
<ide> expected_shape = (batch_size, input_ids.shape[1], config.vocab_size)
<ide> self.assertEqual(logits.shape, expected_shape)
<ide> self.assertIsInstance(loss.item(), float)
<ide> def test_lm_uneven_forward(self):
<ide> lm_model = BartForConditionalGeneration(config).to(torch_device)
<ide> context = torch.Tensor([[71, 82, 18, 33, 46, 91, 2], [68, 34, 26, 58, 30, 2, 1]]).long().to(torch_device)
<ide> summary = torch.Tensor([[82, 71, 82, 18, 2], [58, 68, 2, 1, 1]]).long().to(torch_device)
<del> loss, logits, enc_features = lm_model(input_ids=context, decoder_input_ids=summary, lm_labels=summary)
<add> loss, logits, enc_features = lm_model(input_ids=context, decoder_input_ids=summary, labels=summary)
<ide> expected_shape = (*summary.shape, config.vocab_size)
<ide> self.assertEqual(logits.shape, expected_shape)
<ide>
<ide><path>tests/test_modeling_bert.py
<ide> def create_and_check_bert_for_masked_lm(
<ide> model.to(torch_device)
<ide> model.eval()
<ide> loss, prediction_scores = model(
<del> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, masked_lm_labels=token_labels
<add> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels
<ide> )
<ide> result = {
<ide> "loss": loss,
<ide> def create_and_check_bert_model_for_masked_lm_as_decoder(
<ide> input_ids,
<ide> attention_mask=input_mask,
<ide> token_type_ids=token_type_ids,
<del> masked_lm_labels=token_labels,
<add> labels=token_labels,
<ide> encoder_hidden_states=encoder_hidden_states,
<ide> encoder_attention_mask=encoder_attention_mask,
<ide> )
<ide> loss, prediction_scores = model(
<ide> input_ids,
<ide> attention_mask=input_mask,
<ide> token_type_ids=token_type_ids,
<del> masked_lm_labels=token_labels,
<add> labels=token_labels,
<ide> encoder_hidden_states=encoder_hidden_states,
<ide> )
<ide> result = {
<ide> def create_and_check_bert_for_pretraining(
<ide> input_ids,
<ide> attention_mask=input_mask,
<ide> token_type_ids=token_type_ids,
<del> masked_lm_labels=token_labels,
<add> labels=token_labels,
<ide> next_sentence_label=sequence_labels,
<ide> )
<ide> result = {
<ide><path>tests/test_modeling_distilbert.py
<ide> def create_and_check_distilbert_for_masked_lm(
<ide> model = DistilBertForMaskedLM(config=config)
<ide> model.to(torch_device)
<ide> model.eval()
<del> loss, prediction_scores = model(input_ids, attention_mask=input_mask, masked_lm_labels=token_labels)
<add> loss, prediction_scores = model(input_ids, attention_mask=input_mask, labels=token_labels)
<ide> result = {
<ide> "loss": loss,
<ide> "prediction_scores": prediction_scores,
<ide><path>tests/test_modeling_electra.py
<ide> def create_and_check_electra_for_masked_lm(
<ide> model.to(torch_device)
<ide> model.eval()
<ide> loss, prediction_scores = model(
<del> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, masked_lm_labels=token_labels
<add> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels
<ide> )
<ide> result = {
<ide> "loss": loss,
<ide><path>tests/test_modeling_gpt2.py
<ide> def create_and_check_double_lm_head_model(
<ide> "mc_token_ids": mc_token_ids,
<ide> "attention_mask": multiple_choice_input_mask,
<ide> "token_type_ids": multiple_choice_token_type_ids,
<del> "lm_labels": multiple_choice_inputs_ids,
<add> "labels": multiple_choice_inputs_ids,
<ide> }
<ide>
<ide> loss, lm_logits, mc_logits, _ = model(**inputs)
<ide><path>tests/test_modeling_longformer.py
<ide> def create_and_check_longformer_for_masked_lm(
<ide> model.to(torch_device)
<ide> model.eval()
<ide> loss, prediction_scores = model(
<del> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, masked_lm_labels=token_labels
<add> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels
<ide> )
<ide> result = {
<ide> "loss": loss,
<ide> def test_inference_masked_lm(self):
<ide> [[0] + [20920, 232, 328, 1437] * 1000 + [2]], dtype=torch.long, device=torch_device
<ide> ) # long input
<ide>
<del> loss, prediction_scores = model(input_ids, masked_lm_labels=input_ids)
<add> loss, prediction_scores = model(input_ids, labels=input_ids)
<ide>
<ide> expected_loss = torch.tensor(0.0620, device=torch_device)
<ide> expected_prediction_scores_sum = torch.tensor(-6.1599e08, device=torch_device)
<ide><path>tests/test_modeling_openai.py
<ide> def create_and_check_double_lm_head_model(self, config, input_ids, head_mask, to
<ide> model.to(torch_device)
<ide> model.eval()
<ide>
<del> loss, lm_logits, mc_logits = model(input_ids, token_type_ids=token_type_ids, lm_labels=input_ids)
<add> loss, lm_logits, mc_logits = model(input_ids, token_type_ids=token_type_ids, labels=input_ids)
<ide>
<ide> result = {"loss": loss, "lm_logits": lm_logits}
<ide>
<ide><path>tests/test_modeling_roberta.py
<ide> def create_and_check_roberta_for_masked_lm(
<ide> model.to(torch_device)
<ide> model.eval()
<ide> loss, prediction_scores = model(
<del> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, masked_lm_labels=token_labels
<add> input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels
<ide> )
<ide> result = {
<ide> "loss": loss,
<ide><path>tests/test_modeling_t5.py
<ide> def create_and_check_t5_with_lm_head(
<ide> input_ids=input_ids,
<ide> decoder_input_ids=decoder_input_ids,
<ide> decoder_attention_mask=decoder_attention_mask,
<del> lm_labels=lm_labels,
<add> labels=lm_labels,
<ide> )
<ide> loss, prediction_scores, _, _ = outputs
<ide> self.parent.assertEqual(len(outputs), 4) | 10 |
Python | Python | add success message | e33d2b1bea819472541a8027e6638a538c900994 | <ide><path>spacy/cli/evaluate.py
<ide> def evaluate(
<ide> data = {re.sub(r"[\s/]", "_", k.lower()): v for k, v in results.items()}
<ide> if output_path is not None:
<ide> srsly.write_json(output_path, data)
<add> msg.good(f"Saved results to {output_path}")
<ide> return data
<ide>
<ide> | 1 |
PHP | PHP | allow inject view builder into mail renderer | d33e22c738b3990dd0a4379a3b4963f9d8de9065 | <ide><path>src/Mailer/Renderer.php
<ide> */
<ide> namespace Cake\Mailer;
<ide>
<add>use Cake\View\ViewBuilder;
<ide> use Cake\View\ViewVarsTrait;
<ide>
<ide> /**
<ide> class Renderer
<ide> */
<ide> public const TEMPLATE_FOLDER = 'email';
<ide>
<add> /**
<add> * Constructor
<add> *
<add> * @param \Cake\View\ViewBuilder|null $viewBuilder View builder instance.
<add> */
<add> public function __construct(?ViewBuilder $viewBuilder = null)
<add> {
<add> $this->_viewBuilder = $viewBuilder;
<add> }
<add>
<ide> /**
<ide> * Build and set all the view properties needed to render the templated emails.
<ide> * If there is no template set, the $content will be returned in a hash | 1 |
PHP | PHP | remove undefined variable from compact | 0a6bd84aed6faaff0a1716e26c62ff7aef7971cb | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function addWhereExistsQuery(self $query, $boolean = 'and', $not = false)
<ide> {
<ide> $type = $not ? 'NotExists' : 'Exists';
<ide>
<del> $this->wheres[] = compact('type', 'operator', 'query', 'boolean');
<add> $this->wheres[] = compact('type', 'query', 'boolean');
<ide>
<ide> $this->addBinding($query->getBindings(), 'where');
<ide> | 1 |
Javascript | Javascript | return 404 for update-my-current-challenge | 21709a11cbb143655c5e93baaecd6343186786ff | <ide><path>api-server/src/server/boot/certificate.js
<ide> import {
<ide> } from '../../../../config/certification-settings';
<ide> import { reportError } from '../middlewares/sentry-error-handler.js';
<ide>
<del>import { deprecatedEndpoint } from '../utils/deprecatedEndpoint';
<add>import { deprecatedEndpoint } from '../utils/disabled-endpoints';
<ide> import { getChallenges } from '../utils/get-curriculum';
<ide> import { ifNoUser401 } from '../utils/middleware';
<ide> import { observeQuery } from '../utils/rx';
<ide><path>api-server/src/server/boot/randomAPIs.js
<ide> import { getRedirectParams } from '../utils/redirection';
<del>import { deprecatedEndpoint } from '../utils/deprecatedEndpoint';
<add>import { deprecatedEndpoint } from '../utils/disabled-endpoints';
<ide>
<ide> module.exports = function (app) {
<ide> const router = app.loopback.Router();
<ide><path>api-server/src/server/boot/settings.js
<ide> import isURL from 'validator/lib/isURL';
<ide>
<ide> import { isValidUsername } from '../../../../utils/validate';
<ide> import { alertTypes } from '../../common/utils/flash.js';
<del>import { deprecatedEndpoint } from '../utils/deprecatedEndpoint';
<add>import {
<add> deprecatedEndpoint,
<add> temporarilyDisabledEndpoint
<add>} from '../utils/disabled-endpoints';
<ide> import { ifNoUser401, createValidatorErrorHandler } from '../utils/middleware';
<ide>
<ide> const log = debug('fcc:boot:settings');
<ide> export default function settingsController(app) {
<ide> api.put('/update-privacy-terms', ifNoUser401, updatePrivacyTerms);
<ide>
<ide> api.post('/refetch-user-completed-challenges', deprecatedEndpoint);
<del> api.post(
<del> '/update-my-current-challenge',
<del> ifNoUser401,
<del> updateMyCurrentChallengeValidators,
<del> createValidatorErrorHandler(alertTypes.danger),
<del> updateMyCurrentChallenge
<del> );
<add> // Re-enable once we can handle the traffic
<add> // api.post(
<add> // '/update-my-current-challenge',
<add> // ifNoUser401,
<add> // updateMyCurrentChallengeValidators,
<add> // createValidatorErrorHandler(alertTypes.danger),
<add> // updateMyCurrentChallenge
<add> // );
<add> api.post('/update-my-current-challenge', temporarilyDisabledEndpoint);
<ide> api.put('/update-my-portfolio', ifNoUser401, updateMyPortfolio);
<ide> api.put('/update-my-theme', ifNoUser401, updateMyTheme);
<ide> api.put('/update-my-about', ifNoUser401, updateMyAbout);
<ide> function updateMyEmail(req, res, next) {
<ide> .subscribe(message => res.json({ message }), next);
<ide> }
<ide>
<del>const updateMyCurrentChallengeValidators = [
<del> check('currentChallengeId')
<del> .isMongoId()
<del> .withMessage('currentChallengeId is not a valid challenge ID')
<del>];
<del>
<del>function updateMyCurrentChallenge(req, res, next) {
<del> const {
<del> user,
<del> body: { currentChallengeId }
<del> } = req;
<del> return user.updateAttribute(
<del> 'currentChallengeId',
<del> currentChallengeId,
<del> (err, updatedUser) => {
<del> if (err) {
<del> return next(err);
<del> }
<del> const { currentChallengeId } = updatedUser;
<del> return res.status(200).json(currentChallengeId);
<del> }
<del> );
<del>}
<add>// Re-enable once we can handle the traffic
<add>// const updateMyCurrentChallengeValidators = [
<add>// check('currentChallengeId')
<add>// .isMongoId()
<add>// .withMessage('currentChallengeId is not a valid challenge ID')
<add>// ];
<add>
<add>// Re-enable once we can handle the traffic
<add>// function updateMyCurrentChallenge(req, res, next) {
<add>// const {
<add>// user,
<add>// body: { currentChallengeId }
<add>// } = req;
<add>// return user.updateAttribute(
<add>// 'currentChallengeId',
<add>// currentChallengeId,
<add>// (err, updatedUser) => {
<add>// if (err) {
<add>// return next(err);
<add>// }
<add>// const { currentChallengeId } = updatedUser;
<add>// return res.status(200).json(currentChallengeId);
<add>// }
<add>// );
<add>// }
<ide>
<ide> function updateMyPortfolio(...args) {
<ide> const portfolioKeys = ['id', 'title', 'description', 'url', 'image'];
<ide><path>api-server/src/server/utils/deprecatedEndpoint.js
<del>export function deprecatedEndpoint(_, res) {
<del> return res.status(410).json({
<del> message: {
<del> type: 'info',
<del> message: 'Please reload the app, this feature is no longer available.'
<del> }
<del> });
<del>}
<ide><path>api-server/src/server/utils/disabled-endpoints.js
<add>export function deprecatedEndpoint(_, res) {
<add> return res.status(410).json({
<add> message: {
<add> type: 'info',
<add> message: 'Please reload the app, this feature is no longer available.'
<add> }
<add> });
<add>}
<add>
<add>export function temporarilyDisabledEndpoint(_, res) {
<add> return res.status(404).json({
<add> message: {
<add> type: 'info',
<add> message: 'Please reload the app, this feature is no longer available.'
<add> }
<add> });
<add>} | 5 |
Java | Java | remove shared bridge experiment | 260e6d23554a8e7f1743263894c9ca9a0cfbf01e | <ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactInstanceManager.java
<ide> public static ReactInstanceManagerBuilder builder() {
<ide> @Nullable DevBundleDownloadListener devBundleDownloadListener,
<ide> boolean useSeparateUIBackgroundThread,
<ide> int minNumShakes,
<del> boolean splitPackagesEnabled,
<del> boolean useOnlyDefaultPackages,
<ide> int minTimeLeftInFrameForNonBatchedOperationMs) {
<ide> Log.d(ReactConstants.TAG, "ReactInstanceManager.ctor()");
<ide> initializeSoLoaderIfNecessary(applicationContext);
<ide> public static ReactInstanceManagerBuilder builder() {
<ide> mUseSeparateUIBackgroundThread = useSeparateUIBackgroundThread;
<ide> mMinNumShakes = minNumShakes;
<ide> synchronized (mPackages) {
<del> if (!splitPackagesEnabled) {
<del> CoreModulesPackage coreModulesPackage =
<del> new CoreModulesPackage(
<del> this,
<del> mBackBtnHandler,
<del> mUIImplementationProvider,
<del> mLazyViewManagersEnabled,
<del> mMinTimeLeftInFrameForNonBatchedOperationMs);
<del> mPackages.add(coreModulesPackage);
<del> } else {
<del> PrinterHolder.getPrinter().logMessage(ReactDebugOverlayTags.RN_CORE, "RNCore: Use Split Packages");
<del> mPackages.add(new BridgeCorePackage(this, mBackBtnHandler));
<del> if (mUseDeveloperSupport) {
<del> mPackages.add(new DebugCorePackage());
<del> }
<del> if (!useOnlyDefaultPackages) {
<del> mPackages.add(
<del> new ReactNativeCorePackage(
<del> this,
<del> mUIImplementationProvider,
<del> mLazyViewManagersEnabled,
<del> mMinTimeLeftInFrameForNonBatchedOperationMs));
<del> }
<add> PrinterHolder.getPrinter().logMessage(ReactDebugOverlayTags.RN_CORE, "RNCore: Use Split Packages");
<add> mPackages.add(new BridgeCorePackage(this, mBackBtnHandler));
<add> if (mUseDeveloperSupport) {
<add> mPackages.add(new DebugCorePackage());
<ide> }
<add> mPackages.add(
<add> new ReactNativeCorePackage(
<add> this,
<add> mUIImplementationProvider,
<add> mLazyViewManagersEnabled,
<add> mMinTimeLeftInFrameForNonBatchedOperationMs));
<ide> mPackages.addAll(packages);
<ide> }
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactInstanceManagerBuilder.java
<ide> public class ReactInstanceManagerBuilder {
<ide> private @Nullable JavaScriptExecutorFactory mJavaScriptExecutorFactory;
<ide> private boolean mUseSeparateUIBackgroundThread;
<ide> private int mMinNumShakes = 1;
<del> private boolean mEnableSplitPackage;
<del> private boolean mUseOnlyDefaultPackages;
<ide> private int mMinTimeLeftInFrameForNonBatchedOperationMs = -1;
<ide>
<ide> /* package protected */ ReactInstanceManagerBuilder() {
<ide> public ReactInstanceManagerBuilder setMinNumShakes(int minNumShakes) {
<ide> return this;
<ide> }
<ide>
<del> public ReactInstanceManagerBuilder setEnableSplitPackage(boolean enableSplitPackage) {
<del> mEnableSplitPackage = enableSplitPackage;
<del> return this;
<del> }
<del>
<del> public ReactInstanceManagerBuilder setUseOnlyDefaultPackages(boolean useOnlyDefaultPackages) {
<del> mUseOnlyDefaultPackages = useOnlyDefaultPackages;
<del> return this;
<del> }
<del>
<ide> public ReactInstanceManagerBuilder setMinTimeLeftInFrameForNonBatchedOperationMs(
<ide> int minTimeLeftInFrameForNonBatchedOperationMs) {
<ide> mMinTimeLeftInFrameForNonBatchedOperationMs = minTimeLeftInFrameForNonBatchedOperationMs;
<ide> public ReactInstanceManager build() {
<ide> mDevBundleDownloadListener,
<ide> mUseSeparateUIBackgroundThread,
<ide> mMinNumShakes,
<del> mEnableSplitPackage,
<del> mUseOnlyDefaultPackages,
<ide> mMinTimeLeftInFrameForNonBatchedOperationMs);
<ide> }
<ide> } | 2 |
Ruby | Ruby | set version to nil if version is null | 4e3d23ad14b29832f14784939acc5afe2f1b28f8 | <ide><path>Library/Homebrew/resource.rb
<ide> def url(val = nil, specs = {})
<ide> end
<ide>
<ide> def version(val = nil)
<del> @version ||= detect_version(val)
<add> @version ||= begin
<add> version = detect_version(val)
<add> version.null? ? nil : version
<add> end
<ide> end
<ide>
<ide> def mirror(val)
<ide> def mirror(val)
<ide> private
<ide>
<ide> def detect_version(val)
<del> return if val.nil? && url.nil?
<add> return Version::NULL if val.nil? && url.nil?
<ide>
<ide> case val
<ide> when nil then Version.detect(url, specs) | 1 |
Javascript | Javascript | fix manipulation tests in android 4.4 | 0b0d4c634ab8a953a58bf531dd93e0ea4d632fdd | <ide><path>test/unit/manipulation.js
<ide> QUnit.test( "Tag name processing respects the HTML Standard (gh-2005)", function
<ide> }
<ide>
<ide> function assertSpecialCharsSupport( method, characters ) {
<add> // Support: Android 4.4 only
<add> // Chromium < 35 incorrectly upper-cases µ; Android 4.4 uses such a version by default
<add> // (and its WebView, being un-updatable, will use it for eternity) so we need to blacklist
<add> // that one for the tests to pass.
<add> if ( characters === "µ" && /chrome/i.test( navigator.userAgent ) &&
<add> navigator.userAgent.match( /chrome\/(\d+)/i )[ 1 ] < 35 ) {
<add> assert.ok( true, "This Chromium version upper-cases µ incorrectly; skip test" );
<add> assert.ok( true, "This Chromium version upper-cases µ incorrectly; skip test" );
<add> assert.ok( true, "This Chromium version upper-cases µ incorrectly; skip test" );
<add> return;
<add> }
<add>
<ide> var child,
<ide> codepoint = characters.charCodeAt( 0 ).toString( 16 ).toUpperCase(),
<ide> description = characters.length === 1 ? | 1 |
PHP | PHP | fix method name | 08cde109bfc94098fb79bc2b505de6497867c291 | <ide><path>tests/Notifications/NotificationSlackChannelTest.php
<ide> public function tearDown()
<ide> Mockery::close();
<ide> }
<ide>
<del> public function testSmsIsSentViaNexmo()
<add> public function testCorrectPayloadIsSentToSlack()
<ide> {
<ide> $notification = new Notification([
<ide> $notifiable = new NotificationSlackChannelTestNotifiable, | 1 |
Go | Go | remove devicemapper lazy initialization | b5795749d1688f99df422dd7068b189e5b25e5e9 | <ide><path>devmapper/deviceset.go
<ide> type MetaData struct {
<ide> type DeviceSet struct {
<ide> MetaData
<ide> sync.Mutex
<del> initialized bool
<ide> root string
<ide> devicePrefix string
<ide> TransactionId uint64
<ide> func (devices *DeviceSet) AddDevice(hash, baseHash string) error {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> utils.Debugf("Error init: %s\n", err)
<del> return err
<del> }
<del>
<ide> if devices.Devices[hash] != nil {
<ide> return fmt.Errorf("hash %s already exists", hash)
<ide> }
<ide> func (devices *DeviceSet) RemoveDevice(hash string) error {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> utils.Debugf("\n--->Err: %s\n", err)
<del> return err
<del> }
<del>
<ide> return devices.removeDevice(hash)
<ide> }
<ide>
<ide> func (devices *DeviceSet) Shutdown() error {
<ide> utils.Debugf("[devmapper] Shutting down DeviceSet: %s", devices.root)
<ide> defer devices.Unlock()
<ide>
<del> if !devices.initialized {
<del> return nil
<del> }
<del>
<ide> for path, count := range devices.activeMounts {
<ide> for i := count; i > 0; i-- {
<ide> if err := syscall.Unmount(path, 0); err != nil {
<ide> func (devices *DeviceSet) MountDevice(hash, path string, readOnly bool) error {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> return fmt.Errorf("Error initializing devmapper: %s", err)
<del> }
<del>
<ide> if err := devices.activateDeviceIfNeeded(hash); err != nil {
<ide> return fmt.Errorf("Error activating devmapper device for '%s': %s", hash, err)
<ide> }
<ide> func (devices *DeviceSet) HasDevice(hash string) bool {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> return false
<del> }
<ide> return devices.Devices[hash] != nil
<ide> }
<ide>
<ide> func (devices *DeviceSet) HasInitializedDevice(hash string) bool {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> return false
<del> }
<del>
<ide> info := devices.Devices[hash]
<ide> return info != nil && info.Initialized
<ide> }
<ide> func (devices *DeviceSet) HasActivatedDevice(hash string) bool {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> return false
<del> }
<del>
<ide> info := devices.Devices[hash]
<ide> if info == nil {
<ide> return false
<ide> func (devices *DeviceSet) SetInitialized(hash string) error {
<ide> devices.Lock()
<ide> defer devices.Unlock()
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> utils.Debugf("\n--->Err: %s\n", err)
<del> return err
<del> }
<del>
<ide> info := devices.Devices[hash]
<ide> if info == nil {
<ide> return fmt.Errorf("Unknown device %s", hash)
<ide> func (devices *DeviceSet) Status() *Status {
<ide>
<ide> status := &Status{}
<ide>
<del> if err := devices.ensureInit(); err != nil {
<del> return status
<del> }
<del>
<ide> status.PoolName = devices.getPoolName()
<ide> status.DataLoopback = path.Join(devices.loopbackDir(), "data")
<ide> status.MetadataLoopback = path.Join(devices.loopbackDir(), "metadata")
<ide> func (devices *DeviceSet) Status() *Status {
<ide> return status
<ide> }
<ide>
<del>func (devices *DeviceSet) ensureInit() error {
<del> if !devices.initialized {
<del> devices.initialized = true
<del> if err := devices.initDevmapper(); err != nil {
<del> utils.Debugf("\n--->Err: %s\n", err)
<del> return err
<del> }
<del> }
<del> return nil
<del>}
<del>
<del>func NewDeviceSet(root string) *DeviceSet {
<add>func NewDeviceSet(root string) (*DeviceSet, error) {
<ide> SetDevDir("/dev")
<ide>
<del> return &DeviceSet{
<del> initialized: false,
<add> devices := &DeviceSet{
<ide> root: root,
<ide> MetaData: MetaData{Devices: make(map[string]*DevInfo)},
<ide> activeMounts: make(map[string]int),
<ide> }
<add>
<add> if err := devices.initDevmapper(); err != nil {
<add> return nil, err
<add> }
<add>
<add> return devices, nil
<ide> }
<ide><path>devmapper/driver.go
<ide> type Driver struct {
<ide> }
<ide>
<ide> func Init(home string) (graphdriver.Driver, error) {
<add> deviceSet, err := NewDeviceSet(home);
<add> if err != nil {
<add> return nil, err
<add> }
<ide> d := &Driver{
<del> DeviceSet: NewDeviceSet(home),
<add> DeviceSet: deviceSet,
<ide> home: home,
<ide> }
<del> if err := d.DeviceSet.ensureInit(); err != nil {
<del> return nil, err
<del> }
<ide> return d, nil
<ide> }
<ide> | 2 |
Ruby | Ruby | fix deprecation warnings in action text tests | e9e2fd6f9f4033ca67ddb65c32fd658fefcbbd92 | <ide><path>actiontext/test/integration/controller_render_test.rb
<ide>
<ide> class ActionText::ControllerRenderTest < ActionDispatch::IntegrationTest
<ide> test "uses current request environment" do
<del> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpg")
<add> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
<ide> message = Message.create!(content: ActionText::Content.new.append_attachables(blob))
<ide>
<ide> host! "loocalhoost"
<ide> class ActionText::ControllerRenderTest < ActionDispatch::IntegrationTest
<ide> end
<ide>
<ide> test "renders as HTML when the request format is not HTML" do
<del> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpg")
<add> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
<ide> message = Message.create!(content: ActionText::Content.new.append_attachables(blob))
<ide>
<ide> host! "loocalhoost"
<ide> class ActionText::ControllerRenderTest < ActionDispatch::IntegrationTest
<ide> end
<ide>
<ide> test "resolves partials when controller is namespaced" do
<del> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpg")
<add> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
<ide> message = Message.create!(content: ActionText::Content.new.append_attachables(blob))
<ide>
<ide> get admin_message_path(message)
<ide><path>actiontext/test/integration/mailer_render_test.rb
<ide> class ActionText::MailerRenderTest < ActionMailer::TestCase
<ide> original_default_url_options = ActionMailer::Base.default_url_options
<ide> ActionMailer::Base.default_url_options = { host: "hoost" }
<ide>
<del> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpg")
<add> blob = create_file_blob(filename: "racecar.jpg", content_type: "image/jpeg")
<ide> message = Message.new(content: ActionText::Content.new.append_attachables(blob))
<ide>
<ide> MessagesMailer.with(recipient: "test", message: message).notification.deliver_now | 2 |
Text | Text | adjust install instructions [ci skip] | ef59ce783b9f1f1ce69b2316e9b705e21021e804 | <ide><path>website/docs/usage/index.md
<ide> source code and recompiling frequently.
<ide> previous installs with `pip uninstall spacy`, which you may need to run
<ide> multiple times to remove all traces of earlier installs.
<ide>
<del> ```diff
<del> - $ pip install .
<del> + $ pip install -r requirements.txt
<del> + $ pip install --no-build-isolation --editable .
<add> ```bash
<add> $ pip install -r requirements.txt
<add> $ pip install --no-build-isolation --editable .
<ide> ```
<ide>
<ide> - Build in parallel using `N` CPUs to speed up compilation and then install in
<ide> editable mode:
<ide>
<del> ```diff
<del> - $ pip install .
<del> + $ pip install -r requirements.txt
<del> + $ python setup.py build_ext --inplace -j N
<del> + $ pip install --no-build-isolation --editable .
<add> ```bash
<add> $ pip install -r requirements.txt
<add> $ python setup.py build_ext --inplace -j N
<add> $ pip install --no-build-isolation --editable .
<ide> ```
<ide>
<ide> ### Building an executable {#executable} | 1 |
Java | Java | update eviction queue before size | b451379c2f6da834a24d230bc9f1355660da3e7c | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/broker/DefaultSubscriptionRegistry.java
<ide> public LinkedMultiValueMap<String, String> getSubscriptions(String destination)
<ide> if (sessionIdToSubscriptionIds == null) {
<ide> sessionIdToSubscriptionIds = this.destinationCache.computeIfAbsent(destination, _destination -> {
<ide> LinkedMultiValueMap<String, String> matches = computeMatchingSubscriptions(destination);
<del> this.cacheSize.incrementAndGet();
<add> // Update queue first, so that cacheSize <= queue.size(
<ide> this.cacheEvictionPolicy.add(destination);
<add> this.cacheSize.incrementAndGet();
<ide> return matches;
<ide> });
<ide> ensureCacheLimit();
<ide> private void ensureCacheLimit() {
<ide> if (size > cacheLimit) {
<ide> do {
<ide> if (this.cacheSize.compareAndSet(size, size - 1)) {
<del> this.destinationCache.remove(this.cacheEvictionPolicy.poll());
<add> // Remove (vs poll): we expect an element
<add> String head = this.cacheEvictionPolicy.remove();
<add> this.destinationCache.remove(head);
<ide> }
<ide> } while ((size = this.cacheSize.get()) > cacheLimit);
<ide> } | 1 |
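The Spring patch above reorders the cache bookkeeping so the eviction queue is appended to before the size counter is incremented, which keeps `cacheSize <= queue.size()` and lets eviction call `remove()` (an element is guaranteed) instead of `poll()`. Below is a rough, single-threaded Python sketch of that invariant, with invented names and none of the original's concurrency handling.

```python
from collections import deque


class DestinationCache:
    def __init__(self, limit):
        self.limit = limit
        self.cache = {}           # destination -> cached matches
        self.eviction = deque()   # FIFO of destinations, oldest first
        self.size = 0             # never allowed to exceed len(self.eviction)

    def add(self, destination, matches):
        if destination in self.cache:
            return
        self.cache[destination] = matches
        # Queue first, then size: by the time size is bumped, the matching
        # queue entry already exists, so eviction always finds a head element.
        self.eviction.append(destination)
        self.size += 1
        self._ensure_limit()

    def _ensure_limit(self):
        while self.size > self.limit:
            self.size -= 1
            head = self.eviction.popleft()   # guaranteed to succeed
            self.cache.pop(head, None)


cache = DestinationCache(limit=2)
for destination in ("/a", "/b", "/c"):
    cache.add(destination, matches=[])
print(sorted(cache.cache))   # ['/b', '/c']
```

With the old order (size first, queue second), an evictor could observe a size larger than the queue and find nothing to poll; that is the window the commit closes.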
Python | Python | add option to predict on test set | 5adb39e757183a00b946d3b0571e1983fd0e26b7 | <ide><path>examples/run_ner.py
<ide> def train(args, train_dataset, model, tokenizer, labels, pad_token_label_id):
<ide> if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
<ide> # Log metrics
<ide> if args.local_rank == -1 and args.evaluate_during_training: # Only evaluate when single GPU otherwise metrics may not average well
<del> results = evaluate(args, model, tokenizer, labels, pad_token_label_id)
<add> results, _ = evaluate(args, model, tokenizer, labels, pad_token_label_id)
<ide> for key, value in results.items():
<ide> tb_writer.add_scalar("eval_{}".format(key), value, global_step)
<ide> tb_writer.add_scalar("lr", scheduler.get_lr()[0], global_step)
<ide> def train(args, train_dataset, model, tokenizer, labels, pad_token_label_id):
<ide> return global_step, tr_loss / global_step
<ide>
<ide>
<del>def evaluate(args, model, tokenizer, labels, pad_token_label_id, prefix=""):
<del> eval_dataset = load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, evaluate=True)
<add>def evaluate(args, model, tokenizer, labels, pad_token_label_id, mode, prefix=""):
<add> eval_dataset = load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, mode=mode)
<ide>
<ide> args.eval_batch_size = args.per_gpu_eval_batch_size * max(1, args.n_gpu)
<ide> # Note that DistributedSampler samples randomly
<ide> def evaluate(args, model, tokenizer, labels, pad_token_label_id, prefix=""):
<ide> for key in sorted(results.keys()):
<ide> logger.info(" %s = %s", key, str(results[key]))
<ide>
<del> return results
<add> return results, preds_list
<ide>
<ide>
<del>def load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, evaluate=False):
<add>def load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, mode):
<ide> if args.local_rank not in [-1, 0] and not evaluate:
<ide> torch.distributed.barrier() # Make sure only the first process in distributed training process the dataset, and the others will use the cache
<ide>
<ide> # Load data features from cache or dataset file
<del> cached_features_file = os.path.join(args.data_dir, "cached_{}_{}_{}".format("dev" if evaluate else "train",
<add> cached_features_file = os.path.join(args.data_dir, "cached_{}_{}_{}".format(mode,
<ide> list(filter(None, args.model_name_or_path.split("/"))).pop(),
<ide> str(args.max_seq_length)))
<ide> if os.path.exists(cached_features_file):
<ide> logger.info("Loading features from cached file %s", cached_features_file)
<ide> features = torch.load(cached_features_file)
<ide> else:
<ide> logger.info("Creating features from dataset file at %s", args.data_dir)
<del> examples = read_examples_from_file(args.data_dir, evaluate=evaluate)
<add> examples = read_examples_from_file(args.data_dir, mode)
<ide> features = convert_examples_to_features(examples, labels, args.max_seq_length, tokenizer,
<ide> cls_token_at_end=bool(args.model_type in ["xlnet"]),
<ide> # xlnet has a cls token at the end
<ide> def main():
<ide> help="Whether to run training.")
<ide> parser.add_argument("--do_eval", action="store_true",
<ide> help="Whether to run eval on the dev set.")
<add> parser.add_argument("--do_predict", action="store_true",
<add> help="Whether to run predictions on the test set.")
<ide> parser.add_argument("--evaluate_during_training", action="store_true",
<ide> help="Whether to run evaluation during training at each logging step.")
<ide> parser.add_argument("--do_lower_case", action="store_true",
<ide> def main():
<ide>
<ide> # Training
<ide> if args.do_train:
<del> train_dataset = load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, evaluate=False)
<add> train_dataset = load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, mode="train")
<ide> global_step, tr_loss = train(args, train_dataset, model, tokenizer, labels, pad_token_label_id)
<ide> logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)
<ide>
<ide> def main():
<ide> global_step = checkpoint.split("-")[-1] if len(checkpoints) > 1 else ""
<ide> model = model_class.from_pretrained(checkpoint)
<ide> model.to(args.device)
<del> result = evaluate(args, model, tokenizer, labels, pad_token_label_id, prefix=global_step)
<add> result, _ = evaluate(args, model, tokenizer, labels, pad_token_label_id, mode="dev", prefix=global_step)
<ide> if global_step:
<ide> result = {"{}_{}".format(global_step, k): v for k, v in result.items()}
<ide> results.update(result)
<ide> def main():
<ide> for key in sorted(results.keys()):
<ide> writer.write("{} = {}\n".format(key, str(results[key])))
<ide>
<add> if args.do_predict and args.local_rank in [-1, 0]:
<add> tokenizer = tokenizer_class.from_pretrained(args.output_dir, do_lower_case=args.do_lower_case)
<add> model = model_class.from_pretrained(args.output_dir)
<add> model.to(args.device)
<add> result, predictions = evaluate(args, model, tokenizer, labels, pad_token_label_id, mode="test")
<add> # Save results
<add> output_test_results_file = os.path.join(args.output_dir, "test_results.txt")
<add> with open(output_test_results_file, "w") as writer:
<add> for key in sorted(result.keys()):
<add> writer.write("{} = {}\n".format(key, str(result[key])))
<add> # Save predictions
<add> output_test_predictions_file = os.path.join(args.output_dir, "test_predictions.txt")
<add> with open(output_test_predictions_file, "w") as writer:
<add> with open(os.path.join(args.data_dir, "test.txt"), "r") as f:
<add> example_id = 0
<add> for line in f:
<add> if line.startswith("-DOCSTART-") or line == "" or line == "\n":
<add> writer.write(line)
<add> if not predictions[example_id]:
<add> example_id += 1
<add> elif predictions[example_id]:
<add> output_line = line.split()[0] + " " + predictions[example_id].pop(0) + "\n"
<add> writer.write(output_line)
<add> else:
<add> logger.warning("Maximum sequence length exceeded: No prediction for '%s'.", line.split()[0])
<add>
<ide> return results
<ide>
<ide>
<ide><path>examples/utils_ner.py
<ide> def __init__(self, input_ids, input_mask, segment_ids, label_ids):
<ide> self.label_ids = label_ids
<ide>
<ide>
<del>def read_examples_from_file(data_dir, evaluate=False):
<del> if evaluate:
<del> file_path = os.path.join(data_dir, "dev.txt")
<del> guid_prefix = "dev"
<del> else:
<del> file_path = os.path.join(data_dir, "train.txt")
<del> guid_prefix = "train"
<add>def read_examples_from_file(data_dir, mode):
<add> file_path = os.path.join(data_dir, "{}.txt".format(mode))
<ide> guid_index = 1
<ide> examples = []
<ide> with open(file_path, encoding="utf-8") as f:
<ide> def read_examples_from_file(data_dir, evaluate=False):
<ide> for line in f:
<ide> if line.startswith("-DOCSTART-") or line == "" or line == "\n":
<ide> if words:
<del> examples.append(InputExample(guid="{}-{}".format(guid_prefix, guid_index),
<add> examples.append(InputExample(guid="{}-{}".format(mode, guid_index),
<ide> words=words,
<ide> labels=labels))
<ide> guid_index += 1
<ide> def read_examples_from_file(data_dir, evaluate=False):
<ide> else:
<ide> splits = line.split(" ")
<ide> words.append(splits[0])
<del> labels.append(splits[-1].replace("\n", ""))
<add> if len(splits) > 1:
<add> labels.append(splits[-1].replace("\n", ""))
<add> else:
<add> # Examples could have no label for mode = "test"
<add> labels.append("O")
<ide> if words:
<del> examples.append(InputExample(guid="%s-%d".format(guid_prefix, guid_index),
<add> examples.append(InputExample(guid="%s-%d".format(mode, guid_index),
<ide> words=words,
<ide> labels=labels))
<ide> return examples | 2 |
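The new `--do_predict` path walks `test.txt` line by line and pops one predicted label per token, so the output stays aligned with the original file even when an example was truncated to `max_seq_length` (in which case a warning is logged instead of a label). Note that the unchanged context line `if args.local_rank not in [-1, 0] and not evaluate:` still refers to the old `evaluate` flag, which now resolves to the module-level `evaluate` function, so that guard appears to be effectively dead after this patch. The snippet below is a stripped-down, framework-free sketch of the alignment logic only; `predictions` is assumed to be a list of per-example label lists.

```python
def write_predictions(test_lines, predictions, out_path):
    """Write one 'token label' line per input token, keeping blank lines.

    test_lines:  raw lines of a CoNLL-style test.txt
    predictions: list of per-example label lists (possibly shorter than the
                 example if the sequence was truncated during encoding)
    """
    example_id = 0
    with open(out_path, "w") as writer:
        for line in test_lines:
            if line.startswith("-DOCSTART-") or line.strip() == "":
                writer.write(line)   # keep document and sentence breaks
                # advance once the current example's labels are used up
                if example_id < len(predictions) and not predictions[example_id]:
                    example_id += 1
            elif example_id < len(predictions) and predictions[example_id]:
                token = line.split()[0]
                writer.write(f"{token} {predictions[example_id].pop(0)}\n")
            else:
                # truncated example: no label left for this token
                print("no prediction for", line.split()[0])
```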
Javascript | Javascript | fix a ci lint error (src/menu-helpers.js) | 31422befed73d76285b1c6c1c4f0db8a106281b7
<ide> function findMatchingItemIndex(menu, { type, id, submenu }) {
<ide> }
<ide> for (let index = 0; index < menu.length; index++) {
<ide> const item = menu[index];
<del> if (
<del> item.id === id && (item.submenu != null) === (submenu != null)
<del> ) {
<add> if (item.id === id && (item.submenu != null) === (submenu != null)) {
<ide> return index;
<ide> }
<ide> } | 1 |
PHP | PHP | add ability to inject route instances | ac4f411ac3a772c9f4660cf08c9e8f3f62b1bb4d | <ide><path>src/Routing/ScopedRouteCollection.php
<ide> public function connect($route, array $defaults = [], $options = []) {
<ide> $options['_ext'] = $this->_extensions;
<ide> }
<ide>
<del> // TODO don't hardcode
<del> $routeClass = 'Cake\Routing\Route\Route';
<del> if (isset($options['routeClass'])) {
<del> $routeClass = App::className($options['routeClass'], 'Routing/Route');
<del> $routeClass = $this->_validateRouteClass($routeClass);
<del> unset($options['routeClass']);
<del> }
<del> if ($routeClass === 'Cake\Routing\Route\RedirectRoute' && isset($defaults['redirect'])) {
<del> $defaults = $defaults['redirect'];
<del> }
<del>
<del> $route = str_replace('//', '/', $this->_path . $route);
<del> if (is_array($defaults)) {
<del> $defaults += $this->_params;
<del> }
<del>
<del> // Store the route and named index if possible.
<del> $route = new $routeClass($route, $defaults, $options);
<del>
<add> $route = $this->_makeRoute($route, $defaults, $options);
<ide> if (isset($options['_name'])) {
<ide> $this->_named[$options['_name']] = $route;
<ide> }
<ide> public function connect($route, array $defaults = [], $options = []) {
<ide> }
<ide>
<ide> /**
<del> * Validates that the passed route class exists and is a subclass of Cake\Routing\Route\Route
<add> * Create a route object, or return the provided object.
<ide> *
<del> * @param string $routeClass Route class name
<del> * @return string
<del> * @throws \Cake\Error\Exception
<add> * @param string|\Cake\Routing\Route\Route $route The route template or route object.
<add> * @param array $defaults Default parameters.
<add> * @param array $options Additional options parameters.
<add> * @return \Cake\Routing\Route\Route
<ide> */
<del> protected function _validateRouteClass($routeClass) {
<del> if (
<del> $routeClass !== 'Cake\Routing\Route\Route' &&
<del> (!class_exists($routeClass) || !is_subclass_of($routeClass, 'Cake\Routing\Route\Route'))
<del> ) {
<del> throw new Error\Exception('Route class not found, or route class is not a subclass of Cake\Routing\Route\Route');
<add> protected function _makeRoute($route, $defaults, $options) {
<add> if (is_string($route)) {
<add> $routeClass = 'Cake\Routing\Route\Route';
<add> if (isset($options['routeClass'])) {
<add> $routeClass = App::className($options['routeClass'], 'Routing/Route');
<add> unset($options['routeClass']);
<add> }
<add> if ($routeClass === 'Cake\Routing\Route\RedirectRoute' && isset($defaults['redirect'])) {
<add> $defaults = $defaults['redirect'];
<add> }
<add>
<add> $route = str_replace('//', '/', $this->_path . $route);
<add> if (is_array($defaults)) {
<add> $defaults += $this->_params;
<add> }
<add> $route = new $routeClass($route, $defaults, $options);
<add> }
<add>
<add> if ($route instanceof Route) {
<add> return $route;
<ide> }
<del> return $routeClass;
<add> throw new Error\Exception('Route class not found, or route class is not a subclass of Cake\Routing\Route\Route');
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Routing/ScopedRouteCollectionTest.php
<ide> public function testGetNamed() {
<ide> $this->assertEquals('/l/:controller', $route->template);
<ide> }
<ide>
<add>/**
<add> * Test connecting an instance routes.
<add> *
<add> * @return void
<add> */
<add> public function testConnectInstance() {
<add> $routes = new ScopedRouteCollection('/l', ['prefix' => 'api']);
<add>
<add> $route = new Route('/:controller');
<add> $this->assertNull($routes->connect($route));
<add>
<add> $result = $routes->routes()[0];
<add> $this->assertSame($route, $result);
<add> }
<add>
<ide> /**
<ide> * Test connecting basic routes.
<ide> * | 2 |
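The CakePHP change above lets `connect()` accept either a route template string or a ready-made `Route` object, with `_makeRoute()` doing the normalization and rejecting anything that is not a `Route` subclass. Here is a small Python sketch of that "string or instance" factory pattern, using invented `Route`/`RouteCollection` classes rather than CakePHP's API.

```python
class Route:
    def __init__(self, template, defaults=None, options=None):
        self.template = template
        self.defaults = defaults or {}
        self.options = options or {}


class RedirectRoute(Route):
    pass


class RouteCollection:
    def __init__(self, path_prefix=""):
        self.path_prefix = path_prefix
        self.routes = []

    def connect(self, route, defaults=None, options=None):
        self.routes.append(self._make_route(route, defaults or {}, options or {}))

    def _make_route(self, route, defaults, options):
        # Strings are turned into Route objects; Route instances pass through.
        if isinstance(route, str):
            route_class = options.pop("route_class", Route)
            template = (self.path_prefix + route).replace("//", "/")
            return route_class(template, defaults, options)
        if isinstance(route, Route):
            return route
        raise TypeError("route must be a template string or a Route instance")


routes = RouteCollection("/l")
routes.connect("/:controller")          # built from a template
routes.connect(Route("/:controller"))   # injected instance used as-is
print(len(routes.routes))               # 2
```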
Ruby | Ruby | fix descendanttracker.clear on ruby 3.1 | cb82f5f0a438c7be64ca1c0633556f6a535138b6 | <ide><path>activesupport/lib/active_support/core_ext/class/subclasses.rb
<ide> def descendants
<ide> ObjectSpace.each_object(singleton_class).reject do |k|
<ide> k.singleton_class? || k == self
<ide> end
<del> end unless ActiveSupport::RubyFeatures::CLASS_DESCENDANTS
<add> end unless ActiveSupport::RubyFeatures::CLASS_SUBCLASSES
<ide>
<ide> # Returns an array with the direct children of +self+.
<ide> #
<ide><path>activesupport/lib/active_support/descendants_tracker.rb
<ide> def direct_descendants(klass)
<ide> end
<ide> end
<ide>
<del> if RubyFeatures::CLASS_DESCENDANTS
<add> @clear_disabled = false
<add>
<add> if RubyFeatures::CLASS_SUBCLASSES
<add> @@excluded_descendants = if RUBY_ENGINE == "ruby"
<add> # On MRI `ObjectSpace::WeakMap` keys are weak references.
<add> # So we can simply use WeakMap as a `Set`.
<add> ObjectSpace::WeakMap.new
<add> else
<add> # On TruffleRuby `ObjectSpace::WeakMap` keys are strong references.
<add> # So we use `object_id` as a key and the actual object as a value.
<add> #
<add> # JRuby for now doesn't have Class#descendant, but when it will, it will likely
<add> # have the same WeakMap semantic than Truffle so we future proof this as much as possible.
<add> class WeakSet
<add> def initialize
<add> @map = ObjectSpace::WeakMap.new
<add> end
<add>
<add> def [](object)
<add> @map.key?(object.object_id)
<add> end
<add>
<add> def []=(object, _present)
<add> @map[object_id] = object
<add> end
<add> end
<add> end
<add>
<ide> class << self
<add> def disable_clear! # :nodoc:
<add> unless @clear_disabled
<add> @clear_disabled = true
<add> remove_method(:subclasses)
<add> remove_method(:descendants)
<add> @@excluded_descendants = nil
<add> end
<add> end
<add>
<ide> def subclasses(klass)
<ide> klass.subclasses
<ide> end
<ide> def descendants(klass)
<ide> klass.descendants
<ide> end
<ide>
<del> def clear(only: nil) # :nodoc:
<del> # noop
<add> def clear(classes) # :nodoc:
<add> raise "DescendantsTracker.clear was disabled because config.cache_classes = true" if @clear_disabled
<add>
<add> classes.each do |klass|
<add> @@excluded_descendants[klass] = true
<add> klass.descendants.each do |descendant|
<add> @@excluded_descendants[descendant] = true
<add> end
<add> end
<ide> end
<ide>
<ide> def native? # :nodoc:
<ide> true
<ide> end
<ide> end
<ide>
<del> unless RubyFeatures::CLASS_SUBCLASSES
<del> def subclasses
<del> descendants.select { |descendant| descendant.superclass == self }
<del> end
<add> def subclasses
<add> subclasses = super
<add> subclasses.reject! { |d| @@excluded_descendants[d] }
<add> subclasses
<add> end
<add>
<add> def descendants
<add> descendants = super
<add> descendants.reject! { |d| @@excluded_descendants[d] }
<add> descendants
<ide> end
<ide>
<ide> def direct_descendants
<ide> def direct_descendants
<ide> @@direct_descendants = {}
<ide>
<ide> class << self
<add> def disable_clear! # :nodoc:
<add> @clear_disabled = true
<add> end
<add>
<ide> def subclasses(klass)
<ide> descendants = @@direct_descendants[klass]
<ide> descendants ? descendants.to_a : []
<ide> def descendants(klass)
<ide> arr
<ide> end
<ide>
<del> def clear(only: nil) # :nodoc:
<del> if only.nil?
<del> @@direct_descendants.clear
<del> return
<del> end
<add> def clear(classes) # :nodoc:
<add> raise "DescendantsTracker.clear was disabled because config.cache_classes = true" if @clear_disabled
<ide>
<ide> @@direct_descendants.each do |klass, direct_descendants_of_klass|
<del> if only.member?(klass)
<add> if classes.member?(klass)
<ide> @@direct_descendants.delete(klass)
<ide> else
<ide> direct_descendants_of_klass.reject! do |direct_descendant_of_class|
<del> only.member?(direct_descendant_of_class)
<add> classes.member?(direct_descendant_of_class)
<ide> end
<ide> end
<ide> end
<ide><path>activesupport/lib/active_support/ruby_features.rb
<ide>
<ide> module ActiveSupport
<ide> module RubyFeatures # :nodoc:
<del> CLASS_DESCENDANTS = Class.method_defined?(:descendants) # RUBY_VERSION >= "3.1"
<ide> CLASS_SUBCLASSES = Class.method_defined?(:subclasses) # RUBY_VERSION >= "3.1"
<ide> end
<ide> end
<ide><path>activesupport/test/descendants_tracker_test.rb
<ide> class DescendantsTrackerTest < ActiveSupport::TestCase
<ide> @original_state.each { |k, v| @original_state[k] = v.dup }
<ide> end
<ide>
<del> ActiveSupport::DescendantsTracker.clear
<ide> eval <<~RUBY
<ide> class Parent
<ide> extend ActiveSupport::DescendantsTracker
<ide> class Grandchild2 < Child1
<ide> end
<ide> end
<ide>
<del> test ".clear deletes all state" do
<del> ActiveSupport::DescendantsTracker.clear
<del> if ActiveSupport::DescendantsTracker.class_variable_defined?(:@@direct_descendants)
<del> assert_empty ActiveSupport::DescendantsTracker.class_variable_get(:@@direct_descendants)
<del> end
<del> end
<del>
<del> test ".clear(only) deletes the given classes only" do
<del> skip "Irrelevant for native Class#descendants" if ActiveSupport::DescendantsTracker.native?
<del>
<del> ActiveSupport::DescendantsTracker.clear(only: Set[Child2, Grandchild1])
<add> test ".clear(classes) deletes the given classes only" do
<add> ActiveSupport::DescendantsTracker.clear(Set[Child2, Grandchild1])
<ide>
<ide> assert_equal_sets [Child1, Grandchild2], Parent.descendants
<ide> assert_equal_sets [Grandchild2], Child1.descendants
<ide><path>railties/lib/rails/application/finisher.rb
<ide> def self.complete(_state)
<ide> initializer :set_clear_dependencies_hook, group: :all do |app|
<ide> callback = lambda do
<ide> # Order matters.
<del> ActiveSupport::DescendantsTracker.clear(
<del> only: ActiveSupport::Dependencies._autoloaded_tracked_classes
<del> )
<add> ActiveSupport::DescendantsTracker.clear(ActiveSupport::Dependencies._autoloaded_tracked_classes)
<ide> ActiveSupport::Dependencies.clear
<ide> end
<ide>
<ide> def self.complete(_state)
<ide>
<ide> if config.cache_classes
<ide> # No reloader
<add> ActiveSupport::DescendantsTracker.disable_clear!
<ide> elsif config.reload_classes_only_on_change
<ide> reloader = config.file_watcher.new(*watchable_args, &callback)
<ide> reloaders << reloader | 5 |
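The Rails patch above keeps a weak set of "cleared" classes — `ObjectSpace::WeakMap` on MRI, an `object_id`-keyed wrapper elsewhere — filters `subclasses`/`descendants` against it, and raises if `clear` is called after `disable_clear!` (the `cache_classes = true` case). Python's standard library has a direct counterpart in `weakref.WeakSet`; the sketch below only shows the filter-on-read idea, not Rails' autoloading machinery.

```python
import weakref


class DescendantsTracker:
    def __init__(self):
        self._excluded = weakref.WeakSet()   # entries disappear with the class
        self._clear_disabled = False

    def disable_clear(self):
        self._clear_disabled = True

    def clear(self, classes):
        if self._clear_disabled:
            raise RuntimeError("clear was disabled because classes are cached")
        for klass in classes:
            self._excluded.add(klass)
            for descendant in self.descendants(klass):
                self._excluded.add(descendant)

    def descendants(self, klass):
        # Walk the native subclass graph, filtering against the weak set.
        found = []
        for sub in klass.__subclasses__():
            if sub not in self._excluded:
                found.append(sub)
            found.extend(self.descendants(sub))
        return found


class Parent: ...
class Child(Parent): ...


tracker = DescendantsTracker()
print(tracker.descendants(Parent))   # [<class '__main__.Child'>]
tracker.clear([Child])
print(tracker.descendants(Parent))   # []
tracker.disable_clear()
# tracker.clear([Parent])            # would now raise RuntimeError
```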
PHP | PHP | prevent surprises with folder not being available | 010feb2092fde218695d0505f0ec6b08e77793ab
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide> App::uses('File', 'Utility');
<add>App::uses('Folder', 'Utility');
<ide> App::uses('CakeSchema', 'Model');
<ide>
<ide> /**
<ide> public function generate() {
<ide>
<ide> $content = $this->Schema->read($options);
<ide> $content['file'] = $this->params['file'];
<del>
<add>
<ide> Configure::write('Cache.disable', $cacheDisable);
<ide>
<ide> if ($snapshot === true) {
<ide> function _loadSchema() {
<ide> if (!empty($this->params['plugin'])) {
<ide> $plugin = $this->params['plugin'];
<ide> }
<del>
<add>
<ide> if (!empty($this->params['dry'])) {
<ide> $this->__dry = true;
<ide> $this->out(__d('cake_console', 'Performing a dry run.'));
<ide> public function getOptionParser() {
<ide> $write = array(
<ide> 'help' => __d('cake_console', 'Write the dumped SQL to a file.')
<ide> );
<del>
<add>
<ide> $parser = parent::getOptionParser();
<ide> $parser->description(
<ide> 'The Schema Shell generates a schema object from' .
<ide><path>lib/Cake/Console/Command/Task/ExtractTask.php
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide> App::uses('File', 'Utility');
<add>App::uses('Folder', 'Utility');
<add>
<ide> /**
<ide> * Language string extractor
<ide> *
<ide> function __markerError($file, $line, $marker, $count) {
<ide> function __searchFiles() {
<ide> $pattern = false;
<ide> if (!empty($this->_exclude)) {
<del> $pattern = '/[\/\\\\]' . implode('|', $this->_exclude) . '[\/\\\\]/';
<add> $pattern = '/[\/\\\\]' . implode('|', $this->_exclude) . '[\/\\\\]/';
<ide> }
<ide> foreach ($this->__paths as $path) {
<ide> $Folder = new Folder($path);
<ide><path>lib/Cake/Console/Command/Task/ProjectTask.php
<ide> */
<ide>
<ide> App::uses('File', 'Utility');
<add>App::uses('Folder', 'Utility');
<ide> App::uses('String', 'Utility');
<ide> App::uses('Security', 'Utility');
<ide>
<ide> public function getOptionParser() {
<ide> ));
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/tests/Case/Utility/FileTest.php
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide> App::uses('File', 'Utility');
<add>App::uses('Folder', 'Utility');
<ide>
<ide> /**
<ide> * FileTest class
<ide> function _getTmpFile($paintSkip = true) {
<ide> }
<ide> return false;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/tests/Case/View/Helper/HtmlHelperTest.php
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide>
<add>App::uses('Controller', 'Controller');
<ide> App::uses('Helper', 'View');
<ide> App::uses('AppHelper', 'View/Helper');
<del>App::uses('ClassRegistry', 'Utility');
<del>App::uses('Controller', 'Controller');
<ide> App::uses('HtmlHelper', 'View/Helper');
<ide> App::uses('FomrHelper', 'View/Helper');
<add>App::uses('ClassRegistry', 'Utility');
<add>App::uses('Folder', 'Utility');
<ide>
<ide> if (!defined('FULL_BASE_URL')) {
<ide> define('FULL_BASE_URL', 'http://cakephp.org');
<ide> class TestHtmlHelper extends HtmlHelper {
<ide> /**
<ide> * expose a method as public
<ide> *
<del> * @param string $options
<del> * @param string $exclude
<del> * @param string $insertBefore
<del> * @param string $insertAfter
<add> * @param string $options
<add> * @param string $exclude
<add> * @param string $insertBefore
<add> * @param string $insertAfter
<ide> * @return void
<ide> */
<ide> function parseAttributes($options, $exclude = null, $insertBefore = ' ', $insertAfter = null) {
<ide> function testScript() {
<ide> 'script' => array('type' => 'text/javascript', 'src' => 'js/jquery-1.3.2.js', 'defer' => 'defer', 'encoding' => 'utf-8')
<ide> );
<ide> $this->assertTags($result, $expected);
<del>
<add>
<ide> $this->View->expects($this->any())->method('addScript')
<ide> ->with($this->matchesRegularExpression('/script_in_head.js/'));
<ide> $result = $this->Html->script('script_in_head', array('inline' => false));
<ide> function testScriptBlock() {
<ide> );
<ide> $this->assertTags($result, $expected);
<ide>
<del>
<add>
<ide> $this->View->expects($this->any())->method('addScript')
<ide> ->with($this->matchesRegularExpression('/window\.foo\s\=\s2;/'));
<ide>
<ide> function testPara() {
<ide> $result = $this->Html->para('class-name', '<text>', array('escape' => true));
<ide> $this->assertTags($result, array('p' => array('class' => 'class-name'), '<text>', '/p'));
<ide> }
<del>
<add>
<ide> /**
<ide> * testCrumbList method
<del> *
<add> *
<ide> * @access public
<del> *
<add> *
<ide> * @return void
<ide> */
<ide> function testCrumbList() {
<ide> function testParseAttributeCompact() {
<ide> $helper = new TestHtmlHelper($this->View);
<ide> $compact = array('compact', 'checked', 'declare', 'readonly', 'disabled',
<ide> 'selected', 'defer', 'ismap', 'nohref', 'noshade', 'nowrap', 'multiple', 'noresize');
<del>
<add>
<ide> foreach ($compact as $attribute) {
<ide> foreach (array('true', true, 1, '1', $attribute) as $value) {
<ide> $attrs = array($attribute => $value); | 5 |
Go | Go | remove curl function from future | b0265e0a38a71fdbebf3b6ef53120249d399e293 | <ide><path>future/future.go
<ide> import (
<ide> "io"
<ide> "math/rand"
<ide> "net/http"
<del> "os/exec"
<ide> "time"
<ide> )
<ide>
<ide> func Pv(src io.Reader, info io.Writer) io.Reader {
<ide> return r
<ide> }
<ide>
<del>// Curl makes an http request by executing the unix command 'curl', and returns
<del>// the body of the response. If `stderr` is not nil, a progress bar will be
<del>// written to it.
<del>func Curl(url string, stderr io.Writer) (io.Reader, error) {
<del> curl := exec.Command("curl", "-#", "-L", "--fail", url)
<del> output, err := curl.StdoutPipe()
<del> if err != nil {
<del> return nil, err
<del> }
<del> curl.Stderr = stderr
<del> if err := curl.Start(); err != nil {
<del> return nil, err
<del> }
<del> if err := curl.Wait(); err != nil {
<del> return nil, err
<del> }
<del> return output, nil
<del>}
<del>
<ide> // Request a given URL and return an io.Reader
<ide> func Download(url string, stderr io.Writer) (*http.Response, error) {
<ide> var resp *http.Response | 1 |
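The removed `Curl` helper shelled out to the `curl` binary and returned its stdout; the surviving `Download` function leans on the language's own HTTP client instead, avoiding the external dependency and the extra process. The same trade-off looks like this in Python, standard library only.

```python
import urllib.request


def download(url, timeout=30):
    """Fetch a URL with the stdlib client instead of spawning `curl`.

    urlopen follows redirects and raises urllib.error.HTTPError on 4xx/5xx
    responses, which covers the deleted code's -L and --fail flags; the -#
    progress bar would need extra code on top of this.
    """
    return urllib.request.urlopen(url, timeout=timeout)  # file-like response
```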
Text | Text | add note on batch contract for listeners | 52b8c2d2e0241e1c515131c5e5f576d5dad65059 | <ide><path>website/docs/api/architectures.md
<ide> Instead of defining its own `Tok2Vec` instance, a model architecture like
<ide> [Tagger](/api/architectures#tagger) can define a listener as its `tok2vec`
<ide> argument that connects to the shared `tok2vec` component in the pipeline.
<ide>
<add>Listeners work by caching the `Tok2Vec` output for a given batch of `Doc`s. This
<add>means that in order for a component to work with the listener, the batch of
<add>`Doc`s passed to the listener must be the same as the batch of `Doc`s passed to
<add>the `Tok2Vec`. As a result, any manipulation of the `Doc`s which would affect
<add>`Tok2Vec` output, such as to create special contexts or remove `Doc`s for which
<add>no prediction can be made, must happen inside the model, **after** the call to
<add>the `Tok2Vec` component.
<add>
<ide> | Name | Description |
<ide> | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
<ide> | `width` | The width of the vectors produced by the "upstream" [`Tok2Vec`](/api/tok2vec) component. ~~int~~ | | 1 |
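The added paragraph explains that a listener only works if the downstream component passes exactly the same batch of `Doc`s that the shared `Tok2Vec` component processed, because the listener serves cached output for that batch. The toy sketch below illustrates why the contract breaks when the batch is filtered or reordered outside the model; it uses invented `Producer`/`Listener` classes and a cache keyed by object identity, not spaCy's real implementation.

```python
class Producer:
    """Stands in for the shared tok2vec component."""

    def __init__(self):
        self.cache = {}

    def predict(self, batch):
        outputs = [f"vectors({doc})" for doc in batch]   # fake embeddings
        self.cache[id(batch)] = outputs                  # cached per batch object
        return outputs


class Listener:
    """Stands in for a downstream component's tok2vec listener."""

    def __init__(self, producer):
        self.producer = producer

    def predict(self, batch):
        try:
            return self.producer.cache[id(batch)]
        except KeyError:
            raise RuntimeError(
                "listener saw a different batch than the producer; "
                "drop or reorder docs inside the model, after tok2vec"
            ) from None


producer = Producer()
listener = Listener(producer)

docs = ["doc one", "doc two", "doc three"]
producer.predict(docs)
print(listener.predict(docs))        # same batch object: cache hit

filtered = [d for d in docs if d != "doc two"]
try:
    listener.predict(filtered)       # manipulated batch: contract broken
except RuntimeError as err:
    print(err)
```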
Text | Text | add arm64 to os.machine() | 490c3d69898f53a5317f4dff35fa9e19d357763e | <ide><path>doc/api/os.md
<ide> added:
<ide>
<ide> * Returns {string}
<ide>
<del>Returns the machine type as a string, such as `arm`, `aarch64`, `mips`,
<del>`mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`.
<add>Returns the machine type as a string, such as `arm`, `arm64`, `aarch64`,
<add>`mips`, `mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`.
<ide>
<ide> On POSIX systems, the machine type is determined by calling
<ide> [`uname(3)`][]. On Windows, `RtlGetVersion()` is used, and if it is not | 1 |
Ruby | Ruby | use enumerator for blockless range#step | 99664d54440f6420e9f9c3753dda295cb3bbbc9f | <ide><path>activesupport/lib/active_support/core_ext/range/blockless_step.rb
<ide> def self.included(base) #:nodoc:
<ide> base.alias_method_chain :step, :blockless
<ide> end
<ide>
<del> def step_with_blockless(value, &block)
<del> if block_given?
<del> step_without_blockless(value, &block)
<del> else
<del> returning [] do |array|
<del> step_without_blockless(value) { |step| array << step }
<add> if RUBY_VERSION < '1.9'
<add> def step_with_blockless(value, &block)
<add> if block_given?
<add> step_without_blockless(value, &block)
<add> else
<add> returning [] do |array|
<add> step_without_blockless(value) { |step| array << step }
<add> end
<add> end
<add> end
<add> else
<add> def step_with_blockless(value, &block)
<add> if block_given?
<add> step_without_blockless(value, &block)
<add> else
<add> step_without_blockless(value).to_a
<ide> end
<ide> end
<ide> end | 1 |
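On Ruby 1.9+ a blockless `step` already returns an enumerator, so the new branch simply materializes it with `to_a` instead of re-collecting values through a block. The same "callback or materialized list" split, in a hypothetical Python helper restricted to integers:

```python
def stepped(start, stop, step, callback=None):
    """Return [start, start+step, ...] up to stop, or feed each value to callback."""
    values = range(start, stop + 1, step)
    if callback is None:
        return list(values)      # blockless call: hand back the whole sequence
    for value in values:         # "block given": invoke it per step instead
        callback(value)
    return None


print(stepped(1, 10, 3))             # [1, 4, 7, 10]
stepped(1, 10, 3, callback=print)    # prints 1, 4, 7, 10, one per line
```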
Text | Text | fix all links to point to /react as root | 0475834470e1acac60b6f16c0f82f0e1f4cfc20f | <ide><path>docs/cookbook/cb-01-introduction.md
<ide> layout: docs
<ide> permalink: introduction.html
<ide> ---
<ide>
<del>The React.js cookbook provides solutions for common questions asked when working with the React framework. It's written in the [cookbook format](http://shop.oreilly.com/category/series/cookbooks.do) commonly used by O'Reilly Media. See [Inline Styles](inline-styles.html) recipe as an example.
<add>The React.js cookbook provides solutions for common questions asked when working with the React framework. It's written in the [cookbook format](http://shop.oreilly.com/category/series/cookbooks.do) commonly used by O'Reilly Media. See [Inline Styles](/react/docs/cookbook/inline-styles.html) recipe as an example.
<ide>
<ide> ### Contributing
<ide>
<ide><path>docs/cookbook/cb-03-if-else-in-JSX tip.md
<ide> What you're searching for is ternary expression:
<ide> React.renderComponent(<div id={true ? 'msg' : ''}>Hello World!</div>, mountNode);
<ide> ```
<ide>
<del>Try the [JSX compiler](http://facebook.github.io/react/jsx-compiler.html).
<add>Try the [JSX compiler](/react/jsx-compiler.html).
<ide><path>docs/cookbook/cb-03-if-else-in-JSX.md
<ide> React.renderComponent(<div id={true ? 'msg' : ''}>Hello World!</div>, mountNode)
<ide> ```
<ide>
<ide> ### Discussion
<del>Try the [JSX compiler](http://facebook.github.io/react/jsx-compiler.html).
<add>Try the [JSX compiler](react/jsx-compiler.html).
<ide><path>docs/cookbook/cb-06-style-prop-value-px tip.md
<ide> var divStyle = {height: 10}; // rendered as "height:10px"
<ide> React.renderComponent(<div style={divStyle}>Hello World!</div>, mountNode);
<ide> ```
<ide>
<del>See [Inline Styles](inline-styles-tip.html) for more info.
<add>See [Inline Styles](/react/docs/cookbook/inline-styles-tip.html) for more info.
<ide>
<ide> Sometimes you _do_ want to keep the CSS properties unitless. Here's a list of properties that won't get the automatic "px" suffix:
<ide>
<ide><path>docs/cookbook/cb-06-style-prop-value-px.md
<ide> React.renderComponent(<div style={divStyle}>Hello World!</div>, mountNode);
<ide> ```
<ide>
<ide> ### Discussion
<del>See [Inline Styles](inline-styles.html) for more info.
<add>See [Inline Styles](/react/docs/cookbook/inline-styles.html) for more info.
<ide>
<ide> Sometimes you _do_ want to keep the CSS properties unitless. Here's a list of properties that won't get the automatic "px" suffix:
<ide>
<ide><path>docs/cookbook/cb-08-controlled-input-null-value tip.md
<ide> layout: docs
<ide> permalink: controlled-input-null-value-tip.html
<ide> ---
<ide>
<del>Specifying the `value` prop on a [controlled component](forms.html) prevents the user from changing the input unless you desire so.
<add>Specifying the `value` prop on a [controlled component](/react/docs/cookbook/forms.html) prevents the user from changing the input unless you desire so.
<ide>
<ide> You might have run into a problem where `value` is specified, but the input can still be changed without consent. In this case, you might have accidentally set `value` to `undefined` or `null`.
<ide>
<ide><path>docs/cookbook/cb-08-controlled-input-null-value.md
<ide> permalink: controlled-input-null-value.html
<ide> ---
<ide>
<ide> ### Problem
<del>You specified a `value` parameter for your form input, but the input value can still be modified, contrary to [what you'd expect](forms.html).
<add>You specified a `value` parameter for your form input, but the input value can still be modified, contrary to [what you'd expect](/react/docs/cookbook/forms.html).
<ide>
<ide> ### Solution
<ide> You might have accidentally set `value` to `undefined` or `null`. The snippet below shows this phenomenon; after a second, the text becomes editable.
<ide><path>docs/cookbook/cb-09-componentWillReceiveProps-not-triggered-after-mounting-tip.md
<ide> layout: docs
<ide> permalink: componentWillReceiveProps-not-triggered-after-mounting-tip.html
<ide> ---
<ide>
<del>`componentWillReceiveProps` isn't triggered after the node is put on scene. This is by design. Check out [other lifecycle methods](component-specs.html) for the one that suits your needs.
<add>`componentWillReceiveProps` isn't triggered after the node is put on scene. This is by design. Check out [other lifecycle methods](/react/docs/cookbook/component-specs.html) for the one that suits your needs.
<ide>
<ide> The reason for that is because `componentWillReceiveProps` often handles the logic of comparing with the old props and acting upon changes; not triggering it at mounting (where there are no old props) helps in defining what the method does.
<ide><path>docs/cookbook/cb-09-componentWillReceiveProps-not-triggered-after-mounting.md
<ide> permalink: componentWillReceiveProps-not-triggered-after-mounting.html
<ide> `componentWillReceiveProps` isn't triggered after the node is put on scene.
<ide>
<ide> ### Solution
<del>This is by design. Check out [other lifecycle methods](component-specs.html) for the one that suits your needs.
<add>This is by design. Check out [other lifecycle methods](/react/docs/cookbook/component-specs.html) for the one that suits your needs.
<ide>
<ide> ### Discussion
<ide> `componentWillReceiveProps` often handles the logic of comparing with the old props and acting upon changes; not triggering it at mounting (where there are no old props) helps in defining what the method does.
<ide><path>docs/cookbook/cb-11-dom-event-listeners tip.md
<ide> permalink: dom-event-listeners-tip.html
<ide>
<ide> > Note:
<ide> >
<del>> This entry shows how to attach DOM events not provided by React ([check here for more info](events.html)). This is good for integrations with other libraries such as jQuery.
<add>> This entry shows how to attach DOM events not provided by React ([check here for more info](/react/docs/cookbook/events.html)). This is good for integrations with other libraries such as jQuery.
<ide>
<ide> This example displays the window width:
<ide>
<ide><path>docs/cookbook/cb-11-dom-event-listeners.md
<ide> You want to listen to an event inside a component.
<ide> ### Solution
<ide> > Note:
<ide> >
<del>> This entry shows how to attach DOM events not provided by React ([check here for more info](events.html)). This is good for integrations with other libraries such as jQuery.
<add>> This entry shows how to attach DOM events not provided by React ([check here for more info](/react/docs/cookbook/events.html)). This is good for integrations with other libraries such as jQuery.
<ide>
<ide> This example displays the window width:
<ide> | 11 |
Javascript | Javascript | fix navigation jump on multi-tap | b3886652ab96fda55ed579abb54344178ddcf877 | <ide><path>Libraries/CustomComponents/Navigator/Navigator.js
<ide> var Navigator = React.createClass({
<ide> },
<ide>
<ide> _transitionTo: function(destIndex, velocity, jumpSpringTo, cb) {
<add> if (
<add> destIndex === this.state.presentedIndex &&
<add> this.state.transitionQueue.length === 0
<add> ) {
<add> return;
<add> }
<ide> if (this.state.transitionFromIndex !== null) {
<ide> this.state.transitionQueue.push({
<ide> destIndex,
<ide> var Navigator = React.createClass({
<ide> });
<ide> return;
<ide> }
<del> if (destIndex === this.state.presentedIndex) {
<del> return;
<del> }
<add>
<ide> this.state.transitionFromIndex = this.state.presentedIndex;
<ide> this.state.presentedIndex = destIndex;
<ide> this.state.transitionCb = cb; | 1 |
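The Navigator fix above only short-circuits when the destination is already presented and the transition queue is empty; a tap that arrives while a transition is in flight is queued rather than dropped, which is what previously produced the jump on multi-tap. A compact, synchronous Python model of that guard, with invented names and no animation state:

```python
class Navigator:
    def __init__(self):
        self.presented_index = 0
        self.transition_from = None   # index being animated away from, or None
        self.queue = []               # destinations requested mid-transition

    def transition_to(self, dest):
        # Only ignore a no-op request when nothing is waiting behind it.
        if dest == self.presented_index and not self.queue:
            return
        if self.transition_from is not None:
            self.queue.append(dest)   # a transition is running: defer, don't drop
            return
        self.transition_from = self.presented_index
        self.presented_index = dest
        self._finish_transition()

    def _finish_transition(self):
        self.transition_from = None
        if self.queue:
            self.transition_to(self.queue.pop(0))


nav = Navigator()
for tap in (1, 1, 2):          # rapid taps, including a duplicate
    nav.transition_to(tap)
print(nav.presented_index)     # 2
```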
Go | Go | define consts for ipcmode | 98f0f0dd87d42f2372412795bfd013f94a9c1cf2 | <ide><path>api/server/router/container/container_routes.go
<ide> func (s *containerRouter) postContainersCreate(ctx context.Context, w http.Respo
<ide>
<ide> // Older clients (API < 1.40) expects the default to be shareable, make them happy
<ide> if hostConfig.IpcMode.IsEmpty() {
<del> hostConfig.IpcMode = container.IpcMode("shareable")
<add> hostConfig.IpcMode = container.IPCModeShareable
<ide> }
<ide> }
<ide> if hostConfig != nil && versions.LessThan(version, "1.41") && !s.cgroup2 {
<ide><path>api/types/container/host_config.go
<ide> const (
<ide> // IpcMode represents the container ipc stack.
<ide> type IpcMode string
<ide>
<add>// IpcMode constants
<add>const (
<add> IPCModeNone IpcMode = "none"
<add> IPCModeHost IpcMode = "host"
<add> IPCModeContainer IpcMode = "container"
<add> IPCModePrivate IpcMode = "private"
<add> IPCModeShareable IpcMode = "shareable"
<add>)
<add>
<ide> // IsPrivate indicates whether the container uses its own private ipc namespace which can not be shared.
<ide> func (n IpcMode) IsPrivate() bool {
<del> return n == "private"
<add> return n == IPCModePrivate
<ide> }
<ide>
<ide> // IsHost indicates whether the container shares the host's ipc namespace.
<ide> func (n IpcMode) IsHost() bool {
<del> return n == "host"
<add> return n == IPCModeHost
<ide> }
<ide>
<ide> // IsShareable indicates whether the container's ipc namespace can be shared with another container.
<ide> func (n IpcMode) IsShareable() bool {
<del> return n == "shareable"
<add> return n == IPCModeShareable
<ide> }
<ide>
<ide> // IsContainer indicates whether the container uses another container's ipc namespace.
<ide> func (n IpcMode) IsContainer() bool {
<del> parts := strings.SplitN(string(n), ":", 2)
<del> return len(parts) > 1 && parts[0] == "container"
<add> return strings.HasPrefix(string(n), string(IPCModeContainer)+":")
<ide> }
<ide>
<ide> // IsNone indicates whether container IpcMode is set to "none".
<ide> func (n IpcMode) IsNone() bool {
<del> return n == "none"
<add> return n == IPCModeNone
<ide> }
<ide>
<ide> // IsEmpty indicates whether container IpcMode is empty
<ide> func (n IpcMode) Valid() bool {
<ide>
<ide> // Container returns the name of the container ipc stack is going to be used.
<ide> func (n IpcMode) Container() string {
<del> parts := strings.SplitN(string(n), ":", 2)
<del> if len(parts) > 1 && parts[0] == "container" {
<del> return parts[1]
<add> if n.IsContainer() {
<add> return strings.TrimPrefix(string(n), string(IPCModeContainer)+":")
<ide> }
<ide> return ""
<ide> }
<ide><path>cmd/dockerd/config_unix.go
<ide> func installConfigFlags(conf *config.Config, flags *pflag.FlagSet) error {
<ide> flags.StringVar(&conf.SeccompProfile, "seccomp-profile", "", "Path to seccomp profile")
<ide> flags.Var(&conf.ShmSize, "default-shm-size", "Default shm size for containers")
<ide> flags.BoolVar(&conf.NoNewPrivileges, "no-new-privileges", false, "Set no-new-privileges by default for new containers")
<del> flags.StringVar(&conf.IpcMode, "default-ipc-mode", config.DefaultIpcMode, `Default mode for containers ipc ("shareable" | "private")`)
<add> flags.StringVar(&conf.IpcMode, "default-ipc-mode", string(config.DefaultIpcMode), `Default mode for containers ipc ("shareable" | "private")`)
<ide> flags.Var(&conf.NetworkConfig.DefaultAddressPools, "default-address-pool", "Default address pools for node specific local networks")
<ide> // rootless needs to be explicitly specified for running "rootful" dockerd in rootless dockerd (#38702)
<ide> // Note that defaultUserlandProxyPath and honorXDG are configured according to the value of rootless.RunningWithRootlessKit, not the value of --rootless.
<ide><path>daemon/config/config_linux.go
<ide> import (
<ide>
<ide> const (
<ide> // DefaultIpcMode is default for container's IpcMode, if not set otherwise
<del> DefaultIpcMode = "private"
<add> DefaultIpcMode = containertypes.IPCModePrivate
<ide> )
<ide>
<ide> // BridgeConfig stores all the bridge driver specific
<ide><path>daemon/daemon_unix.go
<ide> func (daemon *Daemon) adaptContainerSettings(hostConfig *containertypes.HostConf
<ide> if hostConfig.IpcMode.IsEmpty() {
<ide> m := config.DefaultIpcMode
<ide> if daemon.configStore != nil {
<del> m = daemon.configStore.IpcMode
<add> m = containertypes.IpcMode(daemon.configStore.IpcMode)
<ide> }
<del> hostConfig.IpcMode = containertypes.IpcMode(m)
<add> hostConfig.IpcMode = m
<ide> }
<ide>
<ide> // Set default cgroup namespace mode, if unset for container
<ide><path>daemon/oci_linux_test.go
<ide> func TestTmpfsDevShmNoDupMount(t *testing.T) {
<ide> c := &container.Container{
<ide> ShmPath: "foobar", // non-empty, for c.IpcMounts() to work
<ide> HostConfig: &containertypes.HostConfig{
<del> IpcMode: containertypes.IpcMode("shareable"), // default mode
<add> IpcMode: containertypes.IPCModeShareable, // default mode
<ide> // --tmpfs /dev/shm:rw,exec,size=NNN
<ide> Tmpfs: map[string]string{
<ide> "/dev/shm": "rw,exec,size=1g",
<ide> func TestIpcPrivateVsReadonly(t *testing.T) {
<ide> skip.If(t, os.Getuid() != 0, "skipping test that requires root")
<ide> c := &container.Container{
<ide> HostConfig: &containertypes.HostConfig{
<del> IpcMode: containertypes.IpcMode("private"),
<add> IpcMode: containertypes.IPCModePrivate,
<ide> ReadonlyRootfs: true,
<ide> },
<ide> }
<ide><path>integration/container/ipcmode_linux_test.go
<ide> func TestAPIIpcModeHost(t *testing.T) {
<ide> Cmd: []string{"top"},
<ide> }
<ide> hostCfg := containertypes.HostConfig{
<del> IpcMode: containertypes.IpcMode("host"),
<add> IpcMode: containertypes.IPCModeHost,
<ide> }
<ide> ctx := context.Background()
<ide> | 7 |
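The Docker patch above swaps ad-hoc string literals for typed `IpcMode` constants and re-implements `IsContainer`/`Container` as a prefix check on `"container:"`. A Python rendering of the same idea follows — the mode values come from the diff, while the function names and the exact semantics of the `valid` helper are a sketch.

```python
CONTAINER_PREFIX = "container:"

IPC_MODE_NONE = "none"
IPC_MODE_HOST = "host"
IPC_MODE_PRIVATE = "private"
IPC_MODE_SHAREABLE = "shareable"


def is_container(mode: str) -> bool:
    """True for modes that reuse another container's IPC namespace."""
    return mode.startswith(CONTAINER_PREFIX)


def container(mode: str) -> str:
    """Name of the container whose IPC namespace is shared, or ''."""
    return mode[len(CONTAINER_PREFIX):] if is_container(mode) else ""


def valid(mode: str) -> bool:
    named = ("", IPC_MODE_NONE, IPC_MODE_HOST, IPC_MODE_PRIVATE, IPC_MODE_SHAREABLE)
    return mode in named or is_container(mode)


assert container("container:db") == "db"
assert container(IPC_MODE_PRIVATE) == ""
assert valid("container:db") and not valid("bogus")
```

Centralizing the prefix also removes the old `strings.SplitN` round-trips, which parsed the mode separately in `IsContainer` and in `Container`.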
Javascript | Javascript | improve hash caching | 43df5e3cde5c99d83369cb6566a05c09a4b9f6eb | <ide><path>lib/util/createHash.js
<ide>
<ide> const Hash = require("./Hash");
<ide>
<del>const BULK_SIZE = 1000;
<add>const BULK_SIZE = 2000;
<ide>
<del>const digestCache = new Map();
<add>// We are using an object instead of a Map as this will stay static during the runtime
<add>// so access to it can be optimized by v8
<add>const digestCaches = {};
<ide>
<ide> class BulkUpdateDecorator extends Hash {
<ide> /**
<ide> class BulkUpdateDecorator extends Hash {
<ide> * @returns {string|Buffer} digest
<ide> */
<ide> digest(encoding) {
<del> let cacheKey;
<add> let digestCache;
<ide> if (this.hash === undefined) {
<ide> // short data for hash, we can use caching
<del> cacheKey = `${this.hashKey}-${encoding}-${this.buffer}`;
<del> const cacheEntry = digestCache.get(cacheKey);
<add> const cacheKey = `${this.hashKey}-${encoding}`;
<add> digestCache = digestCaches[cacheKey];
<add> if (digestCache === undefined) {
<add> digestCache = digestCaches[cacheKey] = new Map();
<add> }
<add> const cacheEntry = digestCache.get(this.buffer);
<ide> if (cacheEntry !== undefined) return cacheEntry;
<ide> this.hash = this.hashFactory();
<ide> }
<ide> class BulkUpdateDecorator extends Hash {
<ide> const digestResult = this.hash.digest(encoding);
<ide> const result =
<ide> typeof digestResult === "string" ? digestResult : digestResult.toString();
<del> if (cacheKey !== undefined) {
<del> digestCache.set(cacheKey, result);
<add> if (digestCache !== undefined) {
<add> digestCache.set(this.buffer, result);
<ide> }
<ide> return result;
<ide> } | 1 |
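The webpack change above replaces a single global digest `Map` with one small `Map` per `hashKey`+`encoding` pair, stored on a plain object (`digestCaches`) that stays static at runtime, and keys each inner map by the accumulated short input; `BULK_SIZE` also grows to 2000. Roughly the same structure in Python, with a made-up `hash_factory` and the encoding kept only as part of the cache key:

```python
import hashlib

BULK_SIZE = 2000
digest_caches = {}   # (hash_key, encoding) -> {buffered_input: digest}


class BulkUpdateDecorator:
    """Buffers short string input and memoizes its digest per (hash_key, encoding)."""

    def __init__(self, hash_factory=hashlib.md5, hash_key="md5"):
        self.hash_factory = hash_factory
        self.hash_key = hash_key
        self.hash = None      # real hasher, only created once input gets large
        self.buffer = ""

    def update(self, data):
        if self.hash is None and len(self.buffer) + len(data) <= BULK_SIZE:
            self.buffer += data              # stay on the cheap, cacheable path
            return self
        if self.hash is None:
            self.hash = self.hash_factory()
            self.hash.update(self.buffer.encode())
        self.hash.update(data.encode())
        return self

    def digest(self, encoding="hex"):
        if self.hash is not None:            # large input: hash directly, no cache
            return self.hash.hexdigest()
        # Small input: one dict per (hash_key, encoding) pair, keyed by the
        # accumulated buffer, mirroring the two-level cache in the patch.
        cache = digest_caches.setdefault((self.hash_key, encoding), {})
        if self.buffer not in cache:
            hasher = self.hash_factory()
            hasher.update(self.buffer.encode())
            cache[self.buffer] = hasher.hexdigest()
        return cache[self.buffer]


a = BulkUpdateDecorator().update("chunk").digest()
b = BulkUpdateDecorator().update("chunk").digest()
assert a == b     # the second digest is served straight from digest_caches
```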
Text | Text | update teletype roadmap to reflect recent progress | 39fcb66ed269ea0a43bdc0e4c3ea33a3b2b4c8cf | <ide><path>docs/focus/README.md
<ide> Longer-term goal: Provide the world's fastest transition from "I want to collabo
<ide>
<ide> In no particular order:
<ide>
<del>- [ ] 🐛 Resolve or reduce impact of package initialization errors (https://github.com/atom/teletype/issues/266)
<add>- [x] 🐛 Resolve or reduce impact of package initialization errors (https://github.com/atom/teletype/issues/266)
<add>- [x] ✨ Ensure remote buffers are updated when host renames files (https://github.com/atom/teletype/issues/147)
<ide> - [ ] 🐛 Surface uncaught errors in promises (https://github.com/atom/teletype/issues/298#issuecomment-355369327)
<del>- [ ] ✨ Ensure remote buffers are updated when host renames files (https://github.com/atom/teletype/issues/147)
<ide> - [ ] 💖 In the buddy list, you can see which people are currently online (i.e., presence)
<ide> - [ ] 💖 Screen-sharing -- (We should prioritize screen-sharing above audio. We can keep using Slack/Skype/Zoom/Whatever for audio and use Atom for screen-sharing, whereas the opposite is not tr[ ] ue; disabling audio on a Slack call would feel unintuitive.)
<ide> - [ ] 💖 Audio | 1 |
Python | Python | delete a needless duplicate condition | f0329ea516d1562c9e7442b87e5eb76d2b876eae | <ide><path>src/transformers/generation_logits_process.py
<ide> def __init__(self, diversity_penalty: float, num_beams: int, num_beam_groups: in
<ide> raise ValueError("`num_beam_groups` should be an integer strictly larger than 1.")
<ide> if num_beam_groups > num_beams:
<ide> raise ValueError("`beam_groups` has to be smaller or equal to `num_beams`.")
<del> if num_beam_groups > num_beams:
<del> raise ValueError("`beam_groups` has to be smaller or equal to `num_beams`")
<ide> self._num_sub_beams = num_beams // num_beam_groups
<ide>
<ide> def __call__( | 1 |
PHP | PHP | replace deprecated phpunit methods | b81c4cef61996a579ee172c3fefe4f6ec663bb19 | <ide><path>tests/Database/DatabaseEloquentModelTest.php
<ide> public function testToArray()
<ide> $model->setRelation('multi', new BaseCollection);
<ide> $array = $model->toArray();
<ide>
<del> $this->assertInternalType('array', $array);
<add> $this->assertIsArray($array);
<ide> $this->assertEquals('foo', $array['name']);
<ide> $this->assertEquals('baz', $array['names'][0]['bar']);
<ide> $this->assertEquals('boom', $array['names'][1]['bam']);
<ide> public function testModelAttributesAreCastedWhenPresentInCastsArray()
<ide> $this->assertInternalType('boolean', $model->boolAttribute);
<ide> $this->assertInternalType('boolean', $model->booleanAttribute);
<ide> $this->assertInternalType('object', $model->objectAttribute);
<del> $this->assertInternalType('array', $model->arrayAttribute);
<del> $this->assertInternalType('array', $model->jsonAttribute);
<add> $this->assertIsArray($model->arrayAttribute);
<add> $this->assertIsArray($model->jsonAttribute);
<ide> $this->assertTrue($model->boolAttribute);
<ide> $this->assertFalse($model->booleanAttribute);
<ide> $this->assertEquals($obj, $model->objectAttribute);
<ide> public function testModelAttributesAreCastedWhenPresentInCastsArray()
<ide> $this->assertInternalType('boolean', $arr['boolAttribute']);
<ide> $this->assertInternalType('boolean', $arr['booleanAttribute']);
<ide> $this->assertInternalType('object', $arr['objectAttribute']);
<del> $this->assertInternalType('array', $arr['arrayAttribute']);
<del> $this->assertInternalType('array', $arr['jsonAttribute']);
<add> $this->assertIsArray($arr['arrayAttribute']);
<add> $this->assertIsArray($arr['jsonAttribute']);
<ide> $this->assertTrue($arr['boolAttribute']);
<ide> $this->assertFalse($arr['booleanAttribute']);
<ide> $this->assertEquals($obj, $arr['objectAttribute']);
<ide><path>tests/Http/HttpMimeTypeTest.php
<ide> public function testMimeTypeFromExtensionExistsFalse()
<ide>
<ide> public function testGetAllMimeTypes()
<ide> {
<del> $this->assertInternalType('array', MimeType::get());
<add> $this->assertIsArray(MimeType::get());
<ide> $this->assertArraySubset(['jpg' => 'image/jpeg'], MimeType::get());
<ide> }
<ide>
<ide><path>tests/Mail/MailableQueuedTest.php
<ide> public function testQueuedMailableWithAttachmentSent()
<ide> $mailable = new MailableQueableStub;
<ide> $attachmentOption = ['mime' => 'image/jpeg', 'as' => 'bar.jpg'];
<ide> $mailable->attach('foo.jpg', $attachmentOption);
<del> $this->assertInternalType('array', $mailable->attachments);
<add> $this->assertIsArray($mailable->attachments);
<ide> $this->assertCount(1, $mailable->attachments);
<ide> $this->assertEquals($mailable->attachments[0]['options'], $attachmentOption);
<ide> $queueFake->assertNothingPushed();
<ide> public function testQueuedMailableWithAttachmentFromDiskSent()
<ide>
<ide> $mailable->attachFromStorage('/', 'foo.jpg', $attachmentOption);
<ide>
<del> $this->assertInternalType('array', $mailable->diskAttachments);
<add> $this->assertIsArray($mailable->diskAttachments);
<ide> $this->assertCount(1, $mailable->diskAttachments);
<ide> $this->assertEquals($mailable->diskAttachments[0]['options'], $attachmentOption);
<ide>
<ide><path>tests/Routing/RoutingRouteTest.php
<ide> public function testRouteGetAction()
<ide> return 'foo';
<ide> })->name('foo');
<ide>
<del> $this->assertInternalType('array', $route->getAction());
<add> $this->assertIsArray($route->getAction());
<ide> $this->assertArrayHasKey('as', $route->getAction());
<ide> $this->assertEquals('foo', $route->getAction('as'));
<ide> $this->assertNull($route->getAction('unknown_property'));
<ide><path>tests/Support/SupportArrTest.php
<ide> public function testRandom()
<ide> $this->assertContains($random, ['foo', 'bar', 'baz']);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], 0);
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(0, $random);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], 1);
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(1, $random);
<ide> $this->assertContains($random[0], ['foo', 'bar', 'baz']);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], 2);
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(2, $random);
<ide> $this->assertContains($random[0], ['foo', 'bar', 'baz']);
<ide> $this->assertContains($random[1], ['foo', 'bar', 'baz']);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], '0');
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(0, $random);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], '1');
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(1, $random);
<ide> $this->assertContains($random[0], ['foo', 'bar', 'baz']);
<ide>
<ide> $random = Arr::random(['foo', 'bar', 'baz'], '2');
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(2, $random);
<ide> $this->assertContains($random[0], ['foo', 'bar', 'baz']);
<ide> $this->assertContains($random[1], ['foo', 'bar', 'baz']);
<ide> public function testRandom()
<ide> public function testRandomOnEmptyArray()
<ide> {
<ide> $random = Arr::random([], 0);
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(0, $random);
<ide>
<ide> $random = Arr::random([], '0');
<del> $this->assertInternalType('array', $random);
<add> $this->assertIsArray($random);
<ide> $this->assertCount(0, $random);
<ide> }
<ide>
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testMapToDictionary()
<ide>
<ide> $this->assertInstanceOf(Collection::class, $groups);
<ide> $this->assertEquals(['A' => [1], 'B' => [2, 4], 'C' => [3]], $groups->toArray());
<del> $this->assertInternalType('array', $groups['A']);
<add> $this->assertIsArray($groups['A']);
<ide> }
<ide>
<ide> public function testMapToDictionaryWithNumericKeys() | 6 |
Text | Text | use "caveats" name explicitly | 9b0b10ca03b10d6886b25c98e5a5a197504c990f | <ide><path>docs/Formula-Cookbook.md
<ide> Homebrew expects to find manual pages in `#{prefix}/share/man/...`, and not in `
<ide>
<ide> Some software installs to `man` instead of `share/man`, so check the output and add a `"--mandir=#{man}"` to the `./configure` line if needed.
<ide>
<del>### Special instructions
<add>### Caveats
<ide>
<ide> In case there are specific issues with the Homebrew packaging (compared to how the software is installed from other sources) a `caveats` block can be added to the formula to warn users. This can indicate non-standard install paths, an example from the `ruby` formula:
<ide> | 1 |
Python | Python | remove last bits of parsermixin | 941742593c50dcb0e1ca426621d107f12c9ee65c | <ide><path>djangorestframework/parsers.py
<ide> except ImportError:
<ide> from cgi import parse_qs
<ide>
<del>class ParserMixin(object):
<del> parsers = ()
<del>
<del>
<ide>
<ide>
<ide> class BaseParser(object):
<ide><path>djangorestframework/request.py
<ide> from StringIO import StringIO
<ide>
<ide> class RequestMixin(object):
<del> """Delegate class that supplements an HttpRequest object with additional behaviour."""
<add> """Mixin behaviour to deal with requests."""
<ide>
<ide> USE_FORM_OVERLOADING = True
<ide> METHOD_PARAM = "_method"
<ide><path>djangorestframework/resource.py
<ide>
<ide> from djangorestframework.compat import View
<ide> from djangorestframework.emitters import EmitterMixin
<del>from djangorestframework.parsers import ParserMixin
<ide> from djangorestframework.authenticators import AuthenticatorMixin
<ide> from djangorestframework.validators import FormValidatorMixin
<ide> from djangorestframework.response import Response, ResponseException
<ide> __all__ = ['Resource']
<ide>
<ide>
<del>class Resource(EmitterMixin, ParserMixin, AuthenticatorMixin, FormValidatorMixin, RequestMixin, View):
<add>class Resource(EmitterMixin, AuthenticatorMixin, FormValidatorMixin, RequestMixin, View):
<ide> """Handles incoming requests and maps them to REST operations,
<ide> performing authentication, input deserialization, input validation, output serialization."""
<ide>
<ide><path>djangorestframework/tests/content.py
<del># TODO: finish off the refactoring
<add>"""
<add>Tests for content parsing, and form-overloaded content parsing.
<add>"""
<ide> from django.test import TestCase
<ide> from djangorestframework.compat import RequestFactory
<ide> from djangorestframework.request import RequestMixin
<ide> class TestContentParsing(TestCase):
<ide> def setUp(self):
<ide> self.req = RequestFactory()
<ide>
<del># # Interface tests
<del>#
<del># def test_content_mixin_interface(self):
<del># """Ensure the ContentMixin interface is as expected."""
<del># self.assertRaises(NotImplementedError, ContentMixin().determine_content, None)
<del>#
<del># def test_standard_content_mixin_interface(self):
<del># """Ensure the OverloadedContentMixin interface is as expected."""
<del># self.assertTrue(issubclass(StandardContentMixin, ContentMixin))
<del># getattr(StandardContentMixin, 'determine_content')
<del>#
<del># def test_overloaded_content_mixin_interface(self):
<del># """Ensure the OverloadedContentMixin interface is as expected."""
<del># self.assertTrue(issubclass(OverloadedContentMixin, ContentMixin))
<del># getattr(OverloadedContentMixin, 'CONTENT_PARAM')
<del># getattr(OverloadedContentMixin, 'CONTENTTYPE_PARAM')
<del># getattr(OverloadedContentMixin, 'determine_content')
<del>#
<del>#
<del># # Common functionality to test with both StandardContentMixin and OverloadedContentMixin
<del>#
<ide> def ensure_determines_no_content_GET(self, view):
<ide> """Ensure view.RAW_CONTENT returns None for GET request with no content."""
<ide> view.request = self.req.get('/')
<ide> def test_standard_behaviour_determines_non_form_content_PUT(self):
<ide> """Ensure view.RAW_CONTENT returns content for PUT request with non-form content."""
<ide> self.ensure_determines_non_form_content_PUT(RequestMixin())
<ide>
<del># # OverloadedContentMixin behavioural tests
<del>#
<del># def test_overloaded_behaviour_determines_no_content_GET(self):
<del># """Ensure StandardContentMixin.determine_content(request) returns None for GET request with no content."""
<del># self.ensure_determines_no_content_GET(OverloadedContentMixin())
<del>#
<del># def test_overloaded_behaviour_determines_form_content_POST(self):
<del># """Ensure StandardContentMixin.determine_content(request) returns content for POST request with content."""
<del># self.ensure_determines_form_content_POST(OverloadedContentMixin())
<del>#
<del># def test_overloaded_behaviour_determines_non_form_content_POST(self):
<del># """Ensure StandardContentMixin.determine_content(request) returns (content type, content) for POST request with content."""
<del># self.ensure_determines_non_form_content_POST(OverloadedContentMixin())
<del>#
<del># def test_overloaded_behaviour_determines_form_content_PUT(self):
<del># """Ensure StandardContentMixin.determine_content(request) returns content for PUT request with content."""
<del># self.ensure_determines_form_content_PUT(OverloadedContentMixin())
<del>#
<del># def test_overloaded_behaviour_determines_non_form_content_PUT(self):
<del># """Ensure StandardContentMixin.determine_content(request) returns (content type, content) for PUT request with content."""
<del># self.ensure_determines_non_form_content_PUT(OverloadedContentMixin())
<del>#
<ide> def test_overloaded_behaviour_allows_content_tunnelling(self):
<ide> """Ensure request.RAW_CONTENT returns content for overloaded POST request"""
<ide> content = 'qwerty'
<ide> def test_overloaded_behaviour_allows_content_tunnelling(self):
<ide> view.parsers = (PlainTextParser,)
<ide> view.perform_form_overloading()
<ide> self.assertEqual(view.RAW_CONTENT, content)
<del>
<del># def test_overloaded_behaviour_allows_content_tunnelling_content_type_not_set(self):
<del># """Ensure determine_content(request) returns (None, content) for overloaded POST request with content type not set"""
<del># content = 'qwerty'
<del># request = self.req.post('/', {OverloadedContentMixin.CONTENT_PARAM: content})
<del># self.assertEqual(OverloadedContentMixin().determine_content(request), (None, content)) | 4 |
Text | Text | fix typo in text | d2167c573d23ec95bd85b1b9fa040b4c5234542b | <ide><path>guide/english/vim/modes/index.md
<ide> certain key presses can:
<ide> - **k** move one row up
<ide> - **l** move one character right
<ide>
<del>As many vim commands, row movement can be prefixed by a number to move s
<del>everal lines at a time:
<add> As with many vim commands, row movement can be prefixed by a number to move several lines at a time:
<ide> - **4j** move 4 rows down
<ide> - **6k** move 6 rows up
<ide> | 1 |
Javascript | Javascript | remove useless line | 72ce9baa75434f64e42ee4e666897ddfd754c822 | <ide><path>lib/stream.js
<ide> Stream.prototype.pipe = function(dest, options) {
<ide> source.removeListener('end', cleanup);
<ide> source.removeListener('close', cleanup);
<ide>
<del> dest.removeListener('end', cleanup);
<ide> dest.removeListener('close', cleanup);
<ide> }
<ide> | 1 |
Javascript | Javascript | remove quotes from object keys | 33faa3bada8084b38cb12f204abf240f7be0bea1 | <ide><path>server/boot/randomAPIs.js
<ide> module.exports = function(app) {
<ide> return res.redirect('/map');
<ide> }
<ide> return user.updateAttributes({
<del> 'sendMonthlyEmail': false,
<del> 'sendQuincyEmail': false,
<del> 'sendNotificationEmail': false
<add> sendMonthlyEmail: false,
<add> sendQuincyEmail: false,
<add> sendNotificationEmail: false
<ide> }, (err) => {
<ide> if (err) { return next(err); }
<ide> req.flash('info', {
<ide> module.exports = function(app) {
<ide> return res.redirect('/map');
<ide> }
<ide> return user.updateAttributes({
<del> 'sendQuincyEmail': false,
<del> 'sendMonthlyEmail': false,
<del> 'sendNotificationEmail': false
<add> sendQuincyEmail: false,
<add> sendMonthlyEmail: false,
<add> sendNotificationEmail: false
<ide> }, (err) => {
<ide> if (err) { return next(err); }
<ide> req.flash('info', { | 1 |
Python | Python | fix lint error | 283de38b29e304afb918fef58b96ef50d328c0b9 | <ide><path>official/recommendation/ncf_keras_main.py
<ide> def run_ncf(_):
<ide> if keras_utils.is_v2_0() and strategy is not None:
<ide> logging.error("NCF Keras only works with distribution strategy in TF 2.0")
<ide> return
<del>
<add>
<ide> if (params["keras_use_ctl"] and (
<ide> not keras_utils.is_v2_0() or strategy is None)):
<ide> logging.error( | 1 |
Text | Text | update readme with warning about withmutations | 0efba239d432152f3b660aac202aaf58d7ef5fc1 | <ide><path>README.md
<ide> Batching Mutations
<ide> > — Rich Hickey, Clojure
<ide>
<ide> Applying a mutation to create a new immutable object results in some overhead,
<del>which can add up to a performance penalty. If you need to apply a series of
<del>mutations locally before returning, `Immutable` gives you the ability to create
<del>a temporary mutable (transient) copy of a collection and apply a batch of
<add>which can add up to a minor performance penalty. If you need to apply a series
<add>of mutations locally before returning, `Immutable` gives you the ability to
<add>create a temporary mutable (transient) copy of a collection and apply a batch of
<ide> mutations in a performant manner by using `withMutations`. In fact, this is
<ide> exactly how `Immutable` applies complex mutations itself.
<ide>
<ide> Note: `immutable` also provides `asMutable` and `asImmutable`, but only
<ide> encourages their use when `withMutations` will not suffice. Use caution to not
<ide> return a mutable copy, which could result in undesired behavior.
<ide>
<add>*Important!*: Only a select few methods can be used in `withMutations` including
<add>`set`, `push` and `pop`. These methods can be applied directly against a
<add>persistent data-structure where other methods like `map`, `filter`, `sort`,
<add>and `splice` will always return new immutable data-structures and never mutate
<add>a mutable collection.
<add>
<ide>
<ide> Documentation
<ide> ------------- | 1 |
Python | Python | improve error message wrt get_config missing | 9de9148614562785b4e58e53cb2cd60052301729 | <ide><path>keras/engine/base_layer.py
<ide> def get_config(self):
<ide> textwrap.dedent(
<ide> f"""
<ide> Layer {self.__class__.__name__} has arguments {extra_args}
<del> in `__init__` and therefore must override `get_config()`.
<add> in `__init__()` and therefore must override `get_config()` in
<add> order to be serializable.
<ide>
<ide> Example:
<ide>
<ide> class CustomLayer(keras.layers.Layer):
<del> def __init__(self, arg1, arg2):
<del> super().__init__()
<add> def __init__(self, arg1, arg2, **kwargs):
<add> super().__init__(**kwargs)
<ide> self.arg1 = arg1
<ide> self.arg2 = arg2
<ide> | 1 |
Mixed | Javascript | use sentence case for class property | 936ce85c0be81f73304249aa5861436d4b71cc23 | <ide><path>doc/api/buffer.md
<ide> console.log(Buffer.isEncoding(''));
<ide> // Prints: false
<ide> ```
<ide>
<del>### Class Property: `Buffer.poolSize`
<add>### Class property: `Buffer.poolSize`
<ide> <!-- YAML
<ide> added: v0.11.3
<ide> -->
<ide><path>tools/doc/json.js
<ide> const eventPrefix = '^Event: +';
<ide> const classPrefix = '^[Cc]lass: +';
<ide> const ctorPrefix = '^(?:[Cc]onstructor: +)?`?new +';
<ide> const classMethodPrefix = '^Static method: +';
<del>const maybeClassPropertyPrefix = '(?:Class Property: +)?';
<add>const maybeClassPropertyPrefix = '(?:Class property: +)?';
<ide>
<ide> const maybeQuote = '[\'"]?';
<ide> const notQuotes = '[^\'"]+'; | 2 |
Javascript | Javascript | make head method to work with keep-alive" | f504c9c6b897f14891f147ad4a05e743133a0d35 | <ide><path>lib/_http_outgoing.js
<ide> function _storeHeader(firstLine, headers) {
<ide> }
<ide>
<ide> if (!state.contLen && !state.te) {
<del> if (!this._hasBody && (this.statusCode === 204 ||
<del> this.statusCode === 304)) {
<add> if (!this._hasBody) {
<ide> // Make sure we don't end the 0\r\n\r\n at the end of the message.
<ide> this.chunkedEncoding = false;
<ide> } else if (!this.useChunkedEncodingByDefault) {
<ide><path>test/parallel/test-http-reuse-socket.js
<del>'use strict';
<del>const common = require('../common');
<del>const http = require('http');
<del>const assert = require('assert');
<del>const Countdown = require('../common/countdown');
<del>
<del>// The HEAD:204, GET:200 is the most pathological test case.
<del>// GETs following a 204 response with a content-encoding header failed.
<del>// Responses without bodies and without content-length or encoding caused
<del>// the socket to be closed.
<del>const codes = [204, 200, 200, 304, 200];
<del>const methods = ['HEAD', 'HEAD', 'GET', 'HEAD', 'GET'];
<del>
<del>const sockets = [];
<del>const agent = new http.Agent();
<del>agent.maxSockets = 1;
<del>
<del>const countdown = new Countdown(codes.length, () => server.close());
<del>
<del>const server = http.createServer(common.mustCall((req, res) => {
<del> const code = codes.shift();
<del> assert.strictEqual(typeof code, 'number');
<del> assert.ok(code > 0);
<del> res.writeHead(code, {});
<del> res.end();
<del>}, codes.length));
<del>
<del>function nextRequest() {
<del> const request = http.request({
<del> port: server.address().port,
<del> path: '/',
<del> agent: agent,
<del> method: methods.shift()
<del> }, common.mustCall((response) => {
<del> response.on('end', common.mustCall(() => {
<del> if (countdown.dec()) {
<del> nextRequest();
<del> }
<del> assert.strictEqual(sockets.length, 1);
<del> }));
<del> response.resume();
<del> }));
<del> request.on('socket', common.mustCall((socket) => {
<del> if (!sockets.includes(socket)) {
<del> sockets.push(socket);
<del> }
<del> }));
<del> request.end();
<del>}
<del>
<del>server.listen(0, common.mustCall(nextRequest)); | 2 |
Ruby | Ruby | prefer pathname methods over regexp matching | 4f014836f2a5603a05a6de686ef4a298706111c9 | <ide><path>Library/Homebrew/formula_cellar_checks.rb
<ide> def check_infopages
<ide>
<ide> def check_jars
<ide> return unless f.lib.directory?
<del>
<del> jars = f.lib.children.select{|g| g.to_s =~ /\.jar$/}
<add> jars = f.lib.children.select { |g| g.extname == ".jar" }
<ide> return if jars.empty?
<ide>
<ide> ["JARs were installed to \"#{f.lib}\".",
<ide> def check_non_executables bin
<ide>
<ide> def check_generic_executables bin
<ide> return unless bin.directory?
<del> generics = bin.children.select { |g| g.to_s =~ /\/(run|service)$/}
<add> generic_names = %w[run service]
<add> generics = bin.children.select { |g| generic_names.include? g.basename.to_s }
<ide> return if generics.empty?
<ide>
<ide> ["Generic binaries were installed to \"#{bin}\".", | 1 |
Go | Go | move daemon stores to per platform | 3aa4a0071536d3b106374eaa44d8a55765901aa6 | <ide><path>api/server/backend/build/backend.go
<ide> import (
<ide> // ImageComponent provides an interface for working with images
<ide> type ImageComponent interface {
<ide> SquashImage(from string, to string) (string, error)
<del> TagImageWithReference(image.ID, reference.Named) error
<add> TagImageWithReference(image.ID, string, reference.Named) error
<ide> }
<ide>
<ide> // Backend provides build functionality to the API router
<ide><path>api/server/backend/build/tag.go
<ide> package build
<ide> import (
<ide> "fmt"
<ide> "io"
<add> "runtime"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/image"
<add> "github.com/docker/docker/pkg/system"
<ide> "github.com/pkg/errors"
<ide> )
<ide>
<ide> func NewTagger(backend ImageComponent, stdout io.Writer, names []string) (*Tagge
<ide> // TagImages creates image tags for the imageID
<ide> func (bt *Tagger) TagImages(imageID image.ID) error {
<ide> for _, rt := range bt.repoAndTags {
<del> if err := bt.imageComponent.TagImageWithReference(imageID, rt); err != nil {
<add> // TODO @jhowardmsft LCOW support. Will need revisiting.
<add> platform := runtime.GOOS
<add> if platform == "windows" && system.LCOWSupported() {
<add> platform = "linux"
<add> }
<add> if err := bt.imageComponent.TagImageWithReference(imageID, platform, rt); err != nil {
<ide> return err
<ide> }
<ide> fmt.Fprintf(bt.stdout, "Successfully tagged %s\n", reference.FamiliarString(rt))
<ide><path>api/types/types.go
<ide> type ContainerJSONBase struct {
<ide> Name string
<ide> RestartCount int
<ide> Driver string
<add> Platform string
<ide> MountLabel string
<ide> ProcessLabel string
<ide> AppArmorProfile string
<ide><path>cmd/dockerd/daemon.go
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide>
<ide> logrus.Info("Daemon has completed initialization")
<ide>
<del> logrus.WithFields(logrus.Fields{
<del> "version": dockerversion.Version,
<del> "commit": dockerversion.GitCommit,
<del> "graphdriver": d.GraphDriverName(),
<del> }).Info("Docker daemon")
<del>
<ide> cli.d = d
<ide>
<ide> initRouter(api, d, c)
<ide><path>daemon/build.go
<ide> package daemon
<ide>
<ide> import (
<ide> "io"
<add> "runtime"
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution/reference"
<ide> func (daemon *Daemon) GetImageAndReleasableLayer(ctx context.Context, refOrID st
<ide> image, _ := daemon.GetImage(refOrID)
<ide> // TODO: shouldn't we error out if error is different from "not found" ?
<ide> if image != nil {
<del> layer, err := newReleasableLayerForImage(image, daemon.layerStore)
<add> // TODO LCOW @jhowardmsft. For now using runtime.GOOS for this, will need enhancing for platform when porting the builder
<add> layer, err := newReleasableLayerForImage(image, daemon.stores[runtime.GOOS].layerStore)
<ide> return image, layer, err
<ide> }
<ide> }
<ide> func (daemon *Daemon) GetImageAndReleasableLayer(ctx context.Context, refOrID st
<ide> if err != nil {
<ide> return nil, nil, err
<ide> }
<del> layer, err := newReleasableLayerForImage(image, daemon.layerStore)
<add> // TODO LCOW @jhowardmsft. For now using runtime.GOOS for this, will need enhancing for platform when porting the builder
<add> layer, err := newReleasableLayerForImage(image, daemon.stores[runtime.GOOS].layerStore)
<ide> return image, layer, err
<ide> }
<ide>
<ide><path>daemon/cache.go
<ide> package daemon
<ide>
<ide> import (
<add> "runtime"
<add>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/builder"
<ide> "github.com/docker/docker/image/cache"
<ide> import (
<ide> // MakeImageCache creates a stateful image cache.
<ide> func (daemon *Daemon) MakeImageCache(sourceRefs []string) builder.ImageCache {
<ide> if len(sourceRefs) == 0 {
<del> return cache.NewLocal(daemon.imageStore)
<add> // TODO @jhowardmsft LCOW. For now, assume it is the OS of the host
<add> return cache.NewLocal(daemon.stores[runtime.GOOS].imageStore)
<ide> }
<ide>
<del> cache := cache.New(daemon.imageStore)
<add> // TODO @jhowardmsft LCOW. For now, assume it is the OS of the host
<add> cache := cache.New(daemon.stores[runtime.GOOS].imageStore)
<ide>
<ide> for _, ref := range sourceRefs {
<ide> img, err := daemon.GetImage(ref)
<ide><path>daemon/commit.go
<ide> func (daemon *Daemon) Commit(name string, c *backend.ContainerCommitConfig) (str
<ide> }()
<ide>
<ide> var parent *image.Image
<del> os := runtime.GOOS
<ide> if container.ImageID == "" {
<ide> parent = new(image.Image)
<ide> parent.RootFS = image.NewRootFS()
<ide> } else {
<del> parent, err = daemon.imageStore.Get(container.ImageID)
<add> parent, err = daemon.stores[container.Platform].imageStore.Get(container.ImageID)
<ide> if err != nil {
<ide> return "", err
<ide> }
<del> // To support LCOW, Windows needs to pass the platform in when registering the layer in the store
<del> if runtime.GOOS == "windows" {
<del> os = parent.OS
<del> }
<ide> }
<ide>
<del> l, err := daemon.layerStore.Register(rwTar, parent.RootFS.ChainID(), layer.Platform(os))
<add> 	l, err := daemon.stores[container.Platform].layerStore.Register(rwTar, parent.RootFS.ChainID(), layer.Platform(container.Platform))
<ide> if err != nil {
<ide> return "", err
<ide> }
<del> defer layer.ReleaseAndLog(daemon.layerStore, l)
<add> defer layer.ReleaseAndLog(daemon.stores[container.Platform].layerStore, l)
<ide>
<ide> containerConfig := c.ContainerConfig
<ide> if containerConfig == nil {
<ide> func (daemon *Daemon) Commit(name string, c *backend.ContainerCommitConfig) (str
<ide> return "", err
<ide> }
<ide>
<del> id, err := daemon.imageStore.Create(config)
<add> id, err := daemon.stores[container.Platform].imageStore.Create(config)
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide>
<ide> if container.ImageID != "" {
<del> if err := daemon.imageStore.SetParent(id, container.ImageID); err != nil {
<add> if err := daemon.stores[container.Platform].imageStore.SetParent(id, container.ImageID); err != nil {
<ide> return "", err
<ide> }
<ide> }
<ide> func (daemon *Daemon) Commit(name string, c *backend.ContainerCommitConfig) (str
<ide> return "", err
<ide> }
<ide> }
<del> if err := daemon.TagImageWithReference(id, newTag); err != nil {
<add> if err := daemon.TagImageWithReference(id, container.Platform, newTag); err != nil {
<ide> return "", err
<ide> }
<ide> imageRef = reference.FamiliarString(newTag)
<ide><path>daemon/container.go
<ide> import (
<ide> "fmt"
<ide> "os"
<ide> "path/filepath"
<add> "runtime"
<ide> "time"
<ide>
<ide> "github.com/docker/docker/api/errors"
<ide> func (daemon *Daemon) newContainer(name string, config *containertypes.Config, h
<ide> base.ImageID = imgID
<ide> base.NetworkSettings = &network.Settings{IsAnonymousEndpoint: noExplicitName}
<ide> base.Name = name
<del> base.Driver = daemon.GraphDriverName()
<del>
<add> // TODO @jhowardmsft LCOW - Get it from the platform of the container. For now, assume it is the OS of the host
<add> base.Driver = daemon.GraphDriverName(runtime.GOOS)
<add> // TODO @jhowardmsft LCOW - Similarly on this field. To solve this it will need a CLI/REST change in a subsequent PR during LCOW development
<add> base.Platform = runtime.GOOS
<ide> return base, err
<ide> }
<ide>
<ide><path>daemon/create.go
<ide> func (daemon *Daemon) generateSecurityOpt(hostConfig *containertypes.HostConfig)
<ide> func (daemon *Daemon) setRWLayer(container *container.Container) error {
<ide> var layerID layer.ChainID
<ide> if container.ImageID != "" {
<del> img, err := daemon.imageStore.Get(container.ImageID)
<add> img, err := daemon.stores[container.Platform].imageStore.Get(container.ImageID)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (daemon *Daemon) setRWLayer(container *container.Container) error {
<ide> StorageOpt: container.HostConfig.StorageOpt,
<ide> }
<ide>
<del> rwLayer, err := daemon.layerStore.CreateRWLayer(container.ID, layerID, rwLayerOpts)
<add> rwLayer, err := daemon.stores[container.Platform].layerStore.CreateRWLayer(container.ID, layerID, rwLayerOpts)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>daemon/daemon.go
<ide> var (
<ide> errSystemNotSupported = errors.New("The Docker daemon is not supported on this platform.")
<ide> )
<ide>
<add>type daemonStore struct {
<add> graphDriver string
<add> imageRoot string
<add> imageStore image.Store
<add> layerStore layer.Store
<add> distributionMetadataStore dmetadata.Store
<add> referenceStore refstore.Store
<add>}
<add>
<ide> // Daemon holds information about the Docker daemon.
<ide> type Daemon struct {
<del> ID string
<del> repository string
<del> containers container.Store
<del> execCommands *exec.Store
<del> referenceStore refstore.Store
<del> downloadManager *xfer.LayerDownloadManager
<del> uploadManager *xfer.LayerUploadManager
<del> distributionMetadataStore dmetadata.Store
<del> trustKey libtrust.PrivateKey
<del> idIndex *truncindex.TruncIndex
<del> configStore *config.Config
<del> statsCollector *stats.Collector
<del> defaultLogConfig containertypes.LogConfig
<del> RegistryService registry.Service
<del> EventsService *events.Events
<del> netController libnetwork.NetworkController
<del> volumes *store.VolumeStore
<del> discoveryWatcher discovery.Reloader
<del> root string
<del> seccompEnabled bool
<del> apparmorEnabled bool
<del> shutdown bool
<del> idMappings *idtools.IDMappings
<del> layerStore layer.Store
<del> imageStore image.Store
<del> PluginStore *plugin.Store // todo: remove
<del> pluginManager *plugin.Manager
<del> nameIndex *registrar.Registrar
<del> linkIndex *linkIndex
<del> containerd libcontainerd.Client
<del> containerdRemote libcontainerd.Remote
<del> defaultIsolation containertypes.Isolation // Default isolation mode on Windows
<del> clusterProvider cluster.Provider
<del> cluster Cluster
<del> metricsPluginListener net.Listener
<add> ID string
<add> repository string
<add> containers container.Store
<add> execCommands *exec.Store
<add> downloadManager *xfer.LayerDownloadManager
<add> uploadManager *xfer.LayerUploadManager
<add> trustKey libtrust.PrivateKey
<add> idIndex *truncindex.TruncIndex
<add> configStore *config.Config
<add> statsCollector *stats.Collector
<add> defaultLogConfig containertypes.LogConfig
<add> RegistryService registry.Service
<add> EventsService *events.Events
<add> netController libnetwork.NetworkController
<add> volumes *store.VolumeStore
<add> discoveryWatcher discovery.Reloader
<add> root string
<add> seccompEnabled bool
<add> apparmorEnabled bool
<add> shutdown bool
<add> idMappings *idtools.IDMappings
<add> stores map[string]daemonStore // By container target platform
<add> PluginStore *plugin.Store // todo: remove
<add> pluginManager *plugin.Manager
<add> nameIndex *registrar.Registrar
<add> linkIndex *linkIndex
<add> containerd libcontainerd.Client
<add> containerdRemote libcontainerd.Remote
<add> defaultIsolation containertypes.Isolation // Default isolation mode on Windows
<add> clusterProvider cluster.Provider
<add> cluster Cluster
<add> metricsPluginListener net.Listener
<ide>
<ide> machineMemory uint64
<ide>
<ide> func (daemon *Daemon) HasExperimental() bool {
<ide> }
<ide>
<ide> func (daemon *Daemon) restore() error {
<del> var (
<del> currentDriver = daemon.GraphDriverName()
<del> containers = make(map[string]*container.Container)
<del> )
<add> containers := make(map[string]*container.Container)
<ide>
<ide> logrus.Info("Loading containers: start.")
<ide>
<ide> func (daemon *Daemon) restore() error {
<ide> }
<ide>
<ide> // Ignore the container if it does not support the current driver being used by the graph
<del> if (container.Driver == "" && currentDriver == "aufs") || container.Driver == currentDriver {
<del> rwlayer, err := daemon.layerStore.GetRWLayer(container.ID)
<add> currentDriverForContainerPlatform := daemon.stores[container.Platform].graphDriver
<add> if (container.Driver == "" && currentDriverForContainerPlatform == "aufs") || container.Driver == currentDriverForContainerPlatform {
<add> rwlayer, err := daemon.stores[container.Platform].layerStore.GetRWLayer(container.ID)
<ide> if err != nil {
<ide> logrus.Errorf("Failed to load container mount %v: %v", id, err)
<ide> continue
<ide> func NewDaemon(config *config.Config, registryService registry.Service, containe
<ide> }
<ide> }
<ide>
<del> driverName := os.Getenv("DOCKER_DRIVER")
<del> if driverName == "" {
<del> driverName = config.GraphDriver
<add> 	// On Windows we don't support the environment variable, or a user supplied graphdriver,
<add> 	// as Windows has no choice in which graphdrivers to use: Windows containers on Windows
<add> 	// use windowsfilter, and Linux containers on Windows use lcow. Unix platforms, however,
<add> 	// run a single graphdriver for all containers, and it can be set through an environment
<add> 	// variable, a daemon start parameter, or chosen during layerstore initialization by
<add> 	// driver priority order, for example.
<add> d.stores = make(map[string]daemonStore)
<add> if runtime.GOOS == "windows" {
<add> d.stores["windows"] = daemonStore{graphDriver: "windowsfilter"}
<add> if system.LCOWSupported() {
<add> d.stores["linux"] = daemonStore{graphDriver: "lcow"}
<add> }
<add> } else {
<add> driverName := os.Getenv("DOCKER_DRIVER")
<add> if driverName == "" {
<add> driverName = config.GraphDriver
<add> }
<add> d.stores[runtime.GOOS] = daemonStore{graphDriver: driverName} // May still be empty. Layerstore init determines instead.
<ide> }
<ide>
<ide> d.RegistryService = registryService
<ide> func NewDaemon(config *config.Config, registryService registry.Service, containe
<ide> return nil, errors.Wrap(err, "couldn't create plugin manager")
<ide> }
<ide>
<del> d.layerStore, err = layer.NewStoreFromOptions(layer.StoreOptions{
<del> StorePath: config.Root,
<del> MetadataStorePathTemplate: filepath.Join(config.Root, "image", "%s", "layerdb"),
<del> GraphDriver: driverName,
<del> GraphDriverOptions: config.GraphOptions,
<del> IDMappings: idMappings,
<del> PluginGetter: d.PluginStore,
<del> ExperimentalEnabled: config.Experimental,
<del> })
<del> if err != nil {
<del> return nil, err
<add> var graphDrivers []string
<add> for platform, ds := range d.stores {
<add> ls, err := layer.NewStoreFromOptions(layer.StoreOptions{
<add> StorePath: config.Root,
<add> MetadataStorePathTemplate: filepath.Join(config.Root, "image", "%s", "layerdb"),
<add> GraphDriver: ds.graphDriver,
<add> GraphDriverOptions: config.GraphOptions,
<add> IDMappings: idMappings,
<add> PluginGetter: d.PluginStore,
<add> ExperimentalEnabled: config.Experimental,
<add> })
<add> if err != nil {
<add> return nil, err
<add> }
<add> ds.graphDriver = ls.DriverName() // As layerstore may set the driver
<add> ds.layerStore = ls
<add> d.stores[platform] = ds
<add> graphDrivers = append(graphDrivers, ls.DriverName())
<ide> }
<ide>
<del> graphDriver := d.layerStore.DriverName()
<del> imageRoot := filepath.Join(config.Root, "image", graphDriver)
<del>
<ide> // Configure and validate the kernels security support
<del> if err := configureKernelSecuritySupport(config, graphDriver); err != nil {
<add> if err := configureKernelSecuritySupport(config, graphDrivers); err != nil {
<ide> return nil, err
<ide> }
<ide>
<ide> logrus.Debugf("Max Concurrent Downloads: %d", *config.MaxConcurrentDownloads)
<del> d.downloadManager = xfer.NewLayerDownloadManager(d.layerStore, *config.MaxConcurrentDownloads)
<add> lsMap := make(map[string]layer.Store)
<add> for platform, ds := range d.stores {
<add> lsMap[platform] = ds.layerStore
<add> }
<add> d.downloadManager = xfer.NewLayerDownloadManager(lsMap, *config.MaxConcurrentDownloads)
<ide> logrus.Debugf("Max Concurrent Uploads: %d", *config.MaxConcurrentUploads)
<ide> d.uploadManager = xfer.NewLayerUploadManager(*config.MaxConcurrentUploads)
<ide>
<del> ifs, err := image.NewFSStoreBackend(filepath.Join(imageRoot, "imagedb"))
<del> if err != nil {
<del> return nil, err
<del> }
<add> for platform, ds := range d.stores {
<add> imageRoot := filepath.Join(config.Root, "image", ds.graphDriver)
<add> ifs, err := image.NewFSStoreBackend(filepath.Join(imageRoot, "imagedb"))
<add> if err != nil {
<add> return nil, err
<add> }
<ide>
<del> // TODO LCOW @jhowardmsft. For now assume it's the runtime OS. This will be modified
<del> // as the stores are split in a follow-up commit.
<del> d.imageStore, err = image.NewImageStore(ifs, runtime.GOOS, d.layerStore)
<del> if err != nil {
<del> return nil, err
<add> var is image.Store
<add> is, err = image.NewImageStore(ifs, platform, ds.layerStore)
<add> if err != nil {
<add> return nil, err
<add> }
<add> ds.imageRoot = imageRoot
<add> ds.imageStore = is
<add> d.stores[platform] = ds
<ide> }
<ide>
<ide> // Configure the volumes driver
<ide> func NewDaemon(config *config.Config, registryService registry.Service, containe
<ide> return nil, err
<ide> }
<ide>
<del> distributionMetadataStore, err := dmetadata.NewFSMetadataStore(filepath.Join(imageRoot, "distribution"))
<del> if err != nil {
<del> return nil, err
<del> }
<del>
<ide> eventsService := events.New()
<ide>
<del> referenceStore, err := refstore.NewReferenceStore(filepath.Join(imageRoot, "repositories.json"))
<del> if err != nil {
<del> return nil, fmt.Errorf("Couldn't create Tag store repositories: %s", err)
<del> }
<add> for platform, ds := range d.stores {
<add> dms, err := dmetadata.NewFSMetadataStore(filepath.Join(ds.imageRoot, "distribution"), platform)
<add> if err != nil {
<add> return nil, err
<add> }
<ide>
<del> migrationStart := time.Now()
<del> if err := v1.Migrate(config.Root, graphDriver, d.layerStore, d.imageStore, referenceStore, distributionMetadataStore); err != nil {
<del> logrus.Errorf("Graph migration failed: %q. Your old graph data was found to be too inconsistent for upgrading to content-addressable storage. Some of the old data was probably not upgraded. We recommend starting over with a clean storage directory if possible.", err)
<add> rs, err := refstore.NewReferenceStore(filepath.Join(ds.imageRoot, "repositories.json"), platform)
<add> if err != nil {
<add> return nil, fmt.Errorf("Couldn't create Tag store repositories: %s", err)
<add> }
<add> ds.distributionMetadataStore = dms
<add> ds.referenceStore = rs
<add> d.stores[platform] = ds
<add>
<add> // No content-addressability migration on Windows as it never supported pre-CA
<add> if runtime.GOOS != "windows" {
<add> migrationStart := time.Now()
<add> if err := v1.Migrate(config.Root, ds.graphDriver, ds.layerStore, ds.imageStore, rs, dms); err != nil {
<add> logrus.Errorf("Graph migration failed: %q. Your old graph data was found to be too inconsistent for upgrading to content-addressable storage. Some of the old data was probably not upgraded. We recommend starting over with a clean storage directory if possible.", err)
<add> }
<add> logrus.Infof("Graph migration to content-addressability took %.2f seconds", time.Since(migrationStart).Seconds())
<add> }
<ide> }
<del> logrus.Infof("Graph migration to content-addressability took %.2f seconds", time.Since(migrationStart).Seconds())
<ide>
<ide> // Discovery is only enabled when the daemon is launched with an address to advertise. When
<ide> // initialized, the daemon is registered and we can store the discovery backend as it's read-only
<ide> func NewDaemon(config *config.Config, registryService registry.Service, containe
<ide> d.repository = daemonRepo
<ide> d.containers = container.NewMemoryStore()
<ide> d.execCommands = exec.NewStore()
<del> d.referenceStore = referenceStore
<del> d.distributionMetadataStore = distributionMetadataStore
<ide> d.trustKey = trustKey
<ide> d.idIndex = truncindex.NewTruncIndex([]string{})
<ide> d.statsCollector = d.newStatsCollector(1 * time.Second)
<ide> func NewDaemon(config *config.Config, registryService registry.Service, containe
<ide> engineCpus.Set(float64(info.NCPU))
<ide> engineMemory.Set(float64(info.MemTotal))
<ide>
<add> gd := ""
<add> for platform, ds := range d.stores {
<add> if len(gd) > 0 {
<add> gd += ", "
<add> }
<add> gd += ds.graphDriver
<add> if len(d.stores) > 1 {
<add> gd = fmt.Sprintf("%s (%s)", gd, platform)
<add> }
<add> }
<add> logrus.WithFields(logrus.Fields{
<add> "version": dockerversion.Version,
<add> "commit": dockerversion.GitCommit,
<add> "graphdriver(s)": gd,
<add> }).Info("Docker daemon")
<add>
<ide> return d, nil
<ide> }
<ide>
<ide> func (daemon *Daemon) Shutdown() error {
<ide> logrus.Errorf("Stop container error: %v", err)
<ide> return
<ide> }
<del> if mountid, err := daemon.layerStore.GetMountID(c.ID); err == nil {
<add> if mountid, err := daemon.stores[c.Platform].layerStore.GetMountID(c.ID); err == nil {
<ide> daemon.cleanupMountsByID(mountid)
<ide> }
<ide> logrus.Debugf("container stopped %s", c.ID)
<ide> func (daemon *Daemon) Shutdown() error {
<ide> }
<ide> }
<ide>
<del> if daemon.layerStore != nil {
<del> if err := daemon.layerStore.Cleanup(); err != nil {
<del> logrus.Errorf("Error during layer Store.Cleanup(): %v", err)
<add> for platform, ds := range daemon.stores {
<add> if ds.layerStore != nil {
<add> if err := ds.layerStore.Cleanup(); err != nil {
<add> logrus.Errorf("Error during layer Store.Cleanup(): %v %s", err, platform)
<add> }
<ide> }
<ide> }
<ide>
<ide> func (daemon *Daemon) Mount(container *container.Container) error {
<ide> if container.BaseFS != "" && runtime.GOOS != "windows" {
<ide> daemon.Unmount(container)
<ide> return fmt.Errorf("Error: driver %s is returning inconsistent paths for container %s ('%s' then '%s')",
<del> daemon.GraphDriverName(), container.ID, container.BaseFS, dir)
<add> daemon.GraphDriverName(container.Platform), container.ID, container.BaseFS, dir)
<ide> }
<ide> }
<ide> container.BaseFS = dir // TODO: combine these fields
<ide> func (daemon *Daemon) Subnets() ([]net.IPNet, []net.IPNet) {
<ide> }
<ide>
<ide> // GraphDriverName returns the name of the graph driver used by the layer.Store
<del>func (daemon *Daemon) GraphDriverName() string {
<del> return daemon.layerStore.DriverName()
<add>func (daemon *Daemon) GraphDriverName(platform string) string {
<add> return daemon.stores[platform].layerStore.DriverName()
<ide> }
<ide>
<ide> // prepareTempDir prepares and returns the default directory to use
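Most of the churn in this file replaces the daemon's single layer/image/reference stores with the stores map keyed by target platform (the daemonStore struct introduced at the top of this hunk). A minimal sketch of that shape, with toy types standing in for layer.Store and friends, showing how an accessor such as GraphDriverName(platform) resolves against the map:

package main

import "fmt"

// Toy stand-in for the layer store; the real daemonStore also carries
// image, reference and distribution-metadata stores.
type layerStore struct{ driver string }

func (l layerStore) DriverName() string { return l.driver }

type daemonStore struct {
	graphDriver string
	layerStore  layerStore
}

type daemon struct {
	// Keyed by container target platform, e.g. "windows" or "linux".
	stores map[string]daemonStore
}

// GraphDriverName now has to be told which platform's store to consult.
func (d *daemon) GraphDriverName(platform string) string {
	return d.stores[platform].layerStore.DriverName()
}

func main() {
	d := &daemon{stores: map[string]daemonStore{
		"windows": {graphDriver: "windowsfilter", layerStore: layerStore{driver: "windowsfilter"}},
		"linux":   {graphDriver: "lcow", layerStore: layerStore{driver: "lcow"}},
	}}
	fmt.Println(d.GraphDriverName("linux")) // lcow
}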
<ide><path>daemon/daemon_solaris.go
<ide> func configureMaxThreads(config *Config) error {
<ide> }
<ide>
<ide> // configureKernelSecuritySupport configures and validate security support for the kernel
<del>func configureKernelSecuritySupport(config *Config, driverName string) error {
<add>func configureKernelSecuritySupport(config *config.Config, driverNames []string) error {
<ide> return nil
<ide> }
<ide>
<ide><path>daemon/daemon_unix.go
<ide> func overlaySupportsSelinux() (bool, error) {
<ide> }
<ide>
<ide> // configureKernelSecuritySupport configures and validates security support for the kernel
<del>func configureKernelSecuritySupport(config *config.Config, driverName string) error {
<add>func configureKernelSecuritySupport(config *config.Config, driverNames []string) error {
<ide> if config.EnableSelinuxSupport {
<ide> if !selinuxEnabled() {
<ide> logrus.Warn("Docker could not enable SELinux on the host system")
<ide> return nil
<ide> }
<ide>
<del> if driverName == "overlay" || driverName == "overlay2" {
<add> overlayFound := false
<add> for _, d := range driverNames {
<add> if d == "overlay" || d == "overlay2" {
<add> overlayFound = true
<add> break
<add> }
<add> }
<add>
<add> if overlayFound {
<ide> // If driver is overlay or overlay2, make sure kernel
<ide> // supports selinux with overlay.
<ide> supported, err := overlaySupportsSelinux()
<ide> func configureKernelSecuritySupport(config *config.Config, driverName string) er
<ide> }
<ide>
<ide> if !supported {
<del> logrus.Warnf("SELinux is not supported with the %s graph driver on this kernel", driverName)
<add> logrus.Warnf("SELinux is not supported with the %v graph driver on this kernel", driverNames)
<ide> }
<ide> }
<ide> } else {
<ide><path>daemon/daemon_windows.go
<ide> func checkSystem() error {
<ide> }
<ide>
<ide> // configureKernelSecuritySupport configures and validate security support for the kernel
<del>func configureKernelSecuritySupport(config *config.Config, driverName string) error {
<add>func configureKernelSecuritySupport(config *config.Config, driverNames []string) error {
<ide> return nil
<ide> }
<ide>
<ide><path>daemon/delete.go
<ide> func (daemon *Daemon) cleanupContainer(container *container.Container, forceRemo
<ide> // When container creation fails and `RWLayer` has not been created yet, we
<ide> // do not call `ReleaseRWLayer`
<ide> if container.RWLayer != nil {
<del> metadata, err := daemon.layerStore.ReleaseRWLayer(container.RWLayer)
<add> metadata, err := daemon.stores[container.Platform].layerStore.ReleaseRWLayer(container.RWLayer)
<ide> layer.LogReleaseMetadata(metadata)
<ide> if err != nil && err != layer.ErrMountDoesNotExist {
<del> return errors.Wrapf(err, "driver %q failed to remove root filesystem for %s", daemon.GraphDriverName(), container.ID)
<add> return errors.Wrapf(err, "driver %q failed to remove root filesystem for %s", daemon.GraphDriverName(container.Platform), container.ID)
<ide> }
<ide> }
<ide>
<ide><path>daemon/disk_usage.go
<ide> import (
<ide> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<del>func (daemon *Daemon) getLayerRefs() map[layer.ChainID]int {
<del> tmpImages := daemon.imageStore.Map()
<add>func (daemon *Daemon) getLayerRefs(platform string) map[layer.ChainID]int {
<add> tmpImages := daemon.stores[platform].imageStore.Map()
<ide> layerRefs := map[layer.ChainID]int{}
<ide> for id, img := range tmpImages {
<ide> dgst := digest.Digest(id)
<del> if len(daemon.referenceStore.References(dgst)) == 0 && len(daemon.imageStore.Children(id)) != 0 {
<add> if len(daemon.stores[platform].referenceStore.References(dgst)) == 0 && len(daemon.stores[platform].imageStore.Children(id)) != 0 {
<ide> continue
<ide> }
<ide>
<ide> func (daemon *Daemon) SystemDiskUsage(ctx context.Context) (*types.DiskUsage, er
<ide> }
<ide>
<ide> // Get all top images with extra attributes
<add> // TODO @jhowardmsft LCOW. This may need revisiting
<ide> allImages, err := daemon.Images(filters.NewArgs(), false, true)
<ide> if err != nil {
<ide> return nil, fmt.Errorf("failed to retrieve image list: %v", err)
<ide> func (daemon *Daemon) SystemDiskUsage(ctx context.Context) (*types.DiskUsage, er
<ide> }
<ide>
<ide> // Get total layers size on disk
<del> layerRefs := daemon.getLayerRefs()
<del> allLayers := daemon.layerStore.Map()
<ide> var allLayersSize int64
<del> for _, l := range allLayers {
<del> select {
<del> case <-ctx.Done():
<del> return nil, ctx.Err()
<del> default:
<del> size, err := l.DiffSize()
<del> if err == nil {
<del> if _, ok := layerRefs[l.ChainID()]; ok {
<del> allLayersSize += size
<add> for platform := range daemon.stores {
<add> layerRefs := daemon.getLayerRefs(platform)
<add> allLayers := daemon.stores[platform].layerStore.Map()
<add> for _, l := range allLayers {
<add> select {
<add> case <-ctx.Done():
<add> return nil, ctx.Err()
<add> default:
<add> size, err := l.DiffSize()
<add> if err == nil {
<add> if _, ok := layerRefs[l.ChainID()]; ok {
<add> allLayersSize += size
<add> } else {
<add> logrus.Warnf("found leaked image layer %v platform %s", l.ChainID(), platform)
<add> }
<ide> } else {
<del> logrus.Warnf("found leaked image layer %v", l.ChainID())
<add> logrus.Warnf("failed to get diff size for layer %v %s", l.ChainID(), platform)
<ide> }
<del> } else {
<del> logrus.Warnf("failed to get diff size for layer %v", l.ChainID())
<ide> }
<ide> }
<ide> }
<ide><path>daemon/getsize_unix.go
<ide> package daemon
<ide>
<ide> import (
<add> "runtime"
<add>
<ide> "github.com/Sirupsen/logrus"
<ide> )
<ide>
<ide> func (daemon *Daemon) getSize(containerID string) (int64, int64) {
<ide> err error
<ide> )
<ide>
<del> rwlayer, err := daemon.layerStore.GetRWLayer(containerID)
<add> rwlayer, err := daemon.stores[runtime.GOOS].layerStore.GetRWLayer(containerID)
<ide> if err != nil {
<ide> logrus.Errorf("Failed to compute size of container rootfs %v: %v", containerID, err)
<ide> return sizeRw, sizeRootfs
<ide> }
<del> defer daemon.layerStore.ReleaseRWLayer(rwlayer)
<add> defer daemon.stores[runtime.GOOS].layerStore.ReleaseRWLayer(rwlayer)
<ide>
<ide> sizeRw, err = rwlayer.Size()
<ide> if err != nil {
<ide> logrus.Errorf("Driver %s couldn't return diff size of container %s: %s",
<del> daemon.GraphDriverName(), containerID, err)
<add> daemon.GraphDriverName(runtime.GOOS), containerID, err)
<ide> // FIXME: GetSize should return an error. Not changing it now in case
<ide> // there is a side-effect.
<ide> sizeRw = -1
<ide><path>daemon/graphdriver/register/register_windows.go
<ide> package register
<ide>
<ide> import (
<del> // register the windows graph driver
<add> // register the windows graph drivers
<add> _ "github.com/docker/docker/daemon/graphdriver/lcow"
<ide> _ "github.com/docker/docker/daemon/graphdriver/windows"
<ide> )
<ide><path>daemon/graphdriver/windows/windows.go
<ide> func InitFilter(home string, options []string, uidMaps, gidMaps []idtools.IDMap)
<ide> return nil, fmt.Errorf("%s is on an ReFS volume - ReFS volumes are not supported", home)
<ide> }
<ide>
<add> if err := idtools.MkdirAllAs(home, 0700, 0, 0); err != nil {
<add> return nil, fmt.Errorf("windowsfilter failed to create '%s': %v", home, err)
<add> }
<add>
<ide> d := &Driver{
<ide> info: hcsshim.DriverInfo{
<ide> HomeDir: home,
<ide><path>daemon/image.go
<ide> func (e ErrImageDoesNotExist) Error() string {
<ide> return fmt.Sprintf("No such image: %s", reference.FamiliarString(ref))
<ide> }
<ide>
<del>// GetImageID returns an image ID corresponding to the image referred to by
<add>// GetImageIDAndPlatform returns an image ID and platform corresponding to the image referred to by
<ide> // refOrID.
<del>func (daemon *Daemon) GetImageID(refOrID string) (image.ID, error) {
<add>func (daemon *Daemon) GetImageIDAndPlatform(refOrID string) (image.ID, string, error) {
<ide> ref, err := reference.ParseAnyReference(refOrID)
<ide> if err != nil {
<del> return "", err
<add> return "", "", err
<ide> }
<ide> namedRef, ok := ref.(reference.Named)
<ide> if !ok {
<ide> digested, ok := ref.(reference.Digested)
<ide> if !ok {
<del> return "", ErrImageDoesNotExist{ref}
<add> return "", "", ErrImageDoesNotExist{ref}
<ide> }
<ide> id := image.IDFromDigest(digested.Digest())
<del> if _, err := daemon.imageStore.Get(id); err != nil {
<del> return "", ErrImageDoesNotExist{ref}
<add> for platform := range daemon.stores {
<add> if _, err = daemon.stores[platform].imageStore.Get(id); err == nil {
<add> return id, platform, nil
<add> }
<ide> }
<del> return id, nil
<add> return "", "", ErrImageDoesNotExist{ref}
<ide> }
<ide>
<del> if id, err := daemon.referenceStore.Get(namedRef); err == nil {
<del> return image.IDFromDigest(id), nil
<add> for platform := range daemon.stores {
<add> if id, err := daemon.stores[platform].referenceStore.Get(namedRef); err == nil {
<add> return image.IDFromDigest(id), platform, nil
<add> }
<ide> }
<ide>
<ide> // deprecated: repo:shortid https://github.com/docker/docker/pull/799
<ide> if tagged, ok := namedRef.(reference.Tagged); ok {
<ide> if tag := tagged.Tag(); stringid.IsShortID(stringid.TruncateID(tag)) {
<del> if id, err := daemon.imageStore.Search(tag); err == nil {
<del> for _, storeRef := range daemon.referenceStore.References(id.Digest()) {
<del> if storeRef.Name() == namedRef.Name() {
<del> return id, nil
<add> for platform := range daemon.stores {
<add> if id, err := daemon.stores[platform].imageStore.Search(tag); err == nil {
<add> for _, storeRef := range daemon.stores[platform].referenceStore.References(id.Digest()) {
<add> if storeRef.Name() == namedRef.Name() {
<add> return id, platform, nil
<add> }
<ide> }
<ide> }
<ide> }
<ide> }
<ide> }
<ide>
<ide> // Search based on ID
<del> if id, err := daemon.imageStore.Search(refOrID); err == nil {
<del> return id, nil
<add> for platform := range daemon.stores {
<add> if id, err := daemon.stores[platform].imageStore.Search(refOrID); err == nil {
<add> return id, platform, nil
<add> }
<ide> }
<ide>
<del> return "", ErrImageDoesNotExist{ref}
<add> return "", "", ErrImageDoesNotExist{ref}
<ide> }
<ide>
<ide> // GetImage returns an image corresponding to the image referred to by refOrID.
<ide> func (daemon *Daemon) GetImage(refOrID string) (*image.Image, error) {
<del> imgID, err := daemon.GetImageID(refOrID)
<add> imgID, platform, err := daemon.GetImageIDAndPlatform(refOrID)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<del> return daemon.imageStore.Get(imgID)
<add> return daemon.stores[platform].imageStore.Get(imgID)
<ide> }
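GetImageIDAndPlatform above resolves a reference by probing every per-platform store in turn and reporting which one matched. A stripped-down sketch of that search loop, with a toy store type in place of the daemon's image and reference stores:

package main

import (
	"errors"
	"fmt"
)

// imageStore is a toy stand-in: it only knows which image IDs it holds.
type imageStore struct{ ids map[string]bool }

func (s imageStore) Has(id string) bool { return s.ids[id] }

// resolve mirrors the shape of GetImageIDAndPlatform: try each platform's
// store and return the platform whose store knows about the image.
func resolve(stores map[string]imageStore, id string) (string, error) {
	for platform, store := range stores {
		if store.Has(id) {
			return platform, nil
		}
	}
	return "", errors.New("no such image: " + id)
}

func main() {
	stores := map[string]imageStore{
		"windows": {ids: map[string]bool{"sha256:aaa": true}},
		"linux":   {ids: map[string]bool{"sha256:bbb": true}},
	}
	platform, err := resolve(stores, "sha256:bbb")
	fmt.Println(platform, err) // linux <nil>
}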
<ide><path>daemon/image_delete.go
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> start := time.Now()
<ide> records := []types.ImageDeleteResponseItem{}
<ide>
<del> imgID, err := daemon.GetImageID(imageRef)
<add> imgID, platform, err := daemon.GetImageIDAndPlatform(imageRef)
<ide> if err != nil {
<ide> return nil, daemon.imageNotExistToErrcode(err)
<ide> }
<ide>
<del> repoRefs := daemon.referenceStore.References(imgID.Digest())
<add> repoRefs := daemon.stores[platform].referenceStore.References(imgID.Digest())
<ide>
<ide> var removedRepositoryRef bool
<ide> if !isImageIDPrefix(imgID.String(), imageRef) {
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> return nil, err
<ide> }
<ide>
<del> parsedRef, err = daemon.removeImageRef(parsedRef)
<add> parsedRef, err = daemon.removeImageRef(platform, parsedRef)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> daemon.LogImageEvent(imgID.String(), imgID.String(), "untag")
<ide> records = append(records, untaggedRecord)
<ide>
<del> repoRefs = daemon.referenceStore.References(imgID.Digest())
<add> repoRefs = daemon.stores[platform].referenceStore.References(imgID.Digest())
<ide>
<ide> // If a tag reference was removed and the only remaining
<ide> // references to the same repository are digest references,
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> remainingRefs := []reference.Named{}
<ide> for _, repoRef := range repoRefs {
<ide> if _, repoRefIsCanonical := repoRef.(reference.Canonical); repoRefIsCanonical && parsedRef.Name() == repoRef.Name() {
<del> if _, err := daemon.removeImageRef(repoRef); err != nil {
<add> if _, err := daemon.removeImageRef(platform, repoRef); err != nil {
<ide> return records, err
<ide> }
<ide>
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> if !force {
<ide> c |= conflictSoft &^ conflictActiveReference
<ide> }
<del> if conflict := daemon.checkImageDeleteConflict(imgID, c); conflict != nil {
<add> if conflict := daemon.checkImageDeleteConflict(imgID, platform, c); conflict != nil {
<ide> return nil, conflict
<ide> }
<ide>
<ide> for _, repoRef := range repoRefs {
<del> parsedRef, err := daemon.removeImageRef(repoRef)
<add> parsedRef, err := daemon.removeImageRef(platform, repoRef)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> }
<ide> }
<ide>
<del> if err := daemon.imageDeleteHelper(imgID, &records, force, prune, removedRepositoryRef); err != nil {
<add> if err := daemon.imageDeleteHelper(imgID, platform, &records, force, prune, removedRepositoryRef); err != nil {
<ide> return nil, err
<ide> }
<ide>
<ide> func (daemon *Daemon) getContainerUsingImage(imageID image.ID) *container.Contai
<ide> // repositoryRef must not be an image ID but a repository name followed by an
<ide> // optional tag or digest reference. If tag or digest is omitted, the default
<ide> // tag is used. Returns the resolved image reference and an error.
<del>func (daemon *Daemon) removeImageRef(ref reference.Named) (reference.Named, error) {
<add>func (daemon *Daemon) removeImageRef(platform string, ref reference.Named) (reference.Named, error) {
<ide> ref = reference.TagNameOnly(ref)
<ide>
<ide> // Ignore the boolean value returned, as far as we're concerned, this
<ide> // is an idempotent operation and it's okay if the reference didn't
<ide> // exist in the first place.
<del> _, err := daemon.referenceStore.Delete(ref)
<add> _, err := daemon.stores[platform].referenceStore.Delete(ref)
<ide>
<ide> return ref, err
<ide> }
<ide> func (daemon *Daemon) removeImageRef(ref reference.Named) (reference.Named, erro
<ide> // on the first encountered error. Removed references are logged to this
<ide> // daemon's event service. An "Untagged" types.ImageDeleteResponseItem is added to the
<ide> // given list of records.
<del>func (daemon *Daemon) removeAllReferencesToImageID(imgID image.ID, records *[]types.ImageDeleteResponseItem) error {
<del> imageRefs := daemon.referenceStore.References(imgID.Digest())
<add>func (daemon *Daemon) removeAllReferencesToImageID(imgID image.ID, platform string, records *[]types.ImageDeleteResponseItem) error {
<add> imageRefs := daemon.stores[platform].referenceStore.References(imgID.Digest())
<ide>
<ide> for _, imageRef := range imageRefs {
<del> parsedRef, err := daemon.removeImageRef(imageRef)
<add> parsedRef, err := daemon.removeImageRef(platform, imageRef)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (idc *imageDeleteConflict) Error() string {
<ide> // conflict is encountered, it will be returned immediately without deleting
<ide> // the image. If quiet is true, any encountered conflicts will be ignored and
<ide> // the function will return nil immediately without deleting the image.
<del>func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDeleteResponseItem, force, prune, quiet bool) error {
<add>func (daemon *Daemon) imageDeleteHelper(imgID image.ID, platform string, records *[]types.ImageDeleteResponseItem, force, prune, quiet bool) error {
<ide> // First, determine if this image has any conflicts. Ignore soft conflicts
<ide> // if force is true.
<ide> c := conflictHard
<ide> if !force {
<ide> c |= conflictSoft
<ide> }
<del> if conflict := daemon.checkImageDeleteConflict(imgID, c); conflict != nil {
<del> if quiet && (!daemon.imageIsDangling(imgID) || conflict.used) {
<add> if conflict := daemon.checkImageDeleteConflict(imgID, platform, c); conflict != nil {
<add> if quiet && (!daemon.imageIsDangling(imgID, platform) || conflict.used) {
<ide> // Ignore conflicts UNLESS the image is "dangling" or not being used in
<ide> // which case we want the user to know.
<ide> return nil
<ide> func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDe
<ide> return conflict
<ide> }
<ide>
<del> parent, err := daemon.imageStore.GetParent(imgID)
<add> parent, err := daemon.stores[platform].imageStore.GetParent(imgID)
<ide> if err != nil {
<ide> // There may be no parent
<ide> parent = ""
<ide> }
<ide>
<ide> // Delete all repository tag/digest references to this image.
<del> if err := daemon.removeAllReferencesToImageID(imgID, records); err != nil {
<add> if err := daemon.removeAllReferencesToImageID(imgID, platform, records); err != nil {
<ide> return err
<ide> }
<ide>
<del> removedLayers, err := daemon.imageStore.Delete(imgID)
<add> removedLayers, err := daemon.stores[platform].imageStore.Delete(imgID)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDe
<ide> // either running or stopped).
<ide> // Do not force prunings, but do so quietly (stopping on any encountered
<ide> // conflicts).
<del> return daemon.imageDeleteHelper(parent, records, false, true, true)
<add> return daemon.imageDeleteHelper(parent, platform, records, false, true, true)
<ide> }
<ide>
<ide> // checkImageDeleteConflict determines whether there are any conflicts
<ide> func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDe
<ide> // using the image. A soft conflict is any tags/digest referencing the given
<ide> // image or any stopped container using the image. If ignoreSoftConflicts is
<ide> // true, this function will not check for soft conflict conditions.
<del>func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, mask conflictType) *imageDeleteConflict {
<add>func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, platform string, mask conflictType) *imageDeleteConflict {
<ide> // Check if the image has any descendant images.
<del> if mask&conflictDependentChild != 0 && len(daemon.imageStore.Children(imgID)) > 0 {
<add> if mask&conflictDependentChild != 0 && len(daemon.stores[platform].imageStore.Children(imgID)) > 0 {
<ide> return &imageDeleteConflict{
<ide> hard: true,
<ide> imgID: imgID,
<ide> func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, mask conflictType
<ide> }
<ide>
<ide> // Check if any repository tags/digest reference this image.
<del> if mask&conflictActiveReference != 0 && len(daemon.referenceStore.References(imgID.Digest())) > 0 {
<add> if mask&conflictActiveReference != 0 && len(daemon.stores[platform].referenceStore.References(imgID.Digest())) > 0 {
<ide> return &imageDeleteConflict{
<ide> imgID: imgID,
<ide> message: "image is referenced in multiple repositories",
<ide> func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, mask conflictType
<ide> // imageIsDangling returns whether the given image is "dangling" which means
<ide> // that there are no repository references to the given image and it has no
<ide> // child images.
<del>func (daemon *Daemon) imageIsDangling(imgID image.ID) bool {
<del> return !(len(daemon.referenceStore.References(imgID.Digest())) > 0 || len(daemon.imageStore.Children(imgID)) > 0)
<add>func (daemon *Daemon) imageIsDangling(imgID image.ID, platform string) bool {
<add> return !(len(daemon.stores[platform].referenceStore.References(imgID.Digest())) > 0 || len(daemon.stores[platform].imageStore.Children(imgID)) > 0)
<ide> }
<ide><path>daemon/image_exporter.go
<ide> package daemon
<ide>
<ide> import (
<ide> "io"
<add> "runtime"
<ide>
<ide> "github.com/docker/docker/image/tarexport"
<ide> )
<ide> import (
<ide> // the same tag are exported. names is the set of tags to export, and
<ide> // outStream is the writer which the images are written to.
<ide> func (daemon *Daemon) ExportImage(names []string, outStream io.Writer) error {
<del> imageExporter := tarexport.NewTarExporter(daemon.imageStore, daemon.layerStore, daemon.referenceStore, daemon)
<add> // TODO @jhowardmsft LCOW. For now, assume it is the OS of the host
<add> imageExporter := tarexport.NewTarExporter(daemon.stores[runtime.GOOS].imageStore, daemon.stores[runtime.GOOS].layerStore, daemon.stores[runtime.GOOS].referenceStore, daemon)
<ide> return imageExporter.Save(names, outStream)
<ide> }
<ide>
<ide> // LoadImage uploads a set of images into the repository. This is the
<ide> // complement of ImageExport. The input stream is an uncompressed tar
<ide> // ball containing images and metadata.
<ide> func (daemon *Daemon) LoadImage(inTar io.ReadCloser, outStream io.Writer, quiet bool) error {
<del> imageExporter := tarexport.NewTarExporter(daemon.imageStore, daemon.layerStore, daemon.referenceStore, daemon)
<add> // TODO @jhowardmsft LCOW. For now, assume it is the OS of the host
<add> imageExporter := tarexport.NewTarExporter(daemon.stores[runtime.GOOS].imageStore, daemon.stores[runtime.GOOS].layerStore, daemon.stores[runtime.GOOS].referenceStore, daemon)
<ide> return imageExporter.Load(inTar, outStream, quiet)
<ide> }
<ide><path>daemon/image_history.go
<ide> package daemon
<ide>
<ide> import (
<ide> "fmt"
<add> "runtime"
<ide> "time"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> func (daemon *Daemon) ImageHistory(name string) ([]*image.HistoryResponseItem, e
<ide> return nil, err
<ide> }
<ide>
<add> // If the image OS isn't set, assume it's the host OS
<add> platform := img.OS
<add> if platform == "" {
<add> platform = runtime.GOOS
<add> }
<add>
<ide> history := []*image.HistoryResponseItem{}
<ide>
<ide> layerCounter := 0
<ide> func (daemon *Daemon) ImageHistory(name string) ([]*image.HistoryResponseItem, e
<ide> }
<ide>
<ide> rootFS.Append(img.RootFS.DiffIDs[layerCounter])
<del> l, err := daemon.layerStore.Get(rootFS.ChainID())
<add> l, err := daemon.stores[platform].layerStore.Get(rootFS.ChainID())
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> layerSize, err = l.DiffSize()
<del> layer.ReleaseAndLog(daemon.layerStore, l)
<add> layer.ReleaseAndLog(daemon.stores[platform].layerStore, l)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> func (daemon *Daemon) ImageHistory(name string) ([]*image.HistoryResponseItem, e
<ide> h.ID = id.String()
<ide>
<ide> var tags []string
<del> for _, r := range daemon.referenceStore.References(id.Digest()) {
<add> for _, r := range daemon.stores[platform].referenceStore.References(id.Digest()) {
<ide> if _, ok := r.(reference.NamedTagged); ok {
<ide> tags = append(tags, reference.FamiliarString(r))
<ide> }
<ide><path>daemon/image_inspect.go
<ide> package daemon
<ide>
<ide> import (
<add> "runtime"
<ide> "time"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> func (daemon *Daemon) LookupImage(name string) (*types.ImageInspect, error) {
<ide> return nil, errors.Wrapf(err, "no such image: %s", name)
<ide> }
<ide>
<del> refs := daemon.referenceStore.References(img.ID().Digest())
<add> // If the image OS isn't set, assume it's the host OS
<add> platform := img.OS
<add> if platform == "" {
<add> platform = runtime.GOOS
<add> }
<add>
<add> refs := daemon.stores[platform].referenceStore.References(img.ID().Digest())
<ide> repoTags := []string{}
<ide> repoDigests := []string{}
<ide> for _, ref := range refs {
<ide> func (daemon *Daemon) LookupImage(name string) (*types.ImageInspect, error) {
<ide> var layerMetadata map[string]string
<ide> layerID := img.RootFS.ChainID()
<ide> if layerID != "" {
<del> l, err := daemon.layerStore.Get(layerID)
<add> l, err := daemon.stores[platform].layerStore.Get(layerID)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<del> defer layer.ReleaseAndLog(daemon.layerStore, l)
<add> defer layer.ReleaseAndLog(daemon.stores[platform].layerStore, l)
<ide> size, err = l.Size()
<ide> if err != nil {
<ide> return nil, err
<ide> func (daemon *Daemon) LookupImage(name string) (*types.ImageInspect, error) {
<ide> Author: img.Author,
<ide> Config: img.Config,
<ide> Architecture: img.Architecture,
<del> Os: img.OS,
<add> Os: platform,
<ide> OsVersion: img.OSVersion,
<ide> Size: size,
<ide> VirtualSize: size, // TODO: field unused, deprecate
<ide> RootFS: rootFSToAPIType(img.RootFS),
<ide> }
<ide>
<del> imageInspect.GraphDriver.Name = daemon.GraphDriverName()
<del>
<add> imageInspect.GraphDriver.Name = daemon.GraphDriverName(platform)
<ide> imageInspect.GraphDriver.Data = layerMetadata
<ide>
<ide> return imageInspect, nil
<ide><path>daemon/image_pull.go
<ide> package daemon
<ide>
<ide> import (
<ide> "io"
<add> "runtime"
<ide> "strings"
<ide>
<ide> dist "github.com/docker/distribution"
<ide> func (daemon *Daemon) pullImageWithReference(ctx context.Context, ref reference.
<ide> close(writesDone)
<ide> }()
<ide>
<add> // ------------------------------------------------------------------------------
<add> // TODO @jhowardmsft LCOW. For now, use just the store for the host OS. This will
<add> // need some work to complete - we won't know the platform until after metadata
<add> // is pulled from the repository. This affects plugin as well to complete.
<add> // ------------------------------------------------------------------------------
<add>
<ide> imagePullConfig := &distribution.ImagePullConfig{
<ide> Config: distribution.Config{
<ide> MetaHeaders: metaHeaders,
<ide> AuthConfig: authConfig,
<ide> ProgressOutput: progress.ChanOutput(progressChan),
<ide> RegistryService: daemon.RegistryService,
<ide> ImageEventLogger: daemon.LogImageEvent,
<del> MetadataStore: daemon.distributionMetadataStore,
<del> ImageStore: distribution.NewImageConfigStoreFromStore(daemon.imageStore),
<del> ReferenceStore: daemon.referenceStore,
<add> MetadataStore: daemon.stores[runtime.GOOS].distributionMetadataStore,
<add> ImageStore: distribution.NewImageConfigStoreFromStore(daemon.stores[runtime.GOOS].imageStore),
<add> ReferenceStore: daemon.stores[runtime.GOOS].referenceStore,
<ide> },
<ide> DownloadManager: daemon.downloadManager,
<ide> Schema2Types: distribution.ImageTypes,
<ide><path>daemon/image_push.go
<ide> package daemon
<ide>
<ide> import (
<ide> "io"
<add> "runtime"
<ide>
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/distribution/reference"
<ide> func (daemon *Daemon) PushImage(ctx context.Context, image, tag string, metaHead
<ide> close(writesDone)
<ide> }()
<ide>
<add> // ------------------------------------------------------------------------------
<add> // TODO @jhowardmsft LCOW. For now, use just the store for the host OS. This will
<add> // need some work to complete.
<add> // ------------------------------------------------------------------------------
<add>
<ide> imagePushConfig := &distribution.ImagePushConfig{
<ide> Config: distribution.Config{
<ide> MetaHeaders: metaHeaders,
<ide> AuthConfig: authConfig,
<ide> ProgressOutput: progress.ChanOutput(progressChan),
<ide> RegistryService: daemon.RegistryService,
<ide> ImageEventLogger: daemon.LogImageEvent,
<del> MetadataStore: daemon.distributionMetadataStore,
<del> ImageStore: distribution.NewImageConfigStoreFromStore(daemon.imageStore),
<del> ReferenceStore: daemon.referenceStore,
<add> MetadataStore: daemon.stores[runtime.GOOS].distributionMetadataStore,
<add> ImageStore: distribution.NewImageConfigStoreFromStore(daemon.stores[runtime.GOOS].imageStore),
<add> ReferenceStore: daemon.stores[runtime.GOOS].referenceStore,
<ide> },
<ide> ConfigMediaType: schema2.MediaTypeImageConfig,
<del> LayerStore: distribution.NewLayerProviderFromStore(daemon.layerStore),
<add> LayerStore: distribution.NewLayerProviderFromStore(daemon.stores[runtime.GOOS].layerStore),
<ide> TrustKey: daemon.trustKey,
<ide> UploadManager: daemon.uploadManager,
<ide> }
<ide><path>daemon/image_tag.go
<ide> import (
<ide> // TagImage creates the tag specified by newTag, pointing to the image named
<ide> // imageName (alternatively, imageName can also be an image ID).
<ide> func (daemon *Daemon) TagImage(imageName, repository, tag string) error {
<del> imageID, err := daemon.GetImageID(imageName)
<add> imageID, platform, err := daemon.GetImageIDAndPlatform(imageName)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (daemon *Daemon) TagImage(imageName, repository, tag string) error {
<ide> }
<ide> }
<ide>
<del> return daemon.TagImageWithReference(imageID, newTag)
<add> return daemon.TagImageWithReference(imageID, platform, newTag)
<ide> }
<ide>
<ide> // TagImageWithReference adds the given reference to the image ID provided.
<del>func (daemon *Daemon) TagImageWithReference(imageID image.ID, newTag reference.Named) error {
<del> if err := daemon.referenceStore.AddTag(newTag, imageID.Digest(), true); err != nil {
<add>func (daemon *Daemon) TagImageWithReference(imageID image.ID, platform string, newTag reference.Named) error {
<add> if err := daemon.stores[platform].referenceStore.AddTag(newTag, imageID.Digest(), true); err != nil {
<ide> return err
<ide> }
<ide>
<ide><path>daemon/images.go
<ide> import (
<ide> "github.com/docker/docker/container"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<add> "github.com/docker/docker/pkg/system"
<ide> )
<ide>
<ide> var acceptedImageFilterTags = map[string]bool{
<ide> func (r byCreated) Less(i, j int) bool { return r[i].Created < r[j].Created }
<ide>
<ide> // Map returns a map of all images in the ImageStore
<ide> func (daemon *Daemon) Map() map[image.ID]*image.Image {
<del> return daemon.imageStore.Map()
<add> // TODO @jhowardmsft LCOW. This will need work to enumerate the stores for all platforms.
<add> platform := runtime.GOOS
<add> if platform == "windows" && system.LCOWSupported() {
<add> platform = "linux"
<add> }
<add> return daemon.stores[platform].imageStore.Map()
<ide> }
<ide>
<ide> // Images returns a filtered list of images. filterArgs is a JSON-encoded set
<ide> func (daemon *Daemon) Map() map[image.ID]*image.Image {
<ide> // named all controls whether all images in the graph are filtered, or just
<ide> // the heads.
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs bool) ([]*types.ImageSummary, error) {
<add>
<add> // TODO @jhowardmsft LCOW. This will need work to enumerate the stores for all platforms.
<add> platform := runtime.GOOS
<add> if platform == "windows" && system.LCOWSupported() {
<add> platform = "linux"
<add> }
<add>
<ide> var (
<ide> allImages map[image.ID]*image.Image
<ide> err error
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> }
<ide> }
<ide> if danglingOnly {
<del> allImages = daemon.imageStore.Heads()
<add> allImages = daemon.stores[platform].imageStore.Heads()
<ide> } else {
<del> allImages = daemon.imageStore.Map()
<add> allImages = daemon.stores[platform].imageStore.Map()
<ide> }
<ide>
<ide> var beforeFilter, sinceFilter *image.Image
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> layerID := img.RootFS.ChainID()
<ide> var size int64
<ide> if layerID != "" {
<del> l, err := daemon.layerStore.Get(layerID)
<add> l, err := daemon.stores[platform].layerStore.Get(layerID)
<ide> if err != nil {
<ide> // The layer may have been deleted between the call to `Map()` or
<ide> // `Heads()` and the call to `Get()`, so we just ignore this error
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> }
<ide>
<ide> size, err = l.Size()
<del> layer.ReleaseAndLog(daemon.layerStore, l)
<add> layer.ReleaseAndLog(daemon.stores[platform].layerStore, l)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> }
<ide>
<ide> newImage := newImage(img, size)
<ide>
<del> for _, ref := range daemon.referenceStore.References(id.Digest()) {
<add> for _, ref := range daemon.stores[platform].referenceStore.References(id.Digest()) {
<ide> if imageFilters.Include("reference") {
<ide> var found bool
<ide> var matchErr error
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> }
<ide> }
<ide> if newImage.RepoDigests == nil && newImage.RepoTags == nil {
<del> if all || len(daemon.imageStore.Children(id)) == 0 {
<add> if all || len(daemon.stores[platform].imageStore.Children(id)) == 0 {
<ide>
<ide> if imageFilters.Include("dangling") && !danglingOnly {
<ide> //dangling=false case, so dangling image is not needed
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> // lazily init variables
<ide> if imagesMap == nil {
<ide> allContainers = daemon.List()
<del> allLayers = daemon.layerStore.Map()
<add> allLayers = daemon.stores[platform].layerStore.Map()
<ide> imagesMap = make(map[*image.Image]*types.ImageSummary)
<ide> layerRefs = make(map[layer.ChainID]int)
<ide> }
<ide> func (daemon *Daemon) Images(imageFilters filters.Args, all bool, withExtraAttrs
<ide> // The existing image(s) is not destroyed.
<ide> // If no parent is specified, a new image with the diff of all the specified image's layers merged into a new layer that has no parents.
<ide> func (daemon *Daemon) SquashImage(id, parent string) (string, error) {
<del> img, err := daemon.imageStore.Get(image.ID(id))
<add>
<add> var (
<add> img *image.Image
<add> err error
<add> )
<add> for _, ds := range daemon.stores {
<add> if img, err = ds.imageStore.Get(image.ID(id)); err == nil {
<add> break
<add> }
<add> }
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide>
<ide> var parentImg *image.Image
<ide> var parentChainID layer.ChainID
<ide> if len(parent) != 0 {
<del> parentImg, err = daemon.imageStore.Get(image.ID(parent))
<add> parentImg, err = daemon.stores[img.Platform()].imageStore.Get(image.ID(parent))
<ide> if err != nil {
<ide> return "", errors.Wrap(err, "error getting specified parent layer")
<ide> }
<ide> func (daemon *Daemon) SquashImage(id, parent string) (string, error) {
<ide> parentImg = &image.Image{RootFS: rootFS}
<ide> }
<ide>
<del> l, err := daemon.layerStore.Get(img.RootFS.ChainID())
<add> l, err := daemon.stores[img.Platform()].layerStore.Get(img.RootFS.ChainID())
<ide> if err != nil {
<ide> return "", errors.Wrap(err, "error getting image layer")
<ide> }
<del> defer daemon.layerStore.Release(l)
<add> defer daemon.stores[img.Platform()].layerStore.Release(l)
<ide>
<ide> ts, err := l.TarStreamFrom(parentChainID)
<ide> if err != nil {
<ide> return "", errors.Wrapf(err, "error getting tar stream to parent")
<ide> }
<ide> defer ts.Close()
<ide>
<del> // To support LCOW, Windows needs to pass the platform into the store when registering the layer.
<del> platform := layer.Platform("")
<del> if runtime.GOOS == "windows" {
<del> platform = l.Platform()
<del> }
<del>
<del> newL, err := daemon.layerStore.Register(ts, parentChainID, platform)
<add> newL, err := daemon.stores[img.Platform()].layerStore.Register(ts, parentChainID, layer.Platform(img.Platform()))
<ide> if err != nil {
<ide> return "", errors.Wrap(err, "error registering layer")
<ide> }
<del> defer daemon.layerStore.Release(newL)
<add> defer daemon.stores[img.Platform()].layerStore.Release(newL)
<ide>
<ide> var newImage image.Image
<ide> newImage = *img
<ide> func (daemon *Daemon) SquashImage(id, parent string) (string, error) {
<ide> return "", errors.Wrap(err, "error marshalling image config")
<ide> }
<ide>
<del> newImgID, err := daemon.imageStore.Create(b)
<add> newImgID, err := daemon.stores[img.Platform()].imageStore.Create(b)
<ide> if err != nil {
<ide> return "", errors.Wrap(err, "error creating new image after squash")
<ide> }
<ide><path>daemon/import.go
<ide> func (daemon *Daemon) ImportImage(src string, repository, tag string, msg string
<ide> // but for Linux images, there's no reason it couldn't. However it
<ide> // would need another CLI flag as there's no meta-data indicating
<ide> // the OS of the thing being imported.
<del> l, err := daemon.layerStore.Register(inflatedLayerData, "", "")
<add> l, err := daemon.stores[runtime.GOOS].layerStore.Register(inflatedLayerData, "", "")
<ide> if err != nil {
<ide> return err
<ide> }
<del> defer layer.ReleaseAndLog(daemon.layerStore, l)
<add> defer layer.ReleaseAndLog(daemon.stores[runtime.GOOS].layerStore, l) // TODO LCOW @jhowardmsft as for above comment
<ide>
<ide> created := time.Now().UTC()
<ide> imgConfig, err := json.Marshal(&image.Image{
<ide> V1Image: image.V1Image{
<ide> DockerVersion: dockerversion.Version,
<ide> Config: config,
<ide> Architecture: runtime.GOARCH,
<del> OS: runtime.GOOS,
<add> OS: runtime.GOOS, // TODO LCOW @jhowardmsft as for above comment
<ide> Created: created,
<ide> Comment: msg,
<ide> },
<ide> func (daemon *Daemon) ImportImage(src string, repository, tag string, msg string
<ide> return err
<ide> }
<ide>
<del> id, err := daemon.imageStore.Create(imgConfig)
<add> // TODO @jhowardmsft LCOW - Again, assume the OS of the host for now
<add> id, err := daemon.stores[runtime.GOOS].imageStore.Create(imgConfig)
<ide> if err != nil {
<ide> return err
<ide> }
<ide>
<ide> // FIXME: connect with commit code and call refstore directly
<ide> if newRef != nil {
<del> if err := daemon.TagImageWithReference(id, newRef); err != nil {
<add> // TODO @jhowardmsft LCOW - Again, assume the OS of the host for now
<add> if err := daemon.TagImageWithReference(id, runtime.GOOS, newRef); err != nil {
<ide> return err
<ide> }
<ide> }
<ide><path>daemon/info.go
<ide> import (
<ide> "fmt"
<ide> "os"
<ide> "runtime"
<add> "strings"
<ide> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> func (daemon *Daemon) SystemInfo() (*types.Info, error) {
<ide> securityOptions = append(securityOptions, "name=userns")
<ide> }
<ide>
<add> imageCount := 0
<add> drivers := ""
<add> for p, ds := range daemon.stores {
<add> imageCount += len(ds.imageStore.Map())
<add> drivers += daemon.GraphDriverName(p)
<add> if len(daemon.stores) > 1 {
<add> drivers += fmt.Sprintf(" (%s) ", p)
<add> }
<add> }
<add>
<add> // TODO @jhowardmsft LCOW support. For now, hard-code the platform shown for the driver status
<add> p := runtime.GOOS
<add> if p == "windows" && system.LCOWSupported() {
<add> p = "linux"
<add> }
<add>
<add> drivers = strings.TrimSpace(drivers)
<ide> v := &types.Info{
<ide> ID: daemon.ID,
<ide> Containers: int(cRunning + cPaused + cStopped),
<ide> ContainersRunning: int(cRunning),
<ide> ContainersPaused: int(cPaused),
<ide> ContainersStopped: int(cStopped),
<del> Images: len(daemon.imageStore.Map()),
<del> Driver: daemon.GraphDriverName(),
<del> DriverStatus: daemon.layerStore.DriverStatus(),
<add> Images: imageCount,
<add> Driver: drivers,
<add> DriverStatus: daemon.stores[p].layerStore.DriverStatus(),
<ide> Plugins: daemon.showPluginsInfo(),
<ide> IPv4Forwarding: !sysInfo.IPv4ForwardingDisabled,
<ide> BridgeNfIptables: !sysInfo.BridgeNFCallIPTablesDisabled,
<ide><path>daemon/inspect.go
<ide> func (daemon *Daemon) getInspectData(container *container.Container) (*types.Con
<ide> Name: container.Name,
<ide> RestartCount: container.RestartCount,
<ide> Driver: container.Driver,
<add> Platform: container.Platform,
<ide> MountLabel: container.MountLabel,
<ide> ProcessLabel: container.ProcessLabel,
<ide> ExecIDs: container.GetExecIDs(),
<ide><path>daemon/list.go
<ide> func (daemon *Daemon) foldFilter(config *types.ContainerListOptions) (*listConte
<ide> if psFilters.Include("ancestor") {
<ide> ancestorFilter = true
<ide> psFilters.WalkValues("ancestor", func(ancestor string) error {
<del> id, err := daemon.GetImageID(ancestor)
<add> id, platform, err := daemon.GetImageIDAndPlatform(ancestor)
<ide> if err != nil {
<ide> logrus.Warnf("Error while looking up for image %v", ancestor)
<ide> return nil
<ide> func (daemon *Daemon) foldFilter(config *types.ContainerListOptions) (*listConte
<ide> return nil
<ide> }
<ide> // Then walk down the graph and put the imageIds in imagesFilter
<del> populateImageFilterByParents(imagesFilter, id, daemon.imageStore.Children)
<add> populateImageFilterByParents(imagesFilter, id, daemon.stores[platform].imageStore.Children)
<ide> return nil
<ide> })
<ide> }
<ide> func (daemon *Daemon) transformContainer(container *container.Container, ctx *li
<ide>
<ide> image := container.Config.Image // if possible keep the original ref
<ide> if image != container.ImageID.String() {
<del> id, err := daemon.GetImageID(image)
<add> id, _, err := daemon.GetImageIDAndPlatform(image)
<ide> if _, isDNE := err.(ErrImageDoesNotExist); err != nil && !isDNE {
<ide> return nil, err
<ide> }
<ide><path>daemon/prune.go
<ide> package daemon
<ide> import (
<ide> "fmt"
<ide> "regexp"
<add> "runtime"
<ide> "sync/atomic"
<ide> "time"
<ide>
<ide> import (
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/directory"
<add> "github.com/docker/docker/pkg/system"
<ide> "github.com/docker/docker/runconfig"
<ide> "github.com/docker/docker/volume"
<ide> "github.com/docker/libnetwork"
<ide> func (daemon *Daemon) VolumesPrune(ctx context.Context, pruneFilters filters.Arg
<ide>
<ide> // ImagesPrune removes unused images
<ide> func (daemon *Daemon) ImagesPrune(ctx context.Context, pruneFilters filters.Args) (*types.ImagesPruneReport, error) {
<add> // TODO @jhowardmsft LCOW Support: This will need revisiting later.
<add> platform := runtime.GOOS
<add> if platform == "windows" && system.LCOWSupported() {
<add> platform = "linux"
<add> }
<add>
<ide> if !atomic.CompareAndSwapInt32(&daemon.pruneRunning, 0, 1) {
<ide> return nil, errPruneRunning
<ide> }
<ide> func (daemon *Daemon) ImagesPrune(ctx context.Context, pruneFilters filters.Args
<ide>
<ide> var allImages map[image.ID]*image.Image
<ide> if danglingOnly {
<del> allImages = daemon.imageStore.Heads()
<add> allImages = daemon.stores[platform].imageStore.Heads()
<ide> } else {
<del> allImages = daemon.imageStore.Map()
<add> allImages = daemon.stores[platform].imageStore.Map()
<ide> }
<ide> allContainers := daemon.List()
<ide> imageRefs := map[string]bool{}
<ide> func (daemon *Daemon) ImagesPrune(ctx context.Context, pruneFilters filters.Args
<ide> }
<ide>
<ide> // Filter intermediary images and get their unique size
<del> allLayers := daemon.layerStore.Map()
<add> allLayers := daemon.stores[platform].layerStore.Map()
<ide> topImages := map[image.ID]*image.Image{}
<ide> for id, img := range allImages {
<ide> select {
<ide> case <-ctx.Done():
<ide> return nil, ctx.Err()
<ide> default:
<ide> dgst := digest.Digest(id)
<del> if len(daemon.referenceStore.References(dgst)) == 0 && len(daemon.imageStore.Children(id)) != 0 {
<add> if len(daemon.stores[platform].referenceStore.References(dgst)) == 0 && len(daemon.stores[platform].imageStore.Children(id)) != 0 {
<ide> continue
<ide> }
<ide> if !until.IsZero() && img.Created.After(until) {
<ide> deleteImagesLoop:
<ide> }
<ide>
<ide> deletedImages := []types.ImageDeleteResponseItem{}
<del> refs := daemon.referenceStore.References(dgst)
<add> refs := daemon.stores[platform].referenceStore.References(dgst)
<ide> if len(refs) > 0 {
<ide> shouldDelete := !danglingOnly
<ide> if !shouldDelete {
<ide><path>daemon/start.go
<ide> func (daemon *Daemon) Cleanup(container *container.Container) {
<ide> if err := daemon.conditionalUnmountOnCleanup(container); err != nil {
<ide> // FIXME: remove once reference counting for graphdrivers has been refactored
<ide> // Ensure that all the mounts are gone
<del> if mountid, err := daemon.layerStore.GetMountID(container.ID); err == nil {
<add> if mountid, err := daemon.stores[container.Platform].layerStore.GetMountID(container.ID); err == nil {
<ide> daemon.cleanupMountsByID(mountid)
<ide> }
<ide> }
<ide><path>daemon/start_windows.go
<ide> func (daemon *Daemon) getLibcontainerdCreateOptions(container *container.Contain
<ide> layerOpts.LayerFolderPath = m["dir"]
<ide>
<ide> // Generate the layer paths of the layer options
<del> img, err := daemon.imageStore.Get(container.ImageID)
<add> img, err := daemon.stores[container.Platform].imageStore.Get(container.ImageID)
<ide> if err != nil {
<ide> return nil, fmt.Errorf("failed to graph.Get on ImageID %s - %s", container.ImageID, err)
<ide> }
<ide> // Get the layer path for each layer.
<ide> max := len(img.RootFS.DiffIDs)
<ide> for i := 1; i <= max; i++ {
<ide> img.RootFS.DiffIDs = img.RootFS.DiffIDs[:i]
<del> layerPath, err := layer.GetLayerPath(daemon.layerStore, img.RootFS.ChainID())
<add> layerPath, err := layer.GetLayerPath(daemon.stores[container.Platform].layerStore, img.RootFS.ChainID())
<ide> if err != nil {
<del> return nil, fmt.Errorf("failed to get layer path from graphdriver %s for ImageID %s - %s", daemon.layerStore, img.RootFS.ChainID(), err)
<add> return nil, fmt.Errorf("failed to get layer path from graphdriver %s for ImageID %s - %s", daemon.stores[container.Platform].layerStore, img.RootFS.ChainID(), err)
<ide> }
<ide> // Reverse order, expecting parent most first
<ide> layerOpts.LayerPaths = append([]string{layerPath}, layerOpts.LayerPaths...)
<ide><path>distribution/metadata/metadata.go
<ide> type Store interface {
<ide> type FSMetadataStore struct {
<ide> sync.RWMutex
<ide> basePath string
<add> platform string
<ide> }
<ide>
<ide> // NewFSMetadataStore creates a new filesystem-based metadata store.
<del>func NewFSMetadataStore(basePath string) (*FSMetadataStore, error) {
<add>func NewFSMetadataStore(basePath, platform string) (*FSMetadataStore, error) {
<ide> if err := os.MkdirAll(basePath, 0700); err != nil {
<ide> return nil, err
<ide> }
<ide> return &FSMetadataStore{
<ide> basePath: basePath,
<add> platform: platform,
<ide> }, nil
<ide> }
<ide>
<ide><path>distribution/metadata/v1_id_service_test.go
<ide> package metadata
<ide> import (
<ide> "io/ioutil"
<ide> "os"
<add> "runtime"
<ide> "testing"
<ide>
<ide> "github.com/docker/docker/layer"
<ide> func TestV1IDService(t *testing.T) {
<ide> }
<ide> defer os.RemoveAll(tmpDir)
<ide>
<del> metadataStore, err := NewFSMetadataStore(tmpDir)
<add> metadataStore, err := NewFSMetadataStore(tmpDir, runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("could not create metadata store: %v", err)
<ide> }
<ide><path>distribution/metadata/v2_metadata_service_test.go
<ide> import (
<ide> "math/rand"
<ide> "os"
<ide> "reflect"
<add> "runtime"
<ide> "testing"
<ide>
<ide> "github.com/docker/docker/layer"
<ide> func TestV2MetadataService(t *testing.T) {
<ide> }
<ide> defer os.RemoveAll(tmpDir)
<ide>
<del> metadataStore, err := NewFSMetadataStore(tmpDir)
<add> metadataStore, err := NewFSMetadataStore(tmpDir, runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("could not create metadata store: %v", err)
<ide> }
<ide><path>distribution/xfer/download.go
<ide> import (
<ide> "errors"
<ide> "fmt"
<ide> "io"
<add> "runtime"
<ide> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> const maxDownloadAttempts = 5
<ide> // registers and downloads those, taking into account dependencies between
<ide> // layers.
<ide> type LayerDownloadManager struct {
<del> layerStore layer.Store
<add> layerStores map[string]layer.Store
<ide> tm TransferManager
<ide> waitDuration time.Duration
<ide> }
<ide> func (ldm *LayerDownloadManager) SetConcurrency(concurrency int) {
<ide> }
<ide>
<ide> // NewLayerDownloadManager returns a new LayerDownloadManager.
<del>func NewLayerDownloadManager(layerStore layer.Store, concurrencyLimit int, options ...func(*LayerDownloadManager)) *LayerDownloadManager {
<add>func NewLayerDownloadManager(layerStores map[string]layer.Store, concurrencyLimit int, options ...func(*LayerDownloadManager)) *LayerDownloadManager {
<ide> manager := LayerDownloadManager{
<del> layerStore: layerStore,
<add> layerStores: layerStores,
<ide> tm: NewTransferManager(concurrencyLimit),
<ide> waitDuration: time.Second,
<ide> }
<ide> func (ldm *LayerDownloadManager) Download(ctx context.Context, initialRootFS ima
<ide> downloadsByKey = make(map[string]*downloadTransfer)
<ide> )
<ide>
<add> // Assume that the platform is the host OS if blank
<add> if platform == "" {
<add> platform = layer.Platform(runtime.GOOS)
<add> }
<add>
<ide> rootFS := initialRootFS
<ide> for _, descriptor := range layers {
<ide> key := descriptor.Key()
<ide> func (ldm *LayerDownloadManager) Download(ctx context.Context, initialRootFS ima
<ide> if err == nil {
<ide> getRootFS := rootFS
<ide> getRootFS.Append(diffID)
<del> l, err := ldm.layerStore.Get(getRootFS.ChainID())
<add> l, err := ldm.layerStores[string(platform)].Get(getRootFS.ChainID())
<ide> if err == nil {
<ide> // Layer already exists.
<ide> logrus.Debugf("Layer already exists: %s", descriptor.ID())
<ide> progress.Update(progressOutput, descriptor.ID(), "Already exists")
<ide> if topLayer != nil {
<del> layer.ReleaseAndLog(ldm.layerStore, topLayer)
<add> layer.ReleaseAndLog(ldm.layerStores[string(platform)], topLayer)
<ide> }
<ide> topLayer = l
<ide> missingLayer = false
<ide> func (ldm *LayerDownloadManager) Download(ctx context.Context, initialRootFS ima
<ide> if topDownload == nil {
<ide> return rootFS, func() {
<ide> if topLayer != nil {
<del> layer.ReleaseAndLog(ldm.layerStore, topLayer)
<add> layer.ReleaseAndLog(ldm.layerStores[string(platform)], topLayer)
<ide> }
<ide> }, nil
<ide> }
<ide> func (ldm *LayerDownloadManager) Download(ctx context.Context, initialRootFS ima
<ide>
<ide> defer func() {
<ide> if topLayer != nil {
<del> layer.ReleaseAndLog(ldm.layerStore, topLayer)
<add> layer.ReleaseAndLog(ldm.layerStores[string(platform)], topLayer)
<ide> }
<ide> }()
<ide>
<ide> func (ldm *LayerDownloadManager) makeDownloadFunc(descriptor DownloadDescriptor,
<ide> return func(progressChan chan<- progress.Progress, start <-chan struct{}, inactive chan<- struct{}) Transfer {
<ide> d := &downloadTransfer{
<ide> Transfer: NewTransfer(),
<del> layerStore: ldm.layerStore,
<add> layerStore: ldm.layerStores[string(platform)],
<ide> }
<ide>
<ide> go func() {
<ide> func (ldm *LayerDownloadManager) makeDownloadFuncFromDownload(descriptor Downloa
<ide> return func(progressChan chan<- progress.Progress, start <-chan struct{}, inactive chan<- struct{}) Transfer {
<ide> d := &downloadTransfer{
<ide> Transfer: NewTransfer(),
<del> layerStore: ldm.layerStore,
<add> layerStore: ldm.layerStores[string(platform)],
<ide> }
<ide>
<ide> go func() {
<ide><path>reference/store.go
<ide> type store struct {
<ide> // referencesByIDCache is a cache of references indexed by ID, to speed
<ide> // up References.
<ide> referencesByIDCache map[digest.Digest]map[string]reference.Named
<add> // platform is the container target platform for this store (which may be
<add> // different from the host operating system)
<add> platform string
<ide> }
<ide>
<ide> // Repository maps tags to digests. The key is a stringified Reference,
<ide> func (a lexicalAssociations) Less(i, j int) bool {
<ide>
<ide> // NewReferenceStore creates a new reference store, tied to a file path where
<ide> // the set of references are serialized in JSON format.
<del>func NewReferenceStore(jsonPath string) (Store, error) {
<add>func NewReferenceStore(jsonPath, platform string) (Store, error) {
<ide> abspath, err := filepath.Abs(jsonPath)
<ide> if err != nil {
<ide> return nil, err
<ide> func NewReferenceStore(jsonPath string) (Store, error) {
<ide> jsonPath: abspath,
<ide> Repositories: make(map[string]repository),
<ide> referencesByIDCache: make(map[digest.Digest]map[string]reference.Named),
<add> platform: platform,
<ide> }
<ide> // Load the json file if it exists, otherwise create it.
<ide> if err := store.reload(); os.IsNotExist(err) {
<ide><path>reference/store_test.go
<ide> import (
<ide> "io/ioutil"
<ide> "os"
<ide> "path/filepath"
<add> "runtime"
<ide> "strings"
<ide> "testing"
<ide>
<ide> func TestLoad(t *testing.T) {
<ide> }
<ide> jsonFile.Close()
<ide>
<del> store, err := NewReferenceStore(jsonFile.Name())
<add> store, err := NewReferenceStore(jsonFile.Name(), runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("error creating tag store: %v", err)
<ide> }
<ide> func TestSave(t *testing.T) {
<ide> jsonFile.Close()
<ide> defer os.RemoveAll(jsonFile.Name())
<ide>
<del> store, err := NewReferenceStore(jsonFile.Name())
<add> store, err := NewReferenceStore(jsonFile.Name(), runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("error creating tag store: %v", err)
<ide> }
<ide> func TestAddDeleteGet(t *testing.T) {
<ide> jsonFile.Close()
<ide> defer os.RemoveAll(jsonFile.Name())
<ide>
<del> store, err := NewReferenceStore(jsonFile.Name())
<add> store, err := NewReferenceStore(jsonFile.Name(), runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("error creating tag store: %v", err)
<ide> }
<ide> func TestInvalidTags(t *testing.T) {
<ide> tmpDir, err := ioutil.TempDir("", "tag-store-test")
<ide> defer os.RemoveAll(tmpDir)
<ide>
<del> store, err := NewReferenceStore(filepath.Join(tmpDir, "repositories.json"))
<add> store, err := NewReferenceStore(filepath.Join(tmpDir, "repositories.json"), runtime.GOOS)
<ide> if err != nil {
<ide> t.Fatalf("error creating tag store: %v", err)
<ide> } | 40 |
Javascript | Javascript | add back accessibilityStates until next release | 69020a8e877d68af2fec0becd008f3a48c3af8e7
<ide> const ReactNativeViewConfig = {
<ide> accessibilityLabel: true,
<ide> accessibilityLiveRegion: true,
<ide> accessibilityRole: true,
<add> accessibilityStates: true, // TODO: Can be removed after next release
<ide> accessibilityState: true,
<ide> accessibilityViewIsModal: true,
<ide> accessible: true, | 1 |
Ruby | Ruby | use errors[field] instead of errors.on(field) | 5267addd4f986c89df3d31f35e046abc3b1fbe26 | <ide><path>actionpack/lib/action_view/helpers/active_record_helper.rb
<ide> def error_message_on(object, method, *args)
<ide> options.reverse_merge!(:prepend_text => '', :append_text => '', :css_class => 'formError')
<ide>
<ide> if (obj = (object.respond_to?(:errors) ? object : instance_variable_get("@#{object}"))) &&
<del> (errors = obj.errors.on(method))
<add> (errors = obj.errors[method])
<ide> content_tag("div",
<del> "#{options[:prepend_text]}#{ERB::Util.html_escape(errors.is_a?(Array) ? errors.first : errors)}#{options[:append_text]}",
<add> "#{options[:prepend_text]}#{ERB::Util.html_escape(errors.first)}#{options[:append_text]}",
<ide> :class => options[:css_class]
<ide> )
<ide> else
<ide> def to_tag(options = {})
<ide> end
<ide> end
<ide>
<del> alias_method :tag_without_error_wrapping, :tag
<del> def tag(name, options)
<del> if object.respond_to?(:errors) && object.errors.respond_to?(:on)
<del> error_wrapping(tag_without_error_wrapping(name, options), object.errors.on(@method_name))
<del> else
<del> tag_without_error_wrapping(name, options)
<del> end
<del> end
<del>
<del> alias_method :content_tag_without_error_wrapping, :content_tag
<del> def content_tag(name, value, options)
<del> if object.respond_to?(:errors) && object.errors.respond_to?(:on)
<del> error_wrapping(content_tag_without_error_wrapping(name, value, options), object.errors.on(@method_name))
<del> else
<del> content_tag_without_error_wrapping(name, value, options)
<del> end
<del> end
<del>
<del> alias_method :to_date_select_tag_without_error_wrapping, :to_date_select_tag
<del> def to_date_select_tag(options = {}, html_options = {})
<del> if object.respond_to?(:errors) && object.errors.respond_to?(:on)
<del> error_wrapping(to_date_select_tag_without_error_wrapping(options, html_options), object.errors.on(@method_name))
<del> else
<del> to_date_select_tag_without_error_wrapping(options, html_options)
<del> end
<del> end
<del>
<del> alias_method :to_datetime_select_tag_without_error_wrapping, :to_datetime_select_tag
<del> def to_datetime_select_tag(options = {}, html_options = {})
<del> if object.respond_to?(:errors) && object.errors.respond_to?(:on)
<del> error_wrapping(to_datetime_select_tag_without_error_wrapping(options, html_options), object.errors.on(@method_name))
<del> else
<del> to_datetime_select_tag_without_error_wrapping(options, html_options)
<add> %w(tag content_tag to_date_select_tag to_datetime_select_tag to_time_select_tag).each do |meth|
<add> without = "#{meth}_without_error_wrapping"
<add> define_method "#{meth}_with_error_wrapping" do |*args|
<add> error_wrapping(send(without, *args))
<ide> end
<add> alias_method_chain meth, :error_wrapping
<ide> end
<ide>
<del> alias_method :to_time_select_tag_without_error_wrapping, :to_time_select_tag
<del> def to_time_select_tag(options = {}, html_options = {})
<del> if object.respond_to?(:errors) && object.errors.respond_to?(:on)
<del> error_wrapping(to_time_select_tag_without_error_wrapping(options, html_options), object.errors.on(@method_name))
<add> def error_wrapping(html_tag)
<add> if object.respond_to?(:errors) && object.errors.respond_to?(:full_messages) && object.errors[@method_name].any?
<add> Base.field_error_proc.call(html_tag, self)
<ide> else
<del> to_time_select_tag_without_error_wrapping(options, html_options)
<add> html_tag
<ide> end
<ide> end
<ide>
<del> def error_wrapping(html_tag, has_error)
<del> has_error ? Base.field_error_proc.call(html_tag, self) : html_tag
<del> end
<del>
<del> def error_message
<del> object.errors.on(@method_name)
<del> end
<del>
<ide> def column_type
<ide> object.send(:column_for_attribute, @method_name).type
<ide> end
<ide><path>actionpack/test/template/active_record_helper_test.rb
<ide> def full_messages
<ide> ["Author name can't be <em>empty</em>"]
<ide> end
<ide>
<del> def on(field)
<del> "can't be <em>empty</em>"
<add> def [](field)
<add> ["can't be <em>empty</em>"]
<ide> end
<ide> end
<ide>
<ide> def setup_post
<ide> @post = Post.new
<ide> def @post.errors
<ide> Class.new {
<del> def on(field)
<add> def [](field)
<ide> case field.to_s
<ide> when "author_name"
<del> "can't be empty"
<add> ["can't be empty"]
<ide> when "body"
<del> true
<add> ['foo']
<ide> else
<del> false
<add> []
<ide> end
<ide> end
<ide> def empty?() false end
<ide> def setup_user
<ide> @user = User.new
<ide> def @user.errors
<ide> Class.new {
<del> def on(field) field == "email" end
<add> def [](field) field == "email" ? ['nonempty'] : [] end
<ide> def empty?() false end
<ide> def count() 1 end
<ide> def full_messages() [ "User email can't be empty" ] end
<ide><path>actionpack/test/template/form_helper_test.rb
<ide> def setup
<ide> @comment = Comment.new
<ide> def @post.errors()
<ide> Class.new{
<del> def on(field); "can't be empty" if field == "author_name"; end
<add> def [](field); field == "author_name" ? ["can't be empty"] : [] end
<ide> def empty?() false end
<ide> def count() 1 end
<ide> def full_messages() [ "Author name can't be empty" ] end
<ide> def post_path(post)
<ide> def protect_against_forgery?
<ide> false
<ide> end
<del>end
<ide>\ No newline at end of file
<add>end | 3 |
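The refactor above depends on `errors[field]` always returning an array (possibly empty), whereas the old `errors.on(field)` could return `nil`, a single string, or an array. A minimal illustration of why the helpers can now call `.first` and `.any?` unconditionally; the hash below is a stand-in for the real errors object, not ActiveModel itself:

```ruby
# Stand-in errors object: every field maps to an array of messages.
errors = Hash.new { |hash, field| hash[field] = [] }
errors[:author_name] << "can't be empty"

puts errors[:author_name].first # => "can't be empty"
puts errors[:body].any?         # => false (no nil check needed)
```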
Python | Python | fix migration for MSSQL | a16088694908b45eefb5b1ab294dd2d706d6e540
<ide> def upgrade():
<ide> # versions, check for the function existing.
<ide> try:
<ide> conn.execute("SELECT JSON_VALID(1)").fetchone()
<del> except sa.exc.OperationalError:
<add> except (sa.exc.OperationalError, sa.exc.ProgrammingError):
<ide> json_type = sa.Text
<ide>
<ide> op.create_table( | 1 |
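The widened `except` matters because backends report a missing `JSON_VALID` function differently: MySQL raises an `OperationalError`, while MSSQL raises a `ProgrammingError`. A rough sketch of the probe pattern outside Alembic; the helper name is made up, and it assumes a pre-2.0 SQLAlchemy connection that accepts plain SQL strings:

```python
import sqlalchemy as sa


def json_column_type(conn):
    """Pick a JSON-capable column type if the backend understands
    JSON_VALID, otherwise fall back to plain Text, as the migration does."""
    try:
        conn.execute("SELECT JSON_VALID(1)").fetchone()
    except (sa.exc.OperationalError, sa.exc.ProgrammingError):
        return sa.Text
    return sa.JSON
```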
Text | Text | add trailing slash | 9043df6be7617cdb3b24206cb8ba38d1b9942454 | <ide><path>docs/tutorial/2-requests-and-responses.md
<ide> Now update the `snippets/urls.py` file slightly, to append a set of `format_suff
<ide>
<ide> urlpatterns = [
<ide> path('snippets/', views.snippet_list),
<del> path('snippets/<int:pk>', views.snippet_detail),
<add> path('snippets/<int:pk>/', views.snippet_detail),
<ide> ]
<ide>
<ide> urlpatterns = format_suffix_patterns(urlpatterns) | 1 |
Text | Text | add table of contents to readme.md | f62d9fc89fa988742f10a8bad338bc4132ef5525 | <ide><path>README.md
<ide> policies, and releases are managed under an
<ide> If you need help using or installing Node.js, please use the
<ide> [nodejs/help](https://github.com/nodejs/help) issue tracker.
<ide>
<add>
<add># Table of Contents
<add>
<add>* [Resources for Newcomers](#resources-for-newcomers)
<add>* [Release Types](#release-types)
<add> * [Download](#download)
<add> * [Current and LTS Releases](#current-and-lts-releases)
<add> * [Nightly Releases](#nightly-releases)
<add> * [API Documentation](#api-documentation)
<add> * [Verifying Binaries](#verifying-binaries)
<add>* [Building Node.js](#building-nodejs)
<add> * [Security](#security)
<add> * [Current Project Team Members](#current-project-team-members)
<add> * [CTC (Core Technical Committee)](#ctc-core-technical-committee)
<add> * [Collaborators](#collaborators)
<add> * [Release Team](#release-team)
<add>
<ide> ## Resources for Newcomers
<ide>
<ide> ### Official Resources
<ide> The Node.js project maintains multiple types of releases:
<ide> Binaries, installers, and source tarballs are available at
<ide> <https://nodejs.org>.
<ide>
<add>#### Current and LTS Releases
<ide> **Current** and **LTS** releases are available at
<ide> <https://nodejs.org/download/release/>, listed under their version strings.
<ide> The [latest](https://nodejs.org/download/release/latest/) directory is an
<ide> alias for the latest Current release. The latest LTS release from an LTS
<ide> line is available in the form: latest-_codename_. For example:
<ide> <https://nodejs.org/download/release/latest-argon>
<ide>
<add>#### Nightly Releases
<ide> **Nightly** builds are available at
<ide> <https://nodejs.org/download/nightly/>, listed under their version
<ide> string which includes their date (in UTC time) and the commit SHA at
<ide> the HEAD of the release.
<ide>
<add>#### API Documentation
<ide> **API documentation** is available in each release and nightly
<ide> directory under _docs_. <https://nodejs.org/api/> points to the API
<ide> documentation of the latest stable version. | 1 |
Go | Go | fix issues in pkg/nat | 15d01d6e6c57f4b8a39dddd2676a2d6914c62c77 | <ide><path>api/client/port.go
<ide> func (cli *DockerCli) CmdPort(args ...string) error {
<ide> }
<ide> if frontends, exists := c.NetworkSettings.Ports[newP]; exists && frontends != nil {
<ide> for _, frontend := range frontends {
<del> fmt.Fprintf(cli.out, "%s:%s\n", frontend.HostIp, frontend.HostPort)
<add> fmt.Fprintf(cli.out, "%s:%s\n", frontend.HostIP, frontend.HostPort)
<ide> }
<ide> return nil
<ide> }
<ide> func (cli *DockerCli) CmdPort(args ...string) error {
<ide>
<ide> for from, frontends := range c.NetworkSettings.Ports {
<ide> for _, frontend := range frontends {
<del> fmt.Fprintf(cli.out, "%s -> %s:%s\n", from, frontend.HostIp, frontend.HostPort)
<add> fmt.Fprintf(cli.out, "%s -> %s:%s\n", from, frontend.HostIP, frontend.HostPort)
<ide> }
<ide> }
<ide>
<ide><path>daemon/container_unix.go
<ide> func (container *Container) buildPortMapInfo(n libnetwork.Network, ep libnetwork
<ide> if err != nil {
<ide> return nil, err
<ide> }
<del> natBndg := nat.PortBinding{HostIp: pp.HostIP.String(), HostPort: strconv.Itoa(int(pp.HostPort))}
<add> natBndg := nat.PortBinding{HostIP: pp.HostIP.String(), HostPort: strconv.Itoa(int(pp.HostPort))}
<ide> networkSettings.Ports[natPort] = append(networkSettings.Ports[natPort], natBndg)
<ide> }
<ide> }
<ide> func (container *Container) buildCreateEndpointOptions() ([]libnetwork.EndpointO
<ide> bindings[p] = []nat.PortBinding{}
<ide> for _, bb := range b {
<ide> bindings[p] = append(bindings[p], nat.PortBinding{
<del> HostIp: bb.HostIp,
<add> HostIP: bb.HostIP,
<ide> HostPort: bb.HostPort,
<ide> })
<ide> }
<ide> func (container *Container) buildCreateEndpointOptions() ([]libnetwork.EndpointO
<ide> return nil, fmt.Errorf("Error parsing HostPort value(%s):%v", binding[i].HostPort, err)
<ide> }
<ide> pbCopy.HostPort = uint16(newP.Int())
<del> pbCopy.HostIP = net.ParseIP(binding[i].HostIp)
<add> pbCopy.HostIP = net.ParseIP(binding[i].HostIP)
<ide> pbList = append(pbList, pbCopy)
<ide> }
<ide>
<ide><path>daemon/list.go
<ide> func (daemon *Daemon) Containers(config *ContainersConfig) ([]*types.Container,
<ide> PrivatePort: p,
<ide> PublicPort: h,
<ide> Type: port.Proto(),
<del> IP: binding.HostIp,
<add> IP: binding.HostIP,
<ide> })
<ide> }
<ide> }
<ide><path>integration-cli/docker_api_containers_test.go
<ide> func (s *DockerSuite) TestContainerApiBadPort(c *check.C) {
<ide> "PortBindings": map[string]interface{}{
<ide> "8080/tcp": []map[string]interface{}{
<ide> {
<del> "HostIp": "",
<add> "HostIP": "",
<ide> "HostPort": "aa80",
<ide> },
<ide> },
<ide><path>pkg/nat/nat.go
<ide> import (
<ide> )
<ide>
<ide> const (
<del> PortSpecTemplate = "ip:hostPort:containerPort"
<del> PortSpecTemplateFormat = "ip:hostPort:containerPort | ip::containerPort | hostPort:containerPort | containerPort"
<add> // portSpecTemplate is the expected format for port specifications
<add> portSpecTemplate = "ip:hostPort:containerPort"
<ide> )
<ide>
<add>// PortBinding represents a binding between a Host IP address and a Host Port
<ide> type PortBinding struct {
<del> HostIp string
<add> // HostIP is the host IP Address
<add> HostIP string `json:"HostIp"`
<add> // HostPort is the host port number
<ide> HostPort string
<ide> }
<ide>
<add>// PortMap is a collection of PortBinding indexed by Port
<ide> type PortMap map[Port][]PortBinding
<ide>
<add>// PortSet is a collection of structs indexed by Port
<ide> type PortSet map[Port]struct{}
<ide>
<del>// 80/tcp
<add>// Port is a string containing port number and protocol in the format "80/tcp"
<ide> type Port string
<ide>
<add>// NewPort creates a new instance of a Port given a protocol and port number
<ide> func NewPort(proto, port string) (Port, error) {
<ide> // Check for parsing issues on "port" now so we can avoid having
<ide> // to check it later on.
<ide> func NewPort(proto, port string) (Port, error) {
<ide> return Port(fmt.Sprintf("%d/%s", portInt, proto)), nil
<ide> }
<ide>
<add>// ParsePort parses the port number string and returns an int
<ide> func ParsePort(rawPort string) (int, error) {
<ide> if len(rawPort) == 0 {
<ide> return 0, nil
<ide> func ParsePort(rawPort string) (int, error) {
<ide> return int(port), nil
<ide> }
<ide>
<add>// Proto returns the protocol of a Port
<ide> func (p Port) Proto() string {
<ide> proto, _ := SplitProtoPort(string(p))
<ide> return proto
<ide> }
<ide>
<add>// Port returns the port number of a Port
<ide> func (p Port) Port() string {
<ide> _, port := SplitProtoPort(string(p))
<ide> return port
<ide> }
<ide>
<add>// Int returns the port number of a Port as an int
<ide> func (p Port) Int() int {
<ide> portStr := p.Port()
<ide> if len(portStr) == 0 {
<ide> func (p Port) Int() int {
<ide> return int(port)
<ide> }
<ide>
<del>// Splits a port in the format of proto/port
<add>// SplitProtoPort splits a port in the format of proto/port
<ide> func SplitProtoPort(rawPort string) (string, string) {
<ide> parts := strings.Split(rawPort, "/")
<ide> l := len(parts)
<ide> func validateProto(proto string) bool {
<ide> return false
<ide> }
<ide>
<del>// We will receive port specs in the format of ip:public:private/proto and these need to be
<del>// parsed in the internal types
<add>// ParsePortSpecs receives port specs in the format of ip:public:private/proto and parses
<add>// these into the internal types
<ide> func ParsePortSpecs(ports []string) (map[Port]struct{}, map[Port][]PortBinding, error) {
<ide> var (
<ide> exposedPorts = make(map[Port]struct{}, len(ports))
<ide> func ParsePortSpecs(ports []string) (map[Port]struct{}, map[Port][]PortBinding,
<ide> rawPort = fmt.Sprintf(":%s", rawPort)
<ide> }
<ide>
<del> parts, err := parsers.PartParser(PortSpecTemplate, rawPort)
<add> parts, err := parsers.PartParser(portSpecTemplate, rawPort)
<ide> if err != nil {
<ide> return nil, nil, err
<ide> }
<ide>
<ide> var (
<ide> containerPort = parts["containerPort"]
<del> rawIp = parts["ip"]
<add> rawIP = parts["ip"]
<ide> hostPort = parts["hostPort"]
<ide> )
<ide>
<del> if rawIp != "" && net.ParseIP(rawIp) == nil {
<del> return nil, nil, fmt.Errorf("Invalid ip address: %s", rawIp)
<add> if rawIP != "" && net.ParseIP(rawIP) == nil {
<add> return nil, nil, fmt.Errorf("Invalid ip address: %s", rawIP)
<ide> }
<ide> if containerPort == "" {
<ide> return nil, nil, fmt.Errorf("No port specified: %s<empty>", rawPort)
<ide> func ParsePortSpecs(ports []string) (map[Port]struct{}, map[Port][]PortBinding,
<ide> }
<ide>
<ide> binding := PortBinding{
<del> HostIp: rawIp,
<add> HostIP: rawIP,
<ide> HostPort: hostPort,
<ide> }
<ide> bslice, exists := bindings[port]
<ide><path>pkg/nat/nat_test.go
<ide> func TestParsePortSpecs(t *testing.T) {
<ide> t.Fatalf("%s should have exactly one binding", portspec)
<ide> }
<ide>
<del> if bindings[0].HostIp != "" {
<del> t.Fatalf("HostIp should not be set for %s", portspec)
<add> if bindings[0].HostIP != "" {
<add> t.Fatalf("HostIP should not be set for %s", portspec)
<ide> }
<ide>
<ide> if bindings[0].HostPort != "" {
<ide> func TestParsePortSpecs(t *testing.T) {
<ide> t.Fatalf("%s should have exactly one binding", portspec)
<ide> }
<ide>
<del> if bindings[0].HostIp != "" {
<del> t.Fatalf("HostIp should not be set for %s", portspec)
<add> if bindings[0].HostIP != "" {
<add> t.Fatalf("HostIP should not be set for %s", portspec)
<ide> }
<ide>
<ide> if bindings[0].HostPort != port {
<ide> func TestParsePortSpecs(t *testing.T) {
<ide> t.Fatalf("%s should have exactly one binding", portspec)
<ide> }
<ide>
<del> if bindings[0].HostIp != "0.0.0.0" {
<del> t.Fatalf("HostIp is not 0.0.0.0 for %s", portspec)
<add> if bindings[0].HostIP != "0.0.0.0" {
<add> t.Fatalf("HostIP is not 0.0.0.0 for %s", portspec)
<ide> }
<ide>
<ide> if bindings[0].HostPort != port {
<ide> func TestParsePortSpecsWithRange(t *testing.T) {
<ide> t.Fatalf("%s should have exactly one binding", portspec)
<ide> }
<ide>
<del> if bindings[0].HostIp != "" {
<del> t.Fatalf("HostIp should not be set for %s", portspec)
<add> if bindings[0].HostIP != "" {
<add> t.Fatalf("HostIP should not be set for %s", portspec)
<ide> }
<ide>
<ide> if bindings[0].HostPort != "" {
<ide> func TestParsePortSpecsWithRange(t *testing.T) {
<ide> t.Fatalf("%s should have exactly one binding", portspec)
<ide> }
<ide>
<del> if bindings[0].HostIp != "" {
<del> t.Fatalf("HostIp should not be set for %s", portspec)
<add> if bindings[0].HostIP != "" {
<add> t.Fatalf("HostIP should not be set for %s", portspec)
<ide> }
<ide>
<ide> if bindings[0].HostPort != port {
<ide> func TestParsePortSpecsWithRange(t *testing.T) {
<ide>
<ide> for portspec, bindings := range bindingMap {
<ide> _, port := SplitProtoPort(string(portspec))
<del> if len(bindings) != 1 || bindings[0].HostIp != "0.0.0.0" || bindings[0].HostPort != port {
<add> if len(bindings) != 1 || bindings[0].HostIP != "0.0.0.0" || bindings[0].HostPort != port {
<ide> t.Fatalf("Expect single binding to port %s but found %s", port, bindings)
<ide> }
<ide> }
<ide> func TestParseNetworkOptsPrivateOnly(t *testing.T) {
<ide> t.Logf("Expected \"\" got %s", s.HostPort)
<ide> t.Fail()
<ide> }
<del> if s.HostIp != "192.168.1.100" {
<add> if s.HostIP != "192.168.1.100" {
<ide> t.Fail()
<ide> }
<ide> }
<ide> func TestParseNetworkOptsPublic(t *testing.T) {
<ide> t.Logf("Expected 8080 got %s", s.HostPort)
<ide> t.Fail()
<ide> }
<del> if s.HostIp != "192.168.1.100" {
<add> if s.HostIP != "192.168.1.100" {
<ide> t.Fail()
<ide> }
<ide> }
<ide> func TestParseNetworkOptsUdp(t *testing.T) {
<ide> t.Logf("Expected \"\" got %s", s.HostPort)
<ide> t.Fail()
<ide> }
<del> if s.HostIp != "192.168.1.100" {
<add> if s.HostIP != "192.168.1.100" {
<ide> t.Fail()
<ide> }
<ide> }
<ide><path>pkg/nat/sort.go
<ide> func (s *portSorter) Less(i, j int) bool {
<ide> return s.by(ip, jp)
<ide> }
<ide>
<add>// Sort sorts a list of ports using the provided predicate
<add>// This function should compare `i` and `j`, returning true if `i` is
<add>// considered to be less than `j`
<ide> func Sort(ports []Port, predicate func(i, j Port) bool) {
<ide> s := &portSorter{ports, predicate}
<ide> sort.Sort(s)
<ide><path>pkg/nat/sort_test.go
<ide> func TestSortPortMap(t *testing.T) {
<ide> },
<ide> Port("6379/tcp"): []PortBinding{
<ide> {},
<del> {HostIp: "0.0.0.0", HostPort: "32749"},
<add> {HostIP: "0.0.0.0", HostPort: "32749"},
<ide> },
<ide> Port("9999/tcp"): []PortBinding{
<del> {HostIp: "0.0.0.0", HostPort: "40000"},
<add> {HostIP: "0.0.0.0", HostPort: "40000"},
<ide> },
<ide> }
<ide>
<ide> func TestSortPortMap(t *testing.T) {
<ide> t.Errorf("failed to prioritize port with explicit mappings, got %v", ports)
<ide> }
<ide> if pm := portMap[Port("6379/tcp")]; !reflect.DeepEqual(pm, []PortBinding{
<del> {HostIp: "0.0.0.0", HostPort: "32749"},
<add> {HostIP: "0.0.0.0", HostPort: "32749"},
<ide> {},
<ide> }) {
<ide> t.Errorf("failed to prioritize bindings with explicit mappings, got %v", pm) | 8 |
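The rename from `HostIp` to `HostIP` only affects Go callers; the `json:"HostIp"` struct tag keeps the wire format unchanged. A short sketch of how a caller consumes the parsed bindings after this change, assuming the package is importable as `github.com/docker/docker/pkg/nat`:

```go
package main

import (
	"fmt"

	"github.com/docker/docker/pkg/nat"
)

func main() {
	// "ip:hostPort:containerPort" is the portSpecTemplate format.
	exposed, bindings, err := nat.ParsePortSpecs([]string{"192.168.1.100:8080:80/tcp"})
	if err != nil {
		panic(err)
	}
	for port := range exposed {
		fmt.Println("exposed:", port.Proto(), port.Port())
	}
	for port, pbs := range bindings {
		for _, pb := range pbs {
			// HostIP replaces the old HostIp field name.
			fmt.Printf("%s -> %s:%s\n", port, pb.HostIP, pb.HostPort)
		}
	}
}
```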
Ruby | Ruby | remove unnecessary method | b329fbb5f2706e4b3c5d9e098881f4e90d3c9a44 | <ide><path>actionpack/lib/action_controller/metal/testing.rb
<ide> module Testing
<ide>
<ide> # Behavior specific to functional tests
<ide> module Functional # :nodoc:
<del> def set_response!(request)
<del> end
<del>
<ide> def recycle!
<ide> @_url_options = nil
<ide> self.formats = nil | 1 |
Mixed | Ruby | add helper method (#37) | bc98fd37882c64c896dc2243fcc6e129f170a32a | <ide><path>Library/Homebrew/cleanup.rb
<ide> def self.cleanup_cache
<ide> cleanup_path(path) { path.unlink }
<ide> next
<ide> end
<del> if path.basename.to_s == "java_cache" && path.directory?
<add> if %w[java_cache npm_cache].include?(path.basename.to_s) && path.directory?
<ide> cleanup_path(path) { FileUtils.rm_rf path }
<ide> next
<ide> end
<ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_text
<ide> if text =~ /def plist/ && text !~ /plist_options/
<ide> problem "Please set plist_options when using a formula-defined plist."
<ide> end
<del>
<del> if text =~ /system "npm", "install"/ && text !~ %r[opt_libexec\}/npm/bin] && formula.name !~ /^kibana(\d{2})?$/
<del> need_npm = "\#{Formula[\"node\"].opt_libexec\}/npm/bin"
<del> problem <<-EOS.undent
<del> Please add ENV.prepend_path \"PATH\", \"#{need_npm}"\ to def install
<del> EOS
<del> end
<ide> end
<ide>
<ide> def audit_line(line, lineno)
<ide> def audit_line(line, lineno)
<ide> problem "Use `assert_match` instead of `assert ...include?`"
<ide> end
<ide>
<add> if line =~ /system "npm", "install"/ && line !~ /Language::Node/
<add> problem "Use Language::Node for npm install args"
<add> end
<add>
<ide> if @strict
<ide> if line =~ /system (["'][^"' ]*(?:\s[^"' ]*)+["'])/
<ide> bad_system = $1
<ide><path>Library/Homebrew/language/node.rb
<add>module Language
<add> module Node
<add> def self.npm_cache_config
<add> "cache=#{HOMEBREW_CACHE}/npm_cache\n"
<add> end
<add>
<add> def self.setup_npm_environment
<add> npmrc = Pathname.new("#{ENV["HOME"]}/.npmrc")
<add> # only run setup_npm_environment once per formula
<add> return if npmrc.exist?
<add> # explicitly set npm's cache path to HOMEBREW_CACHE/npm_cache to fix
<add> # issues caused by overriding $HOME (long build times, high disk usage)
<add> # https://github.com/Homebrew/brew/pull/37#issuecomment-208840366
<add> npmrc.write npm_cache_config
<add> # explicitly use our npm and node-gyp executables instead of the user
<add> # managed ones in HOMEBREW_PREFIX/lib/node_modules which might be broken
<add> ENV.prepend_path "PATH", Formula["node"].opt_libexec/"npm/bin"
<add> end
<add>
<add> def self.std_npm_install_args(libexec)
<add> setup_npm_environment
<add> # tell npm to not install .brew_home by adding it to the .npmignore file
<add> # (or creating a new one if no .npmignore file already exists)
<add> open(".npmignore", "a") { |f| f.write( "\n.brew_home\n") }
<add> # npm install args for global style module format installed into libexec
<add> ["--verbose", "--global", "--prefix=#{libexec}", "."]
<add> end
<add>
<add> def self.local_npm_install_args
<add> setup_npm_environment
<add> # npm install args for local style module format
<add> ["--verbose"]
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/test/test_cleanup.rb
<ide> def test_cleanup_cache_java_cache
<ide> shutup { Homebrew::Cleanup.cleanup_cache }
<ide> refute_predicate java_cache, :exist?
<ide> end
<add>
<add> def test_cleanup_cache_npm_cache
<add> npm_cache = (HOMEBREW_CACHE/"npm_cache")
<add> npm_cache.mkpath
<add> shutup { Homebrew::Cleanup.cleanup_cache }
<add> refute_predicate npm_cache, :exist?
<add> end
<ide> end
<ide><path>share/doc/homebrew/Node-for-Formula-Authors.md
<add># Node for formula authors
<add>
<add>This document explains how to successfully use Node and npm in a Node module based Homebrew formula.
<add>
<add># Running `npm install`
<add>
<add>Homebrew provides two helper methods in a `Language::Node` module, `std_npm_install_args` and `local_npm_install_args`. They both set up the correct environment for npm and return arguments for `npm install` for their specific use cases. Please use them instead of invoking `npm install` explicitly. The syntax for a standard Node module installation is:
<add>
<add>```ruby
<add>system "npm", "install", *Language::Node.std_npm_install_args(libexec)
<add>```
<add>
<add>where `libexec` is the destination prefix (usually the `libexec` variable).
<add>
<add># Download URL
<add>
<add>If the Node module is also available on the npm registry, we prefer npm-hosted release tarballs over GitHub- (or elsewhere-) hosted source tarballs. The advantages of these tarballs are that they don't include the files listed in `.npmignore` (such as tests), resulting in a smaller download size, and that any transpilation step is already done (e.g. no need to compile CoffeeScript files as a build step).
<add>
<add>The npm registry URLs usually have the format:
<add>
<add>```
<add>https://registry.npmjs.org/<name>/-/<name>-<version>.tgz
<add>```
<add>
<add>Alternatively you could curl the JSON at `https://registry.npmjs.org/<name>` and look for the value of `versions[<version>].dist.tarball` for the correct tarball URL.
<add>
<add># Dependencies
<add>
<add>Node modules which are compatible with the latest Node version should declare a dependency on the `node` formula.
<add>
<add>```ruby
<add>depends_on "node"
<add>```
<add>
<add>If your formula requires an older Node version to run, you must vendor that older Node version, as done in the [`kibana` formula](https://github.com/Homebrew/homebrew-core/blob/c6202f91a129e2f994d904f299a308cc6fbd58e5/Formula/kibana.rb).
<add>
<add>### Special requirements for native addons
<add>
<add>If your Node module is a native addon or has a native addon somewhere in its dependency tree, you have to declare an additional dependency. Since the compilation of the native addon results in an invocation of `node-gyp`, we need an additional build-time dependency on `:python` (because gyp depends on Python 2.7).
<add>
<add>```ruby
<add>depends_on :python => :build
<add>```
<add>
<add>Please also note that such a formula would only be compatible with the same Node major version it was originally compiled with. This means that we need to revision every formula with a Node native addon on every major version bump of the `node` formula. To make sure we don't overlook your formula on a Node major version bump, write a meaningful test which would fail in such a case (i.e. when invoked with an ABI-incompatible Node version).
<add>
<add># Installation
<add>
<add>Node modules should be installed to `libexec`. This prevents the Node modules from contaminating the global `node_modules`, which is important so that npm doesn't try to manage Homebrew-installed Node modules.
<add>
<add>In the following we distinguish between two types of Node module formulae:
<add>* formulae for standard Node modules compatible with npm's global module format, which should use [`std_npm_install_args`](#installing-global-style-modules-with-std_npm_install_args-to-libexec) (like [`azure-cli`](https://github.com/Homebrew/homebrew-core/blob/d93fe9ba3bcc9071b699c8da4e7d733518d3337e/Formula/azure-cli.rb) or [`autocode`](https://github.com/Homebrew/homebrew-core/blob/1a670a6269e1e07f86683c2d164977c9bd8a3fb6/Formula/autocode.rb)), and
<add>* formulae where the `npm install` step is only one of several install steps that are not exclusively Node-related (not compatible with npm's global module format), which have to use [`local_npm_install_args`](#installing-module-dependencies-locally-with-local_npm_install_args) (like [`elixirscript`](https://github.com/Homebrew/homebrew-core/blob/ec1e40d37e81af63122a354f0101c377f6a4e66d/Formula/elixirscript.rb) or [`kibana`](https://github.com/Homebrew/homebrew-core/blob/c6202f91a129e2f994d904f299a308cc6fbd58e5/Formula/kibana.rb))
<add>
<add>Both methods set up the correct environment for using npm inside Homebrew and return the arguments for invoking `npm install` for their specific use cases. This includes fixing an important edge case with the npm cache (caused by Homebrew's redirection of `$HOME` during the build and test process) by using our own custom `npm_cache` inside `HOMEBREW_CACHE`, which would otherwise result in very long build times and high disk space usage.
<add>
<add>To use them you have to require the Node language module at the beginning of your formula file with:
<add>
<add>```ruby
<add>require "language/node"
<add>```
<add>
<add>### Installing global style modules with `std_npm_install_args` to libexec
<add>
<add>In your formula's `install` method, cd to the top level of your Node module if necessary and then use `system` to invoke `npm install` with `Language::Node.std_npm_install_args` like:
<add>
<add>```ruby
<add>system "npm", "install", *Language::Node.std_npm_install_args(libexec)
<add>```
<add>
<add>This will install your Node module in npm's global module style with a custom prefix pointing to `libexec`. All of your module's executables will be automatically resolved by npm into `libexec/"bin"` for you, but that directory is not symlinked into Homebrew's prefix. To make sure the executables end up installed, symlink them into `bin` with:
<add>
<add>```ruby
<add>bin.install_symlink Dir["#{libexec}/bin/*"]
<add>```
<add>
<add>### Installing module dependencies locally with `local_npm_install_args`
<add>
<add>In your formula's `install` method, do any installation steps which need to be done before the `npm install` step and then cd to the top level of the included Node module. Then, use `system` with `Language::Node.local_npm_install_args` to invoke `npm install` like:
<add>
<add>```ruby
<add>system "npm", "install", *Language::Node.local_npm_install_args
<add>```
<add>
<add>This will install all of your Node module's dependencies into your local build path. You can now continue with your build steps and take care of the installation into the Homebrew `prefix` on your own, following the [general Homebrew formula instructions](https://github.com/Homebrew/brew/blob/master/share/doc/homebrew/Formula-Cookbook.md).
<add>
<add># Example
<add>
<add>Installing a standard Node module based formula would look like this:
<add>
<add>```ruby
<add>require "language/node"
<add>
<add>class Foo < Formula
<add> desc "..."
<add> homepage "..."
<add> url "https://registry.npmjs.org/foo/-/foo-1.4.2.tgz"
<add> sha256 "..."
<add>
<add> depends_on "node"
<add> # uncomment if there is a native addon inside the dependency tree
<add> # depends_on :python => :build
<add>
<add> def install
<add> system "npm", "install", *Language::Node.std_npm_install_args(libexec)
<add> bin.install_symlink Dir["#{libexec}/bin/*"]
<add> end
<add>
<add> test do
<add> # add a meaningful test here
<add> end
<add>end
<add>```
<add>
<add>For examples using the `local_npm_install_args` method look at the [`elixirscript`](https://github.com/Homebrew/homebrew-core/blob/ec1e40d37e81af63122a354f0101c377f6a4e66d/Formula/elixirscript.rb) or [`kibana`](https://github.com/Homebrew/homebrew-core/blob/c6202f91a129e2f994d904f299a308cc6fbd58e5/Formula/kibana.rb) formula. | 5 |
Javascript | Javascript | fix wrong variable name | 90452836fe63bf47b88a5ee04e449600453c95f5 | <ide><path>spec/text-editor-component-spec.js
<ide> function setScrollTop (component, scrollTop) {
<ide> return component.getNextUpdatePromise()
<ide> }
<ide>
<del>function setScrollLeft (component, scrollTop) {
<del> component.setScrollLeft(scrollTop)
<add>function setScrollLeft (component, scrollLeft) {
<add> component.setScrollLeft(scrollLeft)
<ide> component.scheduleUpdate()
<ide> return component.getNextUpdatePromise()
<ide> } | 1 |
Javascript | Javascript | fix dock hiding on drag | 9259372f23573485c5f7bec696560aadce825e96 | <ide><path>src/dock.js
<ide> module.exports = class Dock {
<ide> this.show()
<ide> this.didActivate(this)
<ide> }),
<del> this.paneContainer.onDidDestroyPaneItem(this.handleDidRemovePaneItem.bind(this)),
<add> this.paneContainer.observePanes(pane => {
<add> pane.onDidRemoveItem(this.handleDidRemovePaneItem.bind(this))
<add> }),
<ide> this.paneContainer.onDidChangeActivePane((item) => params.didChangeActivePane(this, item)),
<ide> this.paneContainer.onDidChangeActivePaneItem((item) => params.didChangeActivePaneItem(this, item)),
<ide> this.paneContainer.onDidDestroyPaneItem((item) => params.didDestroyPaneItem(item)) | 1 |
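The patch above subscribes to item *removal* on each pane instead of item *destruction* on the container — presumably because dragging an item out of the dock removes it from its pane without destroying it, so a destroy-only handler never ran. The toy emitter below (invented for illustration; it is not Atom's API) shows why a removal handler also covers the drag case:

```js
// Toy model: 'removed' fires whenever an item leaves a pane; 'destroyed' only
// fires when the item is actually destroyed. A drag to another pane is a
// removal without a destruction.
class TinyPane {
  constructor () { this.handlers = { removed: [], destroyed: [] } }
  on (event, fn) { this.handlers[event].push(fn) }
  removeItem (item, destroy = false) {
    this.handlers.removed.forEach(fn => fn(item))                // drag-out AND destroy
    if (destroy) this.handlers.destroyed.forEach(fn => fn(item)) // destroy only
  }
}

const pane = new TinyPane()
pane.on('destroyed', item => console.log('destroyed handler:', item))
pane.on('removed', item => console.log('removed handler:', item, '-> re-check whether the dock should hide'))

pane.removeItem('editor-tab')        // dragged elsewhere: only the removal handler runs
pane.removeItem('editor-tab', true)  // closed: both handlers run
```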
Text | Text | remove extraneous line for local setup | 8d0c027fa4ed710eb529f6510ddc12e2f1b3bd7b | <ide><path>CONTRIBUTING.md
<ide> Contributing to this requires some understanding of APIs, ES6 Syntax, and a lot
<ide>
<ide> Essentially, we expect basic familiarity with some of the aforementioned technologies, tools, and libraries. With that being said, you are not required to be an expert on them in order to contribute.
<ide>
<del>**If you want to help us improve our codebase, here's [how to setup freeCodeCamp locally](/docs/how-to-setup-freecodecamp-locally.md).**
<del>
<del>If you want to help us improve our codebase, you can either [setup freeCodeCamp locally](/docs/how-to-setup-freecodecamp-locally.md) or use Gitpod, a free online dev environment.
<add>**If you want to help us improve our codebase, you can either [setup freeCodeCamp locally](/docs/how-to-setup-freecodecamp-locally.md) or use Gitpod, a free online dev environment.**
<ide>
<ide> [](https://gitpod.io/#https://github.com/freeCodeCamp/freeCodeCamp)
<ide> | 1 |
Python | Python | fix static checks | b0b25910713dd39e0193bdcd95b2cfd9e3fed5e7 | <ide><path>airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py
<ide> class SparkKubernetesOperator(BaseOperator):
<ide> For more detail about Spark Application Object have a look at the reference:
<ide> https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.1.0-2.4.5/docs/api-docs.md#sparkapplication
<ide>
<del> :param application_file: Defines Kubernetes 'custom_resource_definition' of 'sparkApplication' as either a path to a '.json' file or a JSON string.
<add> :param application_file: Defines Kubernetes 'custom_resource_definition' of 'sparkApplication' as either a
<add> path to a '.json' file or a JSON string.
<ide> :type application_file: str
<ide> :param namespace: kubernetes namespace to put sparkApplication
<ide> :type namespace: str | 1 |
Ruby | Ruby | remove unused variable | 2b92069a4985ea21ba12ca726eb15b2daa7bd162 | <ide><path>activesupport/test/file_update_checker_shared_tests.rb
<ide> def run(*args)
<ide>
<ide> FileUtils.touch(tmpfiles)
<ide>
<del> now = Time.now
<ide> time = Time.at(0) # wrong mtime from the future
<ide> File.utime(time, time, tmpfiles[0])
<ide> | 1 |
Javascript | Javascript | handle null as valid value for reduce. #108 | ddf5a77451a386f5f7e6def153448db6bd0e89e1 | <ide><path>dist/Immutable.js
<ide> var $Sequence = Sequence;
<ide> reduce: function(reducer, initialReduction, thisArg) {
<ide> var reduction;
<ide> var useFirst;
<del> if (arguments.length < 2 || initialReduction === null) {
<add> if (arguments.length < 2) {
<ide> useFirst = true;
<ide> } else {
<ide> reduction = initialReduction;
<ide><path>dist/Immutable.min.js
<ide> function t(){function t(t,e,n,r){var i;if(r){var u=r.prototype;i=de.create(u)}el
<ide> u&&r.push(Array.isArray(u)?We(u).fromEntrySeq():We(u))}return T(t,e,r)}function Q(t){return function(e,n){return e&&e.mergeDeepWith?e.mergeDeepWith(t,n):t?t(e,n):n}}function T(t,e,n){return 0===n.length?t:t.withMutations(function(t){for(var r=e?function(n,r){var i=t.get(r,ke);t.set(r,i===ke?n:e(i,n))}:function(e,n){t.set(n,e)},i=0;n.length>i;i++)n[i].forEach(r)})}function X(t,e,n,r,i){var u=e.length;if(i===u)return r(t);o(t.set,"updateIn with invalid keyPath");var a=i===u-1?n:Qe.empty(),s=e[i],h=t.get(s,a),c=X(h,e,n,r,i+1);return c===h?t:t.set(s,c)}function Y(t){return t-=t>>1&1431655765,t=(858993459&t)+(t>>2&858993459),t=t+(t>>4)&252645135,t+=t>>8,t+=t>>16,127&t}function Z(t,e,n,r){var i=r?t:a(t);return i[e]=n,i}function $(t,e,n,r){var i=t.length+1;if(r&&e+1===i)return t[e]=n,t;for(var u=Array(i),a=0,s=0;i>s;s++)s===e?(u[s]=n,a=-1):u[s]=t[s+a];return u}function te(t,e,n){var r=t.length-1;if(n&&e===r)return t.pop(),t;for(var i=Array(r),u=0,a=0;r>a;a++)a===e&&(u=1),i[a]=t[a+u];return i}function ee(t,e,n,r,i,u){if(t){var a,s=t.array,h=s.length-1;if(0===e)for(a=0;h>=a;a++){var o=u?h-a:a;if(s.hasOwnProperty(o)){var c=o+n;if(c>=0&&r>c&&i(s[o],c)===!1)return!1}}else{var f=1<<e,l=e-we;for(a=0;h>=a;a++){var _=u?h-a:a,v=n+_*f;if(r>v&&v+f>0){var g=s[_];if(g&&!ee(g,l,v,r,i,u))return!1}}}}return!0}function ne(t,e,n,r,i){return{array:t,level:e,offset:n,max:r,rawMax:r-n>>e,index:0,__prev:i}}function re(t,e,n,r,i,u,a){var s=Object.create(ln);return s.length=e-t,s._origin=t,s._size=e,s._level=n,s._root=r,s._tail=i,s.__ownerID=u,s.__hash=a,s.__altered=!1,s}function ie(t,e,n){if(e=C(t,e),e>=t.length||0>e)return n===ke?t:t.withMutations(function(t){0>e?he(t,e).set(0,n):he(t,0,e+1).set(e,n)});e+=t._origin;var i=t._tail,u=t._root,a=r(be);return e>=ce(t._size)?i=ue(i,t.__ownerID,0,e,n,a):u=ue(u,t.__ownerID,t._level,e,n,a),a.value?t.__ownerID?(t._root=u,t._tail=i,t.__hash=void 0,t.__altered=!0,t):re(t._origin,t._size,t._level,u,i):t}function ue(t,e,n,r,u,a){var s,h=u===ke,o=r>>>n&Ie,c=t&&t.array.length>o&&t.array.hasOwnProperty(o);if(h&&!c)return t;
<ide> if(n>0){var f=t&&t.array[o],l=ue(f,e,n-we,r,u,a);return l===f?t:(s=ae(t,e),s.array[o]=l,s)}return!h&&c&&t.array[o]===u?t:(i(a),s=ae(t,e),h?delete s.array[o]:s.array[o]=u,s)}function ae(t,e){return e&&t&&e===t.ownerID?t:new _n(t?t.array.slice():[],e)}function se(t,e){if(e>=ce(t._size))return t._tail;if(1<<t._level+we>e){for(var n=t._root,r=t._level;n&&r>0;)n=n.array[e>>>r&Ie],r-=we;return n}}function he(t,e,n){var r=t.__ownerID||new u,i=t._origin,a=t._size,s=i+e,h=null==n?a:0>n?a+n:i+n;if(s===i&&h===a)return t;if(s>=h)return t.clear();for(var o=t._level,c=t._root,f=0;0>s+f;)c=new _n(c&&c.array.length?[null,c]:[],r),o+=we,f+=1<<o;f&&(s+=f,i+=f,h+=f,a+=f);for(var l=ce(a),_=ce(h);_>=1<<o+we;)c=new _n(c&&c.array.length?[c]:[],r),o+=we;var v=t._tail,g=l>_?se(t,h-1):_>l?new _n([],r):v;if(v&&_>l&&a>s&&v.array.length){c=ae(c,r);for(var p=c,m=o;m>we;m-=we){var d=l>>>m&Ie;p=p.array[d]=ae(p.array[d],r)}p.array[l>>>we&Ie]=v}if(a>h&&(g=g&&g.removeAfter(r,0,h)),s>=_)s-=_,h-=_,o=we,c=null,g=g&&g.removeBefore(r,0,s);else if(s>i||l>_){var y,w;f=0;do y=s>>>o&Ie,w=_-1>>>o&Ie,y===w&&(y&&(f+=(1<<o)*y),o-=we,c=c&&c.array[y]);while(c&&y===w);c&&s>i&&(c=c&&c.removeBefore(r,o,s-f)),c&&l>_&&(c=c&&c.removeAfter(r,o,_-f)),f&&(s-=f,h-=f)}return t.__ownerID?(t.length=h-s,t._origin=s,t._size=h,t._level=o,t._root=c,t._tail=g,t.__hash=void 0,t.__altered=!0,t):re(s,h,o,c,g)}function oe(t,e,n){for(var r=[],i=0;n.length>i;i++){var u=n[i];u&&r.push(We(u))}var a=Math.max.apply(null,r.map(function(t){return t.length||0}));return a>t.length&&(t=t.setLength(a)),T(t,e,r)}function ce(t){return Se>t?0:t-1>>>we<<we}function fe(t,e){var n=Object.create(yn);return n.length=t?t.length:0,n._map=t,n.__ownerID=e,n}function le(t,e,n,r){var i=Object.create(Sn.prototype);return i.length=t?t.length:0,i._map=t,i._vector=e,i.__ownerID=n,i.__hash=r,i}function _e(t,e,n){var r=t._map,i=t._vector,u=r.get(e),a=void 0!==u,s=n===ke;if(!a&&s||a&&n===i.get(u)[1])return t;a||(u=i.length);var h=s?r.remove(e):a?r:r.set(e,u),o=s?i.remove(u):i.set(u,[e,n]);return t.__ownerID?(t.length=h.length,t._map=h,t._vector=o,t.__hash=void 0,t):le(h,o)
<ide> }function ve(t,e,n){var r=Object.create(Object.getPrototypeOf(t));return r._map=e,r.__ownerID=n,r}function ge(t,e){return e?pe(e,t,"",{"":t}):me(t)}function pe(t,e,n,r){return e&&(Array.isArray(e)||e.constructor===Object)?t.call(r,n,We(e).map(function(n,r){return pe(t,n,r,e)})):e}function me(t){if(t){if(Array.isArray(t))return We(t).map(me).toVector();if(t.constructor===Object)return We(t).map(me).toMap()}return t}var de=Object,ye={};ye.createClass=t,ye.superCall=e,ye.defaultSuperCall=n;var we=5,Se=1<<we,Ie=Se-1,ke={},Me={value:!1},be={value:!1},De={value:void 0,done:!1},qe="delete",Oe="undefined"!=typeof Symbol?Symbol.iterator:"@@iterator",Ae=2147483647,xe=0,Ce="__immutablehash__";"undefined"!=typeof Symbol&&(Ce=Symbol(Ce));var Ee=!1,je=16,Re=255,Ue=0,Pe={},We=function(t){return Je.from(1===arguments.length?t:Array.prototype.slice.call(arguments))},Je=We;ye.createClass(We,{toString:function(){return this.__toString("Seq {","}")},__toString:function(t,e){return 0===this.length?t+e:t+" "+this.map(this.__toStringMapper).join(", ")+" "+e},__toStringMapper:function(t,e){return e+": "+O(t)},toJS:function(){return this.map(function(t){return t instanceof Je?t.toJS():t}).__toJS()},toArray:function(){E(this.length);var t=Array(this.length||0);return this.valueSeq().forEach(function(e,n){t[n]=e}),t},toObject:function(){E(this.length);var t={};return this.forEach(function(e,n){t[n]=e}),t},toVector:function(){return E(this.length),cn.from(this)},toMap:function(){return E(this.length),Qe.from(this)},toOrderedMap:function(){return E(this.length),Sn.from(this)},toSet:function(){return E(this.length),mn.from(this)},hashCode:function(){return this.__hash||(this.__hash=1/0===this.length?0:this.reduce(function(t,e,n){return t+(c(e)^(e===n?0:c(n)))&Ae},0))},equals:function(t){if(this===t)return!0;if(!(t instanceof Je))return!1;if(null!=this.length&&null!=t.length){if(this.length!==t.length)return!1;if(0===this.length&&0===t.length)return!0}return null!=this.__hash&&null!=t.__hash&&this.__hash!==t.__hash?!1:this.__deepEquals(t)},__deepEquals:function(t){var e=this.cacheResult().entrySeq().toArray(),n=0;
<del>return t.every(function(t,r){var i=e[n++];return W(r,i[0])&&W(t,i[1])})},join:function(t){t=void 0!==t?""+t:",";var e="",n=!0;return this.forEach(function(r){n?(n=!1,e+=null!=r?r:""):e+=t+(null!=r?r:"")}),e},count:function(t,e){return t?this.filter(t,e).count():(null==this.length&&(this.length=this.forEach(k)),this.length)},countBy:function(t){var e=this;return Sn.empty().withMutations(function(n){e.forEach(function(e,r,i){n.update(t(e,r,i),b)})})},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];var n=[this].concat(t.map(function(t){return Je(t)})),r=this.__makeSequence();return r.length=n.reduce(function(t,e){return null!=t&&null!=e.length?t+e.length:void 0},0),r.__iterateUncached=function(t,e){for(var r,i=0,u=n.length-1,a=0;u>=a&&!r;a++){var s=n[e?u-a:a];i+=s.__iterate(function(e,n,i){return t(e,n,i)===!1?(r=!0,!1):void 0},e)}return i},r},reverse:function(){var t=this,e=t.__makeSequence();return e.length=t.length,e.__iterateUncached=function(e,n){return t.__iterate(e,!n)},e.reverse=function(){return t},e},keySeq:function(){return this.flip().valueSeq()},valueSeq:function(){var t=this,e=g(t);return e.length=t.length,e.valueSeq=M,e.__iterateUncached=function(e,n,r){if(r&&null==this.length)return this.cacheResult().__iterate(e,n,r);var i,u=0;return r?(u=this.length-1,i=function(t,n,r){return e(t,u--,r)!==!1}):i=function(t,n,r){return e(t,u++,r)!==!1},t.__iterate(i,n),r?this.length:u},e},entrySeq:function(){var t=this;if(t._cache)return Je(t._cache);var e=t.map(I).valueSeq();return e.fromEntries=function(){return t},e},forEach:function(t,e){return this.__iterate(e?t.bind(e):t)},reduce:function(t,e,n){var r,i;return 2>arguments.length||null===e?i=!0:r=e,this.forEach(function(e,u,a){i?(i=!1,r=e):r=t.call(n,r,e,u,a)}),r},reduceRight:function(){var t=this.reverse(!0);return t.reduce.apply(t,arguments)},every:function(t,e){var n=!0;return this.forEach(function(r,i,u){return t.call(e,r,i,u)?void 0:(n=!1,!1)}),n},some:function(t,e){return!this.every(q(t),e)},first:function(){return this.find(k)},last:function(){return this.findLast(k)
<add>return t.every(function(t,r){var i=e[n++];return W(r,i[0])&&W(t,i[1])})},join:function(t){t=void 0!==t?""+t:",";var e="",n=!0;return this.forEach(function(r){n?(n=!1,e+=null!=r?r:""):e+=t+(null!=r?r:"")}),e},count:function(t,e){return t?this.filter(t,e).count():(null==this.length&&(this.length=this.forEach(k)),this.length)},countBy:function(t){var e=this;return Sn.empty().withMutations(function(n){e.forEach(function(e,r,i){n.update(t(e,r,i),b)})})},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];var n=[this].concat(t.map(function(t){return Je(t)})),r=this.__makeSequence();return r.length=n.reduce(function(t,e){return null!=t&&null!=e.length?t+e.length:void 0},0),r.__iterateUncached=function(t,e){for(var r,i=0,u=n.length-1,a=0;u>=a&&!r;a++){var s=n[e?u-a:a];i+=s.__iterate(function(e,n,i){return t(e,n,i)===!1?(r=!0,!1):void 0},e)}return i},r},reverse:function(){var t=this,e=t.__makeSequence();return e.length=t.length,e.__iterateUncached=function(e,n){return t.__iterate(e,!n)},e.reverse=function(){return t},e},keySeq:function(){return this.flip().valueSeq()},valueSeq:function(){var t=this,e=g(t);return e.length=t.length,e.valueSeq=M,e.__iterateUncached=function(e,n,r){if(r&&null==this.length)return this.cacheResult().__iterate(e,n,r);var i,u=0;return r?(u=this.length-1,i=function(t,n,r){return e(t,u--,r)!==!1}):i=function(t,n,r){return e(t,u++,r)!==!1},t.__iterate(i,n),r?this.length:u},e},entrySeq:function(){var t=this;if(t._cache)return Je(t._cache);var e=t.map(I).valueSeq();return e.fromEntries=function(){return t},e},forEach:function(t,e){return this.__iterate(e?t.bind(e):t)},reduce:function(t,e,n){var r,i;return 2>arguments.length?i=!0:r=e,this.forEach(function(e,u,a){i?(i=!1,r=e):r=t.call(n,r,e,u,a)}),r},reduceRight:function(){var t=this.reverse(!0);return t.reduce.apply(t,arguments)},every:function(t,e){var n=!0;return this.forEach(function(r,i,u){return t.call(e,r,i,u)?void 0:(n=!1,!1)}),n},some:function(t,e){return!this.every(q(t),e)},first:function(){return this.find(k)},last:function(){return this.findLast(k)
<ide> },rest:function(){return this.slice(1)},butLast:function(){return this.slice(0,-1)},has:function(t){return this.get(t,ke)!==ke},get:function(t,e){return this.find(function(e,n){return W(n,t)},null,e)},getIn:function(t,e){return t&&0!==t.length?p(this,t,e,0):this},contains:function(t){return this.find(function(e){return W(e,t)},null,ke)!==ke},find:function(t,e,n){var r=n;return this.forEach(function(n,i,u){return t.call(e,n,i,u)?(r=n,!1):void 0}),r},findKey:function(t,e){var n;return this.forEach(function(r,i,u){return t.call(e,r,i,u)?(n=i,!1):void 0}),n},findLast:function(t,e,n){return this.reverse(!0).find(t,e,n)},findLastKey:function(t,e){return this.reverse(!0).findKey(t,e)},flip:function(){var t=this,e=v();return e.length=t.length,e.flip=function(){return t},e.__iterateUncached=function(e,n){return t.__iterate(function(t,n,r){return e(n,t,r)!==!1},n)},e},map:function(t,e){var n=this,r=n.__makeSequence();return r.length=n.length,r.__iterateUncached=function(r,i){return n.__iterate(function(n,i,u){return r(t.call(e,n,i,u),i,u)!==!1},i)},r},mapKeys:function(t,e){var n=this,r=n.__makeSequence();return r.length=n.length,r.__iterateUncached=function(r,i){return n.__iterate(function(n,i,u){return r(n,t.call(e,i,n,u),u)!==!1},i)},r},filter:function(t,e){return D(this,t,e,!0,!1)},slice:function(t,e){if(m(t,e,this.length))return this;var n=d(t,this.length),r=y(e,this.length);if(n!==n||r!==r)return this.entrySeq().slice(t,e).fromEntrySeq();var i=0===n?this:this.skip(n);return null==r||r===this.length?i:i.take(r-n)},take:function(t){var e=this;if(t>e.length)return e;var n=e.__makeSequence();return n.__iterateUncached=function(n,r,i){if(r)return this.cacheResult().__iterate(n,r,i);var u=0;return e.__iterate(function(e,r,i){return t>u&&n(e,r,i)!==!1?void u++:!1},r,i),u},n.length=this.length&&Math.min(this.length,t),n},takeLast:function(t,e){return this.reverse(e).take(t).reverse(e)},takeWhile:function(t,e){var n=this,r=n.__makeSequence();return r.__iterateUncached=function(r,i,u){if(i)return this.cacheResult().__iterate(r,i,u);
<ide> var a=0;return n.__iterate(function(n,i,u){return t.call(e,n,i,u)&&r(n,i,u)!==!1?void a++:!1},i,u),a},r},takeUntil:function(t,e,n){return this.takeWhile(q(t),e,n)},skip:function(t){var e=this;if(0===t)return e;var n=e.__makeSequence();return n.__iterateUncached=function(n,r,i){if(r)return this.cacheResult().__iterate(n,r,i);var u=!0,a=0,s=0;return e.__iterate(function(e,r,i){if(!u||!(u=s++<t)){if(n(e,r,i)===!1)return!1;a++}},r,i),a},n.length=this.length&&Math.max(0,this.length-t),n},skipLast:function(t,e){return this.reverse(e).skip(t).reverse(e)},skipWhile:function(t,e){var n=this,r=n.__makeSequence();return r.__iterateUncached=function(r,i,u){if(i)return this.cacheResult().__iterate(r,i,u);var a=!0,s=0;return n.__iterate(function(n,i,u){if(!a||!(a=t.call(e,n,i,u))){if(r(n,i,u)===!1)return!1;s++}},i,u),s},r},skipUntil:function(t,e,n){return this.skipWhile(q(t),e,n)},groupBy:function(t){var e=this,n=Sn.empty().withMutations(function(n){e.forEach(function(e,r,i){var u=t(e,r,i),a=n.get(u,ke);a===ke&&(a=[],n.set(u,a)),a.push([r,e])})});return n.map(function(t){return Je(t).fromEntrySeq()})},sort:function(t,e){return this.sortBy(S,t,e)},sortBy:function(t,e){e=e||x;var n=this;return Je(this.entrySeq().entrySeq().toArray().sort(function(r,i){return e(t(r[1][1],r[1][0],n),t(i[1][1],i[1][0],n))||r[0]-i[0]})).fromEntrySeq().valueSeq().fromEntrySeq()},cacheResult:function(){return!this._cache&&this.__iterateUncached&&(E(this.length),this._cache=this.entrySeq().toArray(),null==this.length&&(this.length=this._cache.length)),this},__iterate:function(t,e,n){if(!this._cache)return this.__iterateUncached(t,e,n);var r=this.length-1,i=this._cache,u=this;if(e)for(var a=i.length-1;a>=0;a--){var s=i[a];if(t(s[1],n?s[0]:r-s[0],u)===!1)break}else i.every(n?function(e){return t(e[1],r-e[0],u)!==!1}:function(e){return t(e[1],e[0],u)!==!1});return this.length},__makeSequence:function(){return v()}},{from:function(t){if(t instanceof Je)return t;if(!Array.isArray(t)){if(t&&t.constructor===Object)return new Ke(t);t=[t]}return new Ne(t)}});var ze=We.prototype;
<ide> ze.toJSON=ze.toJS,ze.__toJS=ze.toObject,ze.inspect=ze.toSource=function(){return""+this};var Be=function(){ye.defaultSuperCall(this,Le.prototype,arguments)},Le=Be;ye.createClass(Be,{toString:function(){return this.__toString("Seq [","]")},toArray:function(){E(this.length);var t=Array(this.length||0);return t.length=this.forEach(function(e,n){t[n]=e}),t},fromEntrySeq:function(){var t=this,e=v();return e.length=t.length,e.entrySeq=function(){return t},e.__iterateUncached=function(e,n,r){return t.__iterate(function(t,n,r){return e(t[1],t[0],r)},n,r)},e},join:function(t){t=void 0!==t?""+t:",";var e="",n=0;return this.forEach(function(r,i){var u=i-n;n=i,e+=(1===u?t:A(t,u))+(null!=r?r:"")}),this.length&&this.length-1>n&&(e+=A(t,this.length-1-n)),e},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];var n=[this].concat(t).map(function(t){return We(t)}),r=this.__makeSequence();return r.length=n.reduce(function(t,e){return null!=t&&null!=e.length?t+e.length:void 0},0),r.__iterateUncached=function(t,e,r){if(r&&!this.length)return this.cacheResult().__iterate(t,e,r);for(var i,u=0,a=r&&this.length-1,s=n.length-1,h=0;s>=h&&!i;h++){var o=n[e?s-h:h];o instanceof Le||(o=o.valueSeq()),u+=o.__iterate(function(e,n,s){return n+=u,t(e,r?a-n:n,s)===!1?(i=!0,!1):void 0},e)}return u},r},reverse:function(t){var e=this,n=e.__makeSequence();return n.length=e.length,n.__reversedIndices=!!(t^e.__reversedIndices),n.__iterateUncached=function(n,r,i){return e.__iterate(n,!r,i^t)},n.reverse=function(n){return t===n?e:Ve.reverse.call(this,n)},n},valueSeq:function(){var t=ye.superCall(this,Le.prototype,"valueSeq",[]);return t.length=void 0,t},filter:function(t,e,n){var r=D(this,t,e,n,n);return n&&(r.length=this.length),r},get:function(t,e){return t=C(this,t),this.find(function(e,n){return n===t},null,e)},indexOf:function(t){return this.findIndex(function(e){return W(e,t)})},lastIndexOf:function(t){return this.reverse(!0).indexOf(t)},findIndex:function(t,e){var n=this.findKey(t,e);return null==n?-1:n},findLastIndex:function(t,e){return this.reverse(!0).findIndex(t,e)
<ide><path>src/Sequence.js
<ide> class Sequence {
<ide> reduce(reducer, initialReduction, thisArg) {
<ide> var reduction;
<ide> var useFirst;
<del> // Note: for some reason, the explicitly provided value `undefined` is to be
<del> // used as the initialReduction, while not providing a value, or providing
<del> // it as `null` implies there is no initialReduction. This seems universal
<del> // behavior by all modern browsers, although the ES5 spec is unclear:
<del> // http://es5.github.io/#x15.4.4.21
<del> if (arguments.length < 2 || initialReduction === null) {
<add> if (arguments.length < 2) {
<ide> useFirst = true;
<ide> } else {
<ide> reduction = initialReduction; | 3 |
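The behavioural point of this patch is subtle: previously an explicit `null` passed as `initialReduction` was treated the same as omitting the argument, so the reduction silently started from the first element. A standalone sketch of the corrected semantics (illustrative only, not the Immutable.js source):

```js
// Only an *omitted* initial value falls back to seeding from the first element;
// an explicitly passed null (or undefined) is kept as the initial reduction.
function reduce (items, reducer, ...rest) {
  let useFirst = rest.length === 0 // before the fix this was also true for rest[0] === null
  let reduction
  if (!useFirst) reduction = rest[0]
  for (const item of items) {
    if (useFirst) {
      reduction = item
      useFirst = false
    } else {
      reduction = reducer(reduction, item)
    }
  }
  return reduction
}

console.log(reduce([1, 2, 3], (acc, v) => acc + v))                           // 6 – seeded from the first element
console.log(reduce(['a', 'b'], (acc, v) => acc === null ? v : acc + v, null)) // 'ab' – null survives as the seed
```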
Text | Text | add api change to docs | bfe711bed342202ed0918a309b2000bb158c4eb7 | <ide><path>docs/reference/api/docker_remote_api.md
<ide> This section lists each version from latest to oldest. Each listing includes a
<ide> [Docker Remote API v1.22](docker_remote_api_v1.22.md) documentation
<ide>
<ide> * `GET /containers/json` supports filter `isolation` on Windows.
<add>* `GET /networks/(name)` now returns a `Name` field for each container attached to the network.
<ide>
<ide> ### v1.21 API changes
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.22.md
<ide> Content-Type: application/json
<ide> },
<ide> "Containers": {
<ide> "39b69226f9d79f5634485fb236a23b2fe4e96a0a94128390a7fbbcc167065867": {
<add> "Name": "mad_mclean",
<ide> "EndpointID": "ed2419a97c1d9954d05b46e462e7002ea552f216e9b136b80a7db8d98b442eda",
<ide> "MacAddress": "02:42:ac:11:00:02",
<ide> "IPv4Address": "172.17.0.2/16",
<ide><path>docs/reference/commandline/network_inspect.md
<ide> The `network inspect` command shows the containers, by id, in its results.
<ide> ```bash
<ide> $ sudo docker network inspect bridge
<ide> [
<del> {
<del> "name": "bridge",
<del> "id": "7fca4eb8c647e57e9d46c32714271e0c3f8bf8d17d346629e2820547b2d90039",
<del> "driver": "bridge",
<del> "containers": {
<del> "bda12f8922785d1f160be70736f26c1e331ab8aaf8ed8d56728508f2e2fd4727": {
<del> "endpoint": "e0ac95934f803d7e36384a2029b8d1eeb56cb88727aa2e8b7edfeebaa6dfd758",
<del> "mac_address": "02:42:ac:11:00:03",
<del> "ipv4_address": "172.17.0.3/16",
<del> "ipv6_address": ""
<del> },
<del> "f2870c98fd504370fb86e59f32cd0753b1ac9b69b7d80566ffc7192a82b3ed27": {
<del> "endpoint": "31de280881d2a774345bbfb1594159ade4ae4024ebfb1320cb74a30225f6a8ae",
<del> "mac_address": "02:42:ac:11:00:02",
<del> "ipv4_address": "172.17.0.2/16",
<del> "ipv6_address": ""
<del> }
<del> }
<del> }
<add> {
<add> "Name": "bridge",
<add> "Id": "b2b1a2cba717161d984383fd68218cf70bbbd17d328496885f7c921333228b0f",
<add> "Scope": "local",
<add> "Driver": "bridge",
<add> "IPAM": {
<add> "Driver": "default",
<add> "Config": [
<add> {
<add> "Subnet": "172.17.42.1/16",
<add> "Gateway": "172.17.42.1"
<add> }
<add> ]
<add> },
<add> "Containers": {
<add> "bda12f8922785d1f160be70736f26c1e331ab8aaf8ed8d56728508f2e2fd4727": {
<add> "Name": "container2",
<add> "EndpointID": "0aebb8fcd2b282abe1365979536f21ee4ceaf3ed56177c628eae9f706e00e019",
<add> "MacAddress": "02:42:ac:11:00:02",
<add> "IPv4Address": "172.17.0.2/16",
<add> "IPv6Address": ""
<add> },
<add> "f2870c98fd504370fb86e59f32cd0753b1ac9b69b7d80566ffc7192a82b3ed27": {
<add> "Name": "container1",
<add> "EndpointID": "a00676d9c91a96bbe5bcfb34f705387a33d7cc365bac1a29e4e9728df92d10ad",
<add> "MacAddress": "02:42:ac:11:00:01",
<add> "IPv4Address": "172.17.0.1/16",
<add> "IPv6Address": ""
<add> }
<add> },
<add> "Options": {
<add> "com.docker.network.bridge.default_bridge": "true",
<add> "com.docker.network.bridge.enable_icc": "true",
<add> "com.docker.network.bridge.enable_ip_masquerade": "true",
<add> "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0",
<add> "com.docker.network.bridge.name": "docker0",
<add> "com.docker.network.driver.mtu": "1500"
<add> }
<add> }
<ide> ]
<ide> ```
<ide>
<ide><path>man/docker-network-inspect.1.md
<ide> The `network inspect` command shows the containers, by id, in its results.
<ide> ```bash
<ide> $ sudo docker network inspect bridge
<ide> [
<del> {
<del> "name": "bridge",
<del> "id": "7fca4eb8c647e57e9d46c32714271e0c3f8bf8d17d346629e2820547b2d90039",
<del> "driver": "bridge",
<del> "containers": {
<del> "bda12f8922785d1f160be70736f26c1e331ab8aaf8ed8d56728508f2e2fd4727": {
<del> "endpoint": "e0ac95934f803d7e36384a2029b8d1eeb56cb88727aa2e8b7edfeebaa6dfd758",
<del> "mac_address": "02:42:ac:11:00:03",
<del> "ipv4_address": "172.17.0.3/16",
<del> "ipv6_address": ""
<del> },
<del> "f2870c98fd504370fb86e59f32cd0753b1ac9b69b7d80566ffc7192a82b3ed27": {
<del> "endpoint": "31de280881d2a774345bbfb1594159ade4ae4024ebfb1320cb74a30225f6a8ae",
<del> "mac_address": "02:42:ac:11:00:02",
<del> "ipv4_address": "172.17.0.2/16",
<del> "ipv6_address": ""
<del> }
<del> }
<del> }
<add> {
<add> "Name": "bridge",
<add> "Id": "b2b1a2cba717161d984383fd68218cf70bbbd17d328496885f7c921333228b0f",
<add> "Scope": "local",
<add> "Driver": "bridge",
<add> "IPAM": {
<add> "Driver": "default",
<add> "Config": [
<add> {
<add> "Subnet": "172.17.42.1/16",
<add> "Gateway": "172.17.42.1"
<add> }
<add> ]
<add> },
<add> "Containers": {
<add> "bda12f8922785d1f160be70736f26c1e331ab8aaf8ed8d56728508f2e2fd4727": {
<add> "Name": "container2",
<add> "EndpointID": "0aebb8fcd2b282abe1365979536f21ee4ceaf3ed56177c628eae9f706e00e019",
<add> "MacAddress": "02:42:ac:11:00:02",
<add> "IPv4Address": "172.17.0.2/16",
<add> "IPv6Address": ""
<add> },
<add> "f2870c98fd504370fb86e59f32cd0753b1ac9b69b7d80566ffc7192a82b3ed27": {
<add> "Name": "container1",
<add> "EndpointID": "a00676d9c91a96bbe5bcfb34f705387a33d7cc365bac1a29e4e9728df92d10ad",
<add> "MacAddress": "02:42:ac:11:00:01",
<add> "IPv4Address": "172.17.0.1/16",
<add> "IPv6Address": ""
<add> }
<add> },
<add> "Options": {
<add> "com.docker.network.bridge.default_bridge": "true",
<add> "com.docker.network.bridge.enable_icc": "true",
<add> "com.docker.network.bridge.enable_ip_masquerade": "true",
<add> "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0",
<add> "com.docker.network.bridge.name": "docker0",
<add> "com.docker.network.driver.mtu": "1500"
<add> }
<add> }
<ide> ]
<ide> ```
<ide>
<del>
<ide> # OPTIONS
<ide>
<ide> **--help** | 4 |
Mixed | Text | add allowany permission | d995742afc09ff8d387751a6fe47b9686845740b | <ide><path>docs/api-guide/permissions.md
<ide> The default permission policy may be set globally, using the `DEFAULT_PERMISSION
<ide> )
<ide> }
<ide>
<add>If not specified, this setting defaults to allowing unrestricted access:
<add>
<add> 'DEFAULT_PERMISSION_CLASSES': (
<add> 'rest_framework.permissions.AllowAny',
<add> )
<add>
<ide> You can also set the authentication policy on a per-view basis, using the `APIView` class based views.
<ide>
<ide> class ExampleView(APIView):
<ide> Or, if you're using the `@api_view` decorator with function based views.
<ide>
<ide> # API Reference
<ide>
<add>## AllowAny
<add>
<add>The `AllowAny` permission class will allow unrestricted access, **regardless of whether the request was authenticated or unauthenticated**.
<add>
<add>This permission is not strictly required, since you can achieve the same result by using an empty list or tuple for the permissions setting, but you may find it useful to specify this class because it makes the intention explicit.
<add>
<ide> ## IsAuthenticated
<ide>
<ide> The `IsAuthenticated` permission class will deny permission to any unauthenticated user, and allow permission otherwise.
<ide><path>docs/api-guide/settings.md
<ide> Default:
<ide>
<ide> A list or tuple of permission classes, that determines the default set of permissions checked at the start of a view.
<ide>
<del>Default: `()`
<add>Default:
<add>
<add> (
<add> 'rest_framework.permissions.AllowAny',
<add> )
<ide>
<ide> ## DEFAULT_THROTTLE_CLASSES
<ide>
<ide><path>rest_framework/settings.py
<ide> 'rest_framework.authentication.SessionAuthentication',
<ide> 'rest_framework.authentication.BasicAuthentication'
<ide> ),
<del> 'DEFAULT_PERMISSION_CLASSES': (),
<del> 'DEFAULT_THROTTLE_CLASSES': (),
<add> 'DEFAULT_PERMISSION_CLASSES': (
<add> 'rest_framework.permissions.AllowAny',
<add> ),
<add> 'DEFAULT_THROTTLE_CLASSES': (
<add> ),
<add>
<ide> 'DEFAULT_CONTENT_NEGOTIATION_CLASS':
<ide> 'rest_framework.negotiation.DefaultContentNegotiation',
<del>
<ide> 'DEFAULT_MODEL_SERIALIZER_CLASS':
<ide> 'rest_framework.serializers.ModelSerializer',
<ide> 'DEFAULT_PAGINATION_SERIALIZER_CLASS': | 3 |
Go | Go | enable fluentd logdriver | cff9679c6cdd0b7f3dc691ed00f294ac93274c1d | <ide><path>daemon/logdrivers_windows.go
<ide> import (
<ide> // therefore they register themselves to the logdriver factory.
<ide> _ "github.com/docker/docker/daemon/logger/awslogs"
<ide> _ "github.com/docker/docker/daemon/logger/etwlogs"
<add> _ "github.com/docker/docker/daemon/logger/fluentd"
<ide> _ "github.com/docker/docker/daemon/logger/jsonfilelog"
<ide> _ "github.com/docker/docker/daemon/logger/logentries"
<ide> _ "github.com/docker/docker/daemon/logger/splunk" | 1 |
Python | Python | create copy of inputs if list | c2321e61e1732f7a27841eca36cdaf34ed5d26dd | <ide><path>keras/engine/topology.py
<ide> def __call__(self, inputs, **kwargs):
<ide> ValueError: in case the layer is missing shape information
<ide> for its `build` call.
<ide> """
<add> if type(inputs) is list:
<add> inputs = inputs[:]
<ide> with K.name_scope(self.name):
<ide> # Handle laying building (weight creating, input spec locking).
<ide> if not self.built: | 1 |
Ruby | Ruby | fix github version parser with multidigit number | 04855ddd484389bc8292a4d82106185466a3eb89 | <ide><path>Library/Homebrew/extend/pathname.rb
<ide> def version
<ide>
<ide> # github tarballs are special
<ide> # we only support numbered tagged downloads
<del> %r[github.com/.*/tarball/v?((\d\.)+\d)$].match to_s
<add> %r[github.com/.*/tarball/v?((\d\.)+\d+)$].match to_s
<ide> return $1 if $1
<ide>
<ide> # eg. boost_1_39_0
<ide><path>Library/Homebrew/test/test_versions.rb
<ide> def test_version_github
<ide> assert_equal '1.0.5', r.version
<ide> end
<ide>
<add> def test_version_github_with_high_patch_number
<add> r=MockFormula.new "http://github.com/lloyd/yajl/tarball/v1.2.34"
<add> assert_equal '1.2.34', r.version
<add> end
<add>
<ide> def test_yet_another_version
<ide> r=MockFormula.new "http://example.com/mad-0.15.1b.tar.gz"
<ide> assert_equal '0.15.1b', r.version | 2 |
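The one-character change above (`\d` becomes `\d+` before the `$` anchor) is what lets a trailing multi-digit version component match. The same two patterns behave identically as JavaScript regular expressions, so a quick standalone check of the fix might look like this (the URL comes from the new test case):

```js
// Old pattern: the final component may only be a single digit before end-of-string.
const oldRe = /github\.com\/.*\/tarball\/v?((\d\.)+\d)$/
// New pattern: the final component may be one or more digits.
const newRe = /github\.com\/.*\/tarball\/v?((\d\.)+\d+)$/

const url = 'http://github.com/lloyd/yajl/tarball/v1.2.34'
console.log(oldRe.exec(url))     // null – "34" cannot satisfy a single trailing \d
console.log(newRe.exec(url)[1])  // '1.2.34'
```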
Javascript | Javascript | fix a typo and remove unwanted code | e3d68ff401fc21f99757bde29355c577dab2847d | <ide><path>server/index.js
<ide> import http, { STATUS_CODES } from 'http'
<ide> import {
<ide> renderToHTML,
<ide> renderErrorToHTML,
<del> renderJSON,
<del> renderErrorJSON,
<ide> sendHTML,
<ide> serveStatic,
<ide> renderScript,
<ide> export default class Server {
<ide> const error = new Error('INVALID_BUILD_ID')
<ide> const customFields = { buildIdMismatched: true }
<ide>
<del> await renderScriptError(req, res, page, error, customFields, this.renderOpts)
<del> return
<add> return await renderScriptError(req, res, page, error, customFields, this.renderOpts)
<ide> }
<ide>
<ide> if (this.dev) {
<add> try {
<add> await this.hotReloader.ensurePage(page)
<add> } catch (error) {
<add> return await renderScriptError(req, res, page, error, {}, this.renderOpts)
<add> }
<add>
<ide> const compilationErr = this.getCompilationError(page)
<ide> if (compilationErr) {
<ide> const customFields = { buildError: true }
<del> await renderScriptError(req, res, page, compilationErr, customFields, this.renderOpts)
<del> return
<add> return await renderScriptError(req, res, page, compilationErr, customFields, this.renderOpts)
<ide> }
<ide> }
<ide>
<ide> export default class Server {
<ide> return this.renderError(null, req, res, pathname, query)
<ide> }
<ide>
<del> async renderJSON (req, res, page) {
<del> if (this.dev) {
<del> const compilationErr = this.getCompilationError(page)
<del> if (compilationErr) {
<del> return this.renderErrorJSON(compilationErr, req, res)
<del> }
<del> }
<del>
<del> try {
<del> return await renderJSON(req, res, page, this.renderOpts)
<del> } catch (err) {
<del> if (err.code === 'ENOENT') {
<del> res.statusCode = 404
<del> return this.renderErrorJSON(null, req, res)
<del> } else {
<del> if (!this.quiet) console.error(err)
<del> res.statusCode = 500
<del> return this.renderErrorJSON(err, req, res)
<del> }
<del> }
<del> }
<del>
<del> async renderErrorJSON (err, req, res) {
<del> if (this.dev) {
<del> const compilationErr = this.getCompilationError('/_error')
<del> if (compilationErr) {
<del> res.statusCode = 500
<del> return renderErrorJSON(compilationErr, req, res, this.renderOpts)
<del> }
<del> }
<del>
<del> return renderErrorJSON(err, req, res, this.renderOpts)
<del> }
<del>
<ide> async serveStatic (req, res, path) {
<ide> try {
<ide> return await serveStatic(req, res, path)
<ide> export default class Server {
<ide> }
<ide> }
<ide>
<del> serveScript (req, res, path) {
<del> return serveStatic(req, res, path)
<del> }
<del>
<ide> readBuildId () {
<ide> const buildIdPath = join(this.dir, '.next', 'BUILD_ID')
<ide> const buildId = fs.readFileSync(buildIdPath, 'utf8')
<ide><path>server/render.js
<ide> async function doRender (req, res, pathname, query, {
<ide> return '<!DOCTYPE html>' + renderToStaticMarkup(doc)
<ide> }
<ide>
<del>export async function renderJSON (req, res, page, { dir = process.cwd(), hotReloader } = {}) {
<del> await ensurePage(page, { dir, hotReloader })
<del> const pagePath = await resolvePath(join(dir, '.next', 'bundles', 'pages', page))
<del> return serveStatic(req, res, pagePath)
<del>}
<del>
<ide> export async function renderScript (req, res, page, opts) {
<ide> try {
<del> if (opts.dev) {
<del> await opts.hotReloader.ensurePage(page)
<del> }
<del>
<ide> const path = join(opts.dir, '.next', 'client-bundles', 'pages', page)
<ide> const realPath = await resolvePath(path)
<ide> await serveStatic(req, res, realPath)
<ide> } catch (err) {
<ide> if (err.code === 'ENOENT') {
<del> res.setHeader('Content-Type', 'text/javascript')
<del> res.end(`
<del> var error = new Error('Page not exists: ${page}')
<del> error.pageNotFound = true
<del> error.statusCode = 404
<del> NEXT_PAGE_LOADER.registerPage('${page}', error)
<del> `)
<add> renderScriptError(req, res, page, err, {}, opts)
<ide> return
<ide> }
<ide>
<ide> export async function renderScript (req, res, page, opts) {
<ide> }
<ide>
<ide> export async function renderScriptError (req, res, page, error, customFields, opts) {
<add> if (error.code === 'ENOENT') {
<add> res.setHeader('Content-Type', 'text/javascript')
<add> res.end(`
<add> var error = new Error('Page not exists: ${page}')
<add> error.pageNotFound = true
<add> error.statusCode = 404
<add> NEXT_PAGE_LOADER.registerPage('${page}', error)
<add> `)
<add> return
<add> }
<add>
<ide> res.setHeader('Content-Type', 'text/javascript')
<ide> const errorJson = {
<ide> ...errorToJSON(error),
<ide> export async function renderScriptError (req, res, page, error, customFields, op
<ide> `)
<ide> }
<ide>
<del>export async function renderErrorJSON (err, req, res, { dir = process.cwd(), dev = false } = {}) {
<del> const component = await readPage(join(dir, '.next', 'bundles', 'pages', '_error'))
<del>
<del> sendJSON(res, {
<del> component,
<del> err: err && dev ? errorToJSON(err) : null
<del> }, req.method)
<del>}
<del>
<ide> export function sendHTML (res, html, method) {
<ide> if (res.finished) return
<ide> | 2 |
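After this refactor, every failure path for page scripts — a missing bundle (ENOENT), a stale build ID, a dev-time compilation error — funnels through `renderScriptError`, which answers with a small script that registers the error with the client-side `NEXT_PAGE_LOADER` rather than an ordinary error response. A rough, self-contained sketch of that idea (function names here are invented for illustration; only `errorToJSON` and `NEXT_PAGE_LOADER.registerPage` come from the diff):

```js
// Serialise an error into a script body that the client-side page loader consumes.
function errorToJSON (err) {
  return { name: err.name, message: err.message, statusCode: err.statusCode }
}

function scriptErrorBody (page, error, customFields = {}) {
  const payload = JSON.stringify(Object.assign({}, errorToJSON(error), customFields))
  return `
    var error = ${payload}
    NEXT_PAGE_LOADER.registerPage('${page}', error)
  `
}

// The ENOENT branch builds a 404-flavoured error before handing it over:
const notFound = Object.assign(new Error('Page not exists: /about'), {
  pageNotFound: true,
  statusCode: 404
})
console.log(scriptErrorBody('/about', notFound))
```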
Text | Text | add licensing information to readme | a4a023580bc89fb34e5f80ca472fc70267546eaf | <ide><path>README.md
<ide> It is your responsibility to ensure that your use and/or transfer does not
<ide> violate applicable laws.
<ide>
<ide> For more information, please see http://www.bis.doc.gov
<add>
<add>
<add>Licensing
<add>=========
<add>Docker is licensed under the Apache License, Version 2.0. See LICENSE for full license text.
<add> | 1 |
PHP | PHP | add forge to links | c309b4fda02a3bc327bb06dd8ee1ae94207330e2 | <ide><path>resources/views/welcome.blade.php
<ide> <div class="links">
<ide> <a href="https://laravel.com/docs">Documentation</a>
<ide> <a href="https://laracasts.com">Laracasts</a>
<add> <a href="https://forge.laravel.com">Forge</a>
<ide> <a href="https://github.com/laravel/laravel">GitHub</a>
<ide> <a href="https://twitter.com/laravelphp">Twitter</a>
<ide> </div> | 1 |
PHP | PHP | fix more usage of config() | db7943d14ed05f060f35b67a86915ae24cc49031 | <ide><path>tests/TestCase/Http/ClientTest.php
<ide> public function testConstructConfig()
<ide> 'host' => 'example.org',
<ide> ];
<ide> $http = new Client($config);
<del> $result = $http->config();
<add> $result = $http->getConfig();
<ide> foreach ($config as $key => $val) {
<ide> $this->assertEquals($val, $result[$key]);
<ide> }
<ide>
<del> $result = $http->config([
<add> $result = $http->setConfig([
<ide> 'auth' => ['username' => 'mark', 'password' => 'secret']
<ide> ]);
<ide> $this->assertSame($result, $http);
<ide>
<del> $result = $http->config();
<add> $result = $http->getConfig();
<ide> $expected = [
<ide> 'scheme' => 'http',
<ide> 'host' => 'example.org',
<ide><path>tests/TestCase/Mailer/EmailTest.php
<ide> public function testConfig()
<ide> 'to' => '[email protected]',
<ide> 'from' => '[email protected]',
<ide> ];
<del> Email::config('test', $settings);
<del> $this->assertEquals($settings, Email::config('test'), 'Should be the same.');
<add> Email::setConfig('test', $settings);
<add> $this->assertEquals($settings, Email::getConfig('test'), 'Should be the same.');
<ide>
<ide> $email = new Email('test');
<ide> $this->assertContains($settings['to'], $email->getTo());
<ide> public function testConfigErrorOnDuplicate()
<ide> 'to' => '[email protected]',
<ide> 'from' => '[email protected]',
<ide> ];
<del> Email::config('test', $settings);
<del> Email::config('test', $settings);
<add> Email::setConfig('test', $settings);
<add> Email::setConfig('test', $settings);
<ide> }
<ide>
<ide> /**
<ide> public function testDefaultProfile()
<ide> {
<ide> $config = ['test' => 'ok', 'test2' => true];
<ide> Configure::write('Email.default', $config);
<del> Email::config(Configure::consume('Email'));
<add> Email::setConfig(Configure::consume('Email'));
<ide> $Email = new Email();
<ide> $this->assertSame($Email->getProfile(), $config);
<ide> Configure::delete('Email');
<ide> public function testUseConfigString()
<ide> 'theme' => 'TestTheme',
<ide> 'helpers' => ['Html', 'Form'],
<ide> ];
<del> Email::config('test', $config);
<add> Email::setConfig('test', $config);
<ide> $this->Email->setProfile('test');
<ide>
<ide> $result = $this->Email->getTo();
<ide> public function testSendWithLog()
<ide> )
<ide> );
<ide>
<del> Log::config('email', $log);
<add> Log::setConfig('email', $log);
<ide>
<ide> $this->Email->setTransport('debug');
<ide> $this->Email->setTo('[email protected]');
<ide> public function testSendWithLogAndScope()
<ide> )
<ide> );
<ide>
<del> Log::config('email', $log);
<add> Log::setConfig('email', $log);
<ide>
<ide> $this->Email->setTransport('debug');
<ide> $this->Email->setTo('[email protected]');
<ide> public function testSendRenderNoLayout()
<ide> $this->Email->setFrom('[email protected]');
<ide> $this->Email->setTo(['[email protected]' => 'You']);
<ide> $this->Email->setSubject('My title');
<del> $this->Email->config(['empty']);
<add> $this->Email->setConfig(['empty']);
<ide> $this->Email->setTemplate('default');
<ide> $this->Email->setLayout(null);
<ide> $result = $this->Email->send('message body.');
<ide> public function testConstructWithConfigString()
<ide> 'subject' => 'Test mail subject',
<ide> 'transport' => 'debug',
<ide> ];
<del> Email::config('test', $configs);
<add> Email::setConfig('test', $configs);
<ide>
<ide> $this->Email = new Email('test');
<ide>
<ide> public function testMockTransport()
<ide> $mock = $this->getMockBuilder('\Cake\Mailer\AbstractTransport')->getMock();
<ide> $config = ['from' => '[email protected]', 'transport' => 'default'];
<ide>
<del> Email::config('default', $config);
<add> Email::setConfig('default', $config);
<ide> Email::setConfigTransport('default', $mock);
<ide>
<ide> $em = new Email('default');
<ide><path>tests/TestCase/Mailer/Transport/MailTransportTest.php
<ide> public function setUp()
<ide> $this->MailTransport = $this->getMockBuilder('Cake\Mailer\Transport\MailTransport')
<ide> ->setMethods(['_mail'])
<ide> ->getMock();
<del> $this->MailTransport->config(['additionalParameters' => '-f']);
<add> $this->MailTransport->setConfig(['additionalParameters' => '-f']);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Mailer/Transport/SmtpTransportTest.php
<ide> public function setUp()
<ide>
<ide> $this->SmtpTransport = new SmtpTestTransport();
<ide> $this->SmtpTransport->setSocket($this->socket);
<del> $this->SmtpTransport->config(['client' => 'localhost']);
<add> $this->SmtpTransport->setConfig(['client' => 'localhost']);
<ide> }
<ide>
<ide> /**
<ide> public function testConnectEhlo()
<ide> */
<ide> public function testConnectEhloTls()
<ide> {
<del> $this->SmtpTransport->config(['tls' => true]);
<add> $this->SmtpTransport->setConfig(['tls' => true]);
<ide> $this->socket->expects($this->any())->method('connect')->will($this->returnValue(true));
<ide> $this->socket->expects($this->at(1))->method('read')->will($this->returnValue("220 Welcome message\r\n"));
<ide> $this->socket->expects($this->at(2))->method('write')->with("EHLO localhost\r\n");
<ide> public function testConnectEhloTls()
<ide> */
<ide> public function testConnectEhloTlsOnNonTlsServer()
<ide> {
<del> $this->SmtpTransport->config(['tls' => true]);
<add> $this->SmtpTransport->setConfig(['tls' => true]);
<ide> $this->socket->expects($this->any())->method('connect')->will($this->returnValue(true));
<ide> $this->socket->expects($this->at(1))->method('read')->will($this->returnValue("220 Welcome message\r\n"));
<ide> $this->socket->expects($this->at(2))->method('write')->with("EHLO localhost\r\n");
<ide> public function testConnectEhloTlsOnNonTlsServer()
<ide> */
<ide> public function testConnectEhloNoTlsOnRequiredTlsServer()
<ide> {
<del> $this->SmtpTransport->config(['tls' => false, 'username' => 'user', 'password' => 'pass']);
<add> $this->SmtpTransport->setConfig(['tls' => false, 'username' => 'user', 'password' => 'pass']);
<ide> $this->socket->expects($this->any())->method('connect')->will($this->returnValue(true));
<ide> $this->socket->expects($this->at(1))->method('read')->will($this->returnValue("220 Welcome message\r\n"));
<ide> $this->socket->expects($this->at(2))->method('write')->with("EHLO localhost\r\n");
<ide> public function testAuth()
<ide> $this->socket->expects($this->at(3))->method('read')->will($this->returnValue("334 Pass\r\n"));
<ide> $this->socket->expects($this->at(4))->method('write')->with("c3Rvcnk=\r\n");
<ide> $this->socket->expects($this->at(5))->method('read')->will($this->returnValue("235 OK\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide> $this->SmtpTransport->auth();
<ide> }
<ide>
<ide> public function testAuthNotRecognized()
<ide> $this->socket->expects($this->at(0))->method('write')->with("AUTH LOGIN\r\n");
<ide> $this->socket->expects($this->at(1))->method('read')
<ide> ->will($this->returnValue("500 5.3.3 Unrecognized command\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide> $this->SmtpTransport->auth();
<ide> }
<ide>
<ide> public function testAuthNotImplemented()
<ide> $this->socket->expects($this->at(0))->method('write')->with("AUTH LOGIN\r\n");
<ide> $this->socket->expects($this->at(1))->method('read')
<ide> ->will($this->returnValue("502 5.3.3 Command not implemented\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide> $this->SmtpTransport->auth();
<ide> }
<ide>
<ide> public function testAuthBadSequence()
<ide> $this->socket->expects($this->at(0))->method('write')->with("AUTH LOGIN\r\n");
<ide> $this->socket->expects($this->at(1))
<ide> ->method('read')->will($this->returnValue("503 5.5.1 Already authenticated\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide> $this->SmtpTransport->auth();
<ide> }
<ide>
<ide> public function testAuthBadUsername()
<ide> $this->socket->expects($this->at(2))->method('write')->with("bWFyaw==\r\n");
<ide> $this->socket->expects($this->at(3))->method('read')
<ide> ->will($this->returnValue("535 5.7.8 Authentication failed\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide>
<ide> $e = null;
<ide> try {
<ide> public function testAuthBadPassword()
<ide> $this->socket->expects($this->at(3))->method('read')->will($this->returnValue("334 Pass\r\n"));
<ide> $this->socket->expects($this->at(4))->method('write')->with("c3Rvcnk=\r\n");
<ide> $this->socket->expects($this->at(5))->method('read')->will($this->returnValue("535 5.7.8 Authentication failed\r\n"));
<del> $this->SmtpTransport->config(['username' => 'mark', 'password' => 'story']);
<add> $this->SmtpTransport->setConfig(['username' => 'mark', 'password' => 'story']);
<ide>
<ide> $e = null;
<ide> try {
<ide> public function testQuit()
<ide> */
<ide> public function testEmptyConfigArray()
<ide> {
<del> $this->SmtpTransport->config([
<add> $this->SmtpTransport->setConfig([
<ide> 'client' => 'myhost.com',
<ide> 'port' => 666
<ide> ]);
<del> $expected = $this->SmtpTransport->config();
<add> $expected = $this->SmtpTransport->getConfig();
<ide>
<ide> $this->assertEquals(666, $expected['port']);
<ide>
<del> $this->SmtpTransport->config([]);
<del> $result = $this->SmtpTransport->config();
<add> $this->SmtpTransport->setConfig([]);
<add> $result = $this->SmtpTransport->getConfig();
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide>
<ide> public function testExplicitDisconnectNotConnected()
<ide> */
<ide> public function testKeepAlive()
<ide> {
<del> $this->SmtpTransport->config(['keepAlive' => true]);
<add> $this->SmtpTransport->setConfig(['keepAlive' => true]);
<ide>
<ide> $email = $this->getMockBuilder('Cake\Mailer\Email')
<ide> ->setMethods(['message'])
<ide><path>tests/TestCase/Network/Session/CacheSessionTest.php
<ide> class CacheSessionTest extends TestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<del> Cache::config(['session_test' => ['engine' => 'File']]);
<add> Cache::setConfig(['session_test' => ['engine' => 'File']]);
<ide> $this->storage = new CacheSession(['config' => 'session_test']);
<ide> }
<ide>
<ide><path>tests/TestCase/Network/SocketTest.php
<ide> public function tearDown()
<ide> public function testConstruct()
<ide> {
<ide> $this->Socket = new Socket();
<del> $config = $this->Socket->config();
<add> $config = $this->Socket->getConfig();
<ide> $this->assertSame($config, [
<ide> 'persistent' => false,
<ide> 'host' => 'localhost',
<ide> public function testConstruct()
<ide> $this->Socket->reset();
<ide> $this->Socket->__construct(['host' => 'foo-bar']);
<ide> $config['host'] = 'foo-bar';
<del> $this->assertSame($this->Socket->config(), $config);
<add> $this->assertSame($this->Socket->getConfig(), $config);
<ide>
<ide> $this->Socket = new Socket(['host' => 'www.cakephp.org', 'port' => 23, 'protocol' => 'udp']);
<del> $config = $this->Socket->config();
<add> $config = $this->Socket->getConfig();
<ide>
<ide> $config['host'] = 'www.cakephp.org';
<ide> $config['port'] = 23;
<ide> $config['protocol'] = 'udp';
<ide>
<del> $this->assertSame($this->Socket->config(), $config);
<add> $this->assertSame($this->Socket->getConfig(), $config);
<ide> }
<ide>
<ide> /**
<ide> public static function invalidConnections()
<ide> */
<ide> public function testInvalidConnection($data)
<ide> {
<del> $this->Socket->config($data);
<add> $this->Socket->setConfig($data);
<ide> $this->Socket->connect();
<ide> }
<ide>
<ide> public function testReset()
<ide> ];
<ide> $this->assertEquals(
<ide> $expected,
<del> $anotherSocket->config(),
<add> $anotherSocket->getConfig(),
<ide> 'Reset should cause config to return the defaults defined in _defaultConfig'
<ide> );
<ide> }
<ide> public function testConnectProtocolInHost()
<ide> $socket = new Socket($configSslTls);
<ide> try {
<ide> $socket->connect();
<del> $this->assertEquals('smtp.gmail.com', $socket->config('host'));
<del> $this->assertEquals('ssl', $socket->config('protocol'));
<add> $this->assertEquals('smtp.gmail.com', $socket->getConfig('host'));
<add> $this->assertEquals('ssl', $socket->getConfig('protocol'));
<ide> } catch (SocketException $e) {
<ide> $this->markTestSkipped('Cannot test network, skipping.');
<ide> }
<ide> public function testConfigContext()
<ide> $this->assertFalse($result['ssl']['allow_self_signed']);
<ide> $this->assertEquals(5, $result['ssl']['verify_depth']);
<ide> $this->assertEquals('smtp.gmail.com', $result['ssl']['CN_match']);
<del> $this->assertArrayNotHasKey('ssl_verify_peer', $socket->config());
<del> $this->assertArrayNotHasKey('ssl_allow_self_signed', $socket->config());
<del> $this->assertArrayNotHasKey('ssl_verify_host', $socket->config());
<del> $this->assertArrayNotHasKey('ssl_verify_depth', $socket->config());
<add> $this->assertArrayNotHasKey('ssl_verify_peer', $socket->getConfig());
<add> $this->assertArrayNotHasKey('ssl_allow_self_signed', $socket->getConfig());
<add> $this->assertArrayNotHasKey('ssl_verify_host', $socket->getConfig());
<add> $this->assertArrayNotHasKey('ssl_verify_depth', $socket->getConfig());
<ide> }
<ide> }
<ide><path>tests/TestCase/Routing/DispatcherFactoryTest.php
<ide> public function testAddFilterWithOptions()
<ide> $config = ['config' => 'value', 'priority' => 999];
<ide> $result = DispatcherFactory::add('Routing', $config);
<ide> $this->assertInstanceOf('Cake\Routing\Filter\RoutingFilter', $result);
<del> $this->assertEquals($config['config'], $result->config('config'));
<del> $this->assertEquals($config['priority'], $result->config('priority'));
<add> $this->assertEquals($config['config'], $result->getConfig('config'));
<add> $this->assertEquals($config['priority'], $result->getConfig('priority'));
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Routing/DispatcherFilterTest.php
<ide> class DispatcherFilterTest extends TestCase
<ide> public function testConstructConfig()
<ide> {
<ide> $filter = new DispatcherFilter(['one' => 'value', 'on' => '/blog']);
<del> $this->assertEquals('value', $filter->config('one'));
<add> $this->assertEquals('value', $filter->getConfig('one'));
<ide> }
<ide>
<ide> /**
<ide> public function testConstructConfig()
<ide> public function testConstructPriority()
<ide> {
<ide> $filter = new DispatcherFilter();
<del> $this->assertEquals(10, $filter->config('priority'));
<add> $this->assertEquals(10, $filter->getConfig('priority'));
<ide>
<ide> $filter = new DispatcherFilter(['priority' => 100]);
<del> $this->assertEquals(100, $filter->config('priority'));
<add> $this->assertEquals(100, $filter->getConfig('priority'));
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Shell/CacheShellTest.php
<ide> class CacheShellTest extends ConsoleIntegrationTestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<del> Cache::config('test', ['engine' => 'File', 'path' => CACHE]);
<add> Cache::setConfig('test', ['engine' => 'File', 'path' => CACHE]);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Shell/Helper/TableHelperTest.php
<ide> public function testOutputWithoutHeaderStyle()
<ide> ['short', 'Longish thing', 'short'],
<ide> ['Longer thing', 'short', 'Longest Value'],
<ide> ];
<del> $this->helper->config(['headerStyle' => false]);
<add> $this->helper->setConfig(['headerStyle' => false]);
<ide> $this->helper->output($data);
<ide> $expected = [
<ide> '+--------------+---------------+---------------+',
<ide> public function testOutputWithDifferentHeaderStyle()
<ide> ['short', 'Longish thing', 'short'],
<ide> ['Longer thing', 'short', 'Longest Value'],
<ide> ];
<del> $this->helper->config(['headerStyle' => 'error']);
<add> $this->helper->setConfig(['headerStyle' => 'error']);
<ide> $this->helper->output($data);
<ide> $expected = [
<ide> '+--------------+---------------+---------------+',
<ide> public function testOutputWithoutHeaders()
<ide> ['short', 'Longish thing', 'short'],
<ide> ['Longer thing', 'short', 'Longest Value'],
<ide> ];
<del> $this->helper->config(['headers' => false]);
<add> $this->helper->setConfig(['headers' => false]);
<ide> $this->helper->output($data);
<ide> $expected = [
<ide> '+--------------+---------------+---------------+',
<ide> public function testOutputWithRowSeparator()
<ide> ['short', 'Longish thing', 'short'],
<ide> ['Longer thing', 'short', 'Longest Value']
<ide> ];
<del> $this->helper->config(['rowSeparator' => true]);
<add> $this->helper->setConfig(['rowSeparator' => true]);
<ide> $this->helper->output($data);
<ide> $expected = [
<ide> '+--------------+---------------+---------------+',
<ide> public function testOutputWithRowSeparatorAndHeaders()
<ide> ['short', 'Longish thing', 'short'],
<ide> ['Longer thing', 'short', 'Longest Value'],
<ide> ];
<del> $this->helper->config(['rowSeparator' => true]);
<add> $this->helper->setConfig(['rowSeparator' => true]);
<ide> $this->helper->output($data);
<ide> $expected = [
<ide> '+--------------+---------------+---------------+',
<ide> public function testOutputWithHeaderAndNoData()
<ide> */
<ide> public function testOutputHeaderDisabledNoData()
<ide> {
<del> $this->helper->config(['header' => false]);
<add> $this->helper->setConfig(['header' => false]);
<ide> $this->helper->output([]);
<ide> $this->assertEquals([], $this->stub->messages());
<ide> }
<ide><path>tests/TestCase/TestSuite/FixtureManagerTest.php
<ide> public function testLogSchemaWithDebug()
<ide>
<ide> $this->manager->setDebug(true);
<ide> $buffer = new ConsoleOutput();
<del> Log::config('testQueryLogger', [
<add> Log::setConfig('testQueryLogger', [
<ide> 'className' => 'Console',
<ide> 'stream' => $buffer
<ide> ]);
<ide> public function testResetDbIfTableExists()
<ide>
<ide> $this->manager->setDebug(true);
<ide> $buffer = new ConsoleOutput();
<del> Log::config('testQueryLogger', [
<add> Log::setConfig('testQueryLogger', [
<ide> 'className' => 'Console',
<ide> 'stream' => $buffer
<ide> ]);
<ide> public function testLoadConnectionAliasUsage()
<ide> ->method('execute')
<ide> ->will($this->returnValue($statement));
<ide>
<del> ConnectionManager::config('other', $other);
<del> ConnectionManager::config('test_other', $testOther);
<add> ConnectionManager::setConfig('other', $other);
<add> ConnectionManager::setConfig('test_other', $testOther);
<ide>
<ide> // Connect the alias making test_other an alias of other.
<ide> ConnectionManager::alias('test_other', 'other');
<ide><path>tests/test_app/TestApp/Controller/CookieComponentTestController.php
<ide> class CookieComponentTestController extends Controller
<ide> public function view($key = null)
<ide> {
<ide> if (isset($key)) {
<del> $this->Cookie->config('key', $key);
<add> $this->Cookie->setConfig('key', $key);
<ide> }
<ide> $this->set('ValueFromRequest', $this->request->cookie('NameOfCookie'));
<ide> $this->set('ValueFromCookieComponent', $this->Cookie->read('NameOfCookie'));
<ide> public function set_cookie($key = null)
<ide> {
<ide> $this->autoRender = false;
<ide> if (isset($key)) {
<del> $this->Cookie->config('key', $key);
<add> $this->Cookie->setConfig('key', $key);
<ide> }
<ide> $this->Cookie->write('NameOfCookie', 'abc');
<ide> } | 12 |
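The test updates in the patch above all follow one migration: a dual-purpose `config()` call (getter when given a key, setter when given data) is split into explicit `getConfig()`/`setConfig()` calls. A hypothetical Python analogue of that API split, purely to illustrate the shape of the change (not CakePHP code; the class and method names are made up):

```python
class Configurable:
    """Toy config holder with an explicit getter/setter split."""

    def __init__(self, config=None):
        self._config = dict(config or {})

    def get_config(self, key=None, default=None):
        # No key: return a copy of the whole mapping, like getConfig() without arguments.
        if key is None:
            return dict(self._config)
        return self._config.get(key, default)

    def set_config(self, key, value=None):
        # Accept either a mapping of settings or a single key/value pair.
        if isinstance(key, dict):
            self._config.update(key)
        else:
            self._config[key] = value
        return self  # returning self allows chained calls


helper = Configurable({"headerStyle": "info"})
helper.set_config({"headerStyle": False, "rowSeparator": True})
print(helper.get_config("rowSeparator"))  # True
print(helper.get_config())                # the full config dict
```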
Text | Text | add v3.17.0-beta.2 to changelog | 99e796a0ae52bc161b8541d0e481d27b7f21164c | <ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<add>### v3.17.0-beta.2 (January 29, 2020)
<add>
<add>- [#18691](https://github.com/emberjs/ember.js/pull/18691) [BUGFIX] Updated blueprints for component and helper tests to output the correct hbs import statement
<add>- [#18694](https://github.com/emberjs/ember.js/pull/18694) [BUGFIX] Ensure tag updates are buffered, remove error message
<add>- [#18709](https://github.com/emberjs/ember.js/pull/18709) [BUGFIX] Fix `this` in `@tracked` initializer
<add>
<ide> ### v3.17.0-beta.1 (January 20, 2020)
<ide>
<ide> - [#18688](https://github.com/emberjs/ember.js/pull/18688) / [#18621](https://github.com/emberjs/ember.js/pull/18621) Updates Glimmer-VM to v0.46 | 1 |
PHP | PHP | use array instead of boolean | d0a26a3565cd67b306452deef4877f86cd352750 | <ide><path>src/View/Helper/PaginatorHelper.php
<ide> public function sort($key, $title = null, array $options = [])
<ide> *
<ide> * @param array $options Pagination/URL options array
<ide> * @param string|null $model Which model to paginate on
<del> * @param array|bool $urlOptions Array of options or bool `fullBase` for BC reasons.
<add> * @param array $urlOptions Array of options
<ide> * The bool version of this argument is *deprecated* and will be removed in 4.0.0
<ide> * @return string By default, returns a full pagination URL string for use in non-standard contexts (i.e. JavaScript)
<ide> * @link https://book.cakephp.org/3.0/en/views/helpers/paginator.html#generating-pagination-urls
<ide> */
<del> public function generateUrl(array $options = [], $model = null, $urlOptions = false)
<add> public function generateUrl(array $options = [], $model = null, $urlOptions = [])
<ide> {
<del> if (!is_array($urlOptions)) {
<add> if (is_bool($urlOptions)) {
<ide> $urlOptions = ['fullBase' => $urlOptions];
<del> if (func_num_args() == 3) {
<del> deprecationWarning(
<del> 'Passing an bool as third argument into PaginatorHelper::generateUrl() is deprecated ' .
<del> 'and will be removed in 4.0.0 . ' .
<del> 'Pass an array instead.'
<del> );
<del> }
<add> deprecationWarning(
<add> 'Passing a boolean value as third argument into PaginatorHelper::generateUrl() is deprecated ' .
<add> 'and will be removed in 4.0.0 . ' .
<add> 'Pass an array instead.'
<add> );
<ide> }
<ide> $urlOptions += [
<ide> 'escape' => true, | 1 |
Javascript | Javascript | fix amd mode for the new showhide module | 32cfc38a9c76d1f9163759c811cb9c82eb47d565 | <ide><path>src/css/showHide.js
<ide> define( [
<add> "../core",
<ide> "../data/var/dataPriv",
<ide> "../css/var/isHidden"
<del>], function( dataPriv, isHidden ) {
<add>], function( jQuery, dataPriv, isHidden ) {
<ide>
<ide> function showHide( elements, show ) {
<ide> var display, elem, | 1 |
Python | Python | fix converter cli | 2b3b937a04622d13e30204ff4553d6815a841289 | <ide><path>spacy/cli/convert.py
<ide> from .converters import conllu2json, iob2json
<ide> from ..util import prints
<ide>
<del>
<ide> # Converters are matched by file extension. To add a converter, add a new entry
<ide> # to this dict with the file extension mapped to the converter function imported
<ide> # from /converters.
<ide> n_sents=("Number of sentences per doc", "option", "n", float),
<ide> morphology=("Enable appending morphology to tags", "flag", "m", bool)
<ide> )
<del>def convert(input_file, output_dir, n_sents, morphology):
<add>def convert(_, input_file, output_dir, n_sents, morphology):
<ide> """Convert files into JSON format for use with train command and other
<ide> experiment management functions.
<ide> """
<ide> def convert(input_file, output_dir, n_sents, morphology):
<ide> if not file_ext in CONVERTERS:
<ide> prints("Can't find converter for %s" % input_path.parts[-1],
<ide> title="Unknown format", exits=1)
<del> CONVERTERS[file_ext](input_path, output_path, *args)
<add> CONVERTERS[file_ext](input_path, output_path,
<add> n_sents=n_sents, morphology=morphology) | 1 |
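The spaCy fix above also tightens the converter dispatch: options are now forwarded as explicit keyword arguments instead of an untyped `*args` tuple. A minimal, self-contained sketch of that dispatch pattern (the converter function and file names here are hypothetical stand-ins, not spaCy's real converters):

```python
from pathlib import Path


def conllu2json(input_path, output_path, n_sents=10, morphology=False):
    # Hypothetical stand-in for a real converter: it only reports its options.
    print(f"converting {input_path} -> {output_path} "
          f"(n_sents={n_sents}, morphology={morphology})")


# Converters are matched by file extension, mirroring the CONVERTERS dict in the patch.
CONVERTERS = {".conllu": conllu2json}


def convert(input_file, output_dir, n_sents=10, morphology=False):
    input_path, output_path = Path(input_file), Path(output_dir)
    converter = CONVERTERS.get(input_path.suffix)
    if converter is None:
        raise SystemExit(f"Can't find converter for {input_path.name}")
    # Forward the options by keyword so each converter picks what it needs.
    converter(input_path, output_path, n_sents=n_sents, morphology=morphology)


convert("train.conllu", "out", n_sents=5, morphology=True)
```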
Javascript | Javascript | add noodler to showcase | 3714029a6798066065355628146ed4f2c2401d95 | <ide><path>website/src/react-native/showcase.js
<ide> var apps = [
<ide> link: 'https://itunes.apple.com/ca/app/ncredible/id1019662810?mt=8',
<ide> author: 'NBC News Digital, LLC',
<ide> },
<add> {
<add> name: 'Noodler',
<add> icon: 'http://a5.mzstatic.com/us/r30/Purple6/v4/d9/9a/69/d99a6919-7f11-35ad-76ea-f1741643d875/icon175x175.png',
<add> link: 'https://itunes.apple.com/us/app/noodler-noodle-soup-oracle/id1013183002?mt=8',
<add> author: 'Michele Humes & Joshua Sierles',
<add> },
<ide> {
<ide> name: 'Night Light',
<ide> icon: 'http://is3.mzstatic.com/image/pf/us/r30/Purple7/v4/5f/50/5f/5f505fe5-0a30-6bbf-6ed9-81ef09351aba/mzl.lkeqxyeo.png', | 1 |
Ruby | Ruby | fix install script for ruby 1.9.x | c1568ab27746d993ec2d25276c625e42297d7555 | <ide><path>install_homebrew.rb
<ide> def sudo *args
<ide> args = if args.length > 1
<ide> args.unshift "/usr/bin/sudo"
<ide> else
<del> "/usr/bin/sudo #{args}"
<add> "/usr/bin/sudo #{args.first}"
<ide> end
<ide> ohai *args
<ide> system *args | 1 |
Mixed | Python | increase str conversion limit where required | d407476531dd85db79e58aa2dd13d3b3031d8185 | <ide><path>DIRECTORY.md
<ide> * [Modular Division](blockchain/modular_division.py)
<ide>
<ide> ## Boolean Algebra
<add> * [And Gate](boolean_algebra/and_gate.py)
<ide> * [Norgate](boolean_algebra/norgate.py)
<ide> * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py)
<ide>
<ide> * [Sol1](project_euler/problem_101/sol1.py)
<ide> * Problem 102
<ide> * [Sol1](project_euler/problem_102/sol1.py)
<add> * Problem 104
<add> * [Sol1](project_euler/problem_104/sol1.py)
<ide> * Problem 107
<ide> * [Sol1](project_euler/problem_107/sol1.py)
<ide> * Problem 109
<ide> * [Quantum Random](quantum/quantum_random.py)
<ide> * [Ripple Adder Classic](quantum/ripple_adder_classic.py)
<ide> * [Single Qubit Measure](quantum/single_qubit_measure.py)
<add> * [Superdense Coding](quantum/superdense_coding.py)
<ide>
<ide> ## Scheduling
<ide> * [First Come First Served](scheduling/first_come_first_served.py)
<ide> * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py)
<ide> * [Capitalize](strings/capitalize.py)
<ide> * [Check Anagrams](strings/check_anagrams.py)
<del> * [Check Pangram](strings/check_pangram.py)
<ide> * [Credit Card Validator](strings/credit_card_validator.py)
<ide> * [Detecting English Programmatically](strings/detecting_english_programmatically.py)
<ide> * [Dna](strings/dna.py)
<ide> * [Indian Phone Validator](strings/indian_phone_validator.py)
<ide> * [Is Contains Unique Chars](strings/is_contains_unique_chars.py)
<ide> * [Is Palindrome](strings/is_palindrome.py)
<add> * [Is Pangram](strings/is_pangram.py)
<add> * [Is Spain National Id](strings/is_spain_national_id.py)
<ide> * [Jaro Winkler](strings/jaro_winkler.py)
<ide> * [Join](strings/join.py)
<ide> * [Knuth Morris Pratt](strings/knuth_morris_pratt.py)
<ide> * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py)
<ide> * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py)
<ide> * [Get Imdbtop](web_programming/get_imdbtop.py)
<add> * [Get Top Billioners](web_programming/get_top_billioners.py)
<ide> * [Get Top Hn Posts](web_programming/get_top_hn_posts.py)
<ide> * [Get User Tweets](web_programming/get_user_tweets.py)
<ide> * [Giphy](web_programming/giphy.py)
<add><path>project_euler/problem_104/sol1.py
<del><path>project_euler/problem_104/sol.py.FIXME
<ide> the last nine digits are 1-9 pandigital, find k.
<ide> """
<ide>
<add>import sys
<add>
<add>sys.set_int_max_str_digits(0) # type: ignore
<add>
<ide>
<ide> def check(number: int) -> bool:
<ide> """
<ide> def check(number: int) -> bool:
<ide> check_front = [0] * 11
<ide>
<ide> # mark last 9 numbers
<del> for x in range(9):
<add> for _ in range(9):
<ide> check_last[int(number % 10)] = 1
<ide> number = number // 10
<ide> # flag
<ide> def check(number: int) -> bool:
<ide> # mark first 9 numbers
<ide> number = int(str(number)[:9])
<ide>
<del> for x in range(9):
<add> for _ in range(9):
<ide> check_front[int(number % 10)] = 1
<ide> number = number // 10
<ide>
<ide> def check1(number: int) -> bool:
<ide> check_last = [0] * 11
<ide>
<ide> # mark last 9 numbers
<del> for x in range(9):
<add> for _ in range(9):
<ide> check_last[int(number % 10)] = 1
<ide> number = number // 10
<ide> # flag | 2 |
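The key line in the Project Euler patch above is `sys.set_int_max_str_digits(0)`: CPython 3.11+ caps int-to-str conversion at 4300 digits by default, and the Fibonacci terms checked here far exceed that, so the solution lifts the limit before converting. A small sketch of the behaviour, guarded so it also runs on interpreters that predate the limit:

```python
import sys

big = 10 ** 10_000  # an integer with 10,001 decimal digits

if hasattr(sys, "set_int_max_str_digits"):
    try:
        str(big)  # rejected under the default 4300-digit limit
    except ValueError as exc:
        print("limited:", exc)
    sys.set_int_max_str_digits(0)  # 0 removes the limit, as the patch does

print("digits:", len(str(big)))  # 10001
```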
Javascript | Javascript | use strict mode for js files | f4282f98026f317312e7f7ada1eade17cf13d36c | <ide><path>fonts.js
<ide> /* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- /
<ide> /* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */
<ide>
<add>"use strict";
<add>
<ide> /**
<ide> * Maximum file size of the font.
<ide> */
<ide> var Type1Parser = function() {
<ide> this.extractFontProgram = function t1_extractFontProgram(aStream) {
<ide> var eexecString = decrypt(aStream, kEexecEncryptionKey, 4);
<ide> var subrs = [], glyphs = [];
<del> var inSubrs = inGlyphs = false;
<add> var inSubrs = false;
<add> var inGlyphs = false;
<ide> var glyph = "";
<ide>
<ide> var token = "";
<ide><path>glyphlist.js
<add>"use strict";
<add>
<ide> var GlyphsUnicode = {
<ide> A: 0x0041,
<ide> AE: 0x00C6,
<ide><path>pdf.js
<ide> /* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- /
<ide> /* vim: set shiftwidth=4 tabstop=8 autoindent cindent expandtab: */
<ide>
<add>"use strict";
<add>
<ide> var ERRORS = 0, WARNINGS = 1, TODOS = 5;
<ide> var verbosity = WARNINGS;
<ide>
<ide> var FlateStream = (function() {
<ide> this.eof = false;
<ide> this.codeSize = 0;
<ide> this.codeBuf = 0;
<del>
<add>
<ide> this.pos = 0;
<ide> this.bufferLength = 0;
<ide> }
<ide> var FlateStream = (function() {
<ide> litCodeTable = fixedLitCodeTab;
<ide> distCodeTable = fixedDistCodeTab;
<ide> } else if (hdr == 2) { // compressed block, dynamic codes
<add> var repeat = function repeat(stream, array, len, offset, what) {
<add> var repeat = stream.getBits(len) + offset;
<add> while (repeat-- > 0)
<add> array[i++] = what;
<add> }
<ide> var numLitCodes = this.getBits(5) + 257;
<ide> var numDistCodes = this.getBits(5) + 1;
<ide> var numCodeLenCodes = this.getBits(4) + 4;
<ide> var FlateStream = (function() {
<ide> var codes = numLitCodes + numDistCodes;
<ide> var codeLengths = new Array(codes);
<ide> while (i < codes) {
<del> function repeat(stream, array, len, offset, what) {
<del> var repeat = stream.getBits(len) + offset;
<del> while (repeat-- > 0)
<del> array[i++] = what;
<del> }
<ide> var code = this.getCode(codeLenCodeTab);
<ide> if (code == 16) {
<ide> repeat(this, codeLengths, 2, 3, len);
<ide> var Lexer = (function() {
<ide> var done = false;
<ide> var str = "";
<ide> var stream = this.stream;
<add> var ch = null;
<ide> do {
<ide> switch (ch = stream.getChar()) {
<ide> case undefined:
<ide> var Catalog = (function() {
<ide> return shadow(this, "toplevelPagesDict", obj);
<ide> },
<ide> get numPages() {
<del> obj = this.toplevelPagesDict.get("Count");
<add> var obj = this.toplevelPagesDict.get("Count");
<ide> assertWellFormed(IsInt(obj),
<ide> "page count in top level pages object is not an integer");
<ide> // shadow the prototype getter
<ide><path>utils/fonts_utils.js
<ide> * CharString or to understand the structure of the CFF format.
<ide> */
<ide>
<add>"use strict";
<ide>
<ide> /**
<ide> * Build a charset by assigning the glyph name and the human readable form
<ide><path>viewer.js
<ide> /* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- /
<ide> /* vim: set shiftwidth=4 tabstop=8 autoindent cindent expandtab: */
<ide>
<del>var pdfDocument, canvas, pageDisplay, pageNum, pageInterval;
<add>"use strict";
<add>
<add>var pdfDocument, canvas, pageDisplay, pageNum, numPages, pageInterval;
<ide> function load(userInput) {
<ide> canvas = document.getElementById("canvas");
<ide> canvas.mozOpaque = true;
<ide> pageNum = parseInt(queryParams().page) || 1;
<del> fileName = userInput;
<add> var fileName = userInput;
<ide> if (!userInput) {
<ide> fileName = queryParams().file || "compressed.tracemonkey-pldi-09.pdf";
<ide> }
<ide> function queryParams() {
<ide>
<ide> function open(url) {
<ide> document.title = url;
<del> req = new XMLHttpRequest();
<add> var req = new XMLHttpRequest();
<ide> req.open("GET", url);
<ide> req.mozResponseType = req.responseType = "arraybuffer";
<ide> req.expected = (document.URL.indexOf("file:") == 0) ? 0 : 200; | 5 |
Ruby | Ruby | add reason argument | 9f382bfd538cfdefd17712a19cd426ba4917175e | <ide><path>Library/Homebrew/dev-cmd/boneyard-formula-pr.rb
<ide> #: @hide_from_man_page
<del>#: * `boneyard-formula-pr` [`--dry-run`] [`--local`] <formula-name>:
<add>#: * `boneyard-formula-pr` [`--dry-run`] [`--local`] [`--reason=<reason>`] <formula-name> :
<ide> #: Creates a pull request to boneyard a formula.
<ide> #:
<ide> #: If `--dry-run` is passed, print what would be done rather than doing it.
<ide> #:
<ide> #: If `--local` is passed, perform only local operations (i.e. don't push or create PR).
<add>#:
<add>#: If `--reason=<reason>` is passed, append this to the commit/PR message.
<ide>
<ide> require "formula"
<ide> require "utils/json"
<ide> module Homebrew
<ide> def boneyard_formula_pr
<ide> local_only = ARGV.include?("--local")
<ide> formula = ARGV.formulae.first
<add> reason = ARGV.value("reason")
<ide> odie "No formula found!" unless formula
<ide>
<ide> formula_relpath = formula.path.relative_path_from(formula.tap.path)
<ide> def boneyard_formula_pr
<ide> end
<ide> end
<ide> branch = "#{formula.name}-boneyard"
<add>
<add> reason = " because #{reason}" if reason
<add>
<ide> if ARGV.dry_run?
<ide> puts "cd #{formula.tap.path}"
<ide> puts "git checkout --no-track -b #{branch} origin/master"
<ide> def boneyard_formula_pr
<ide> puts "hub fork"
<ide> puts "hub fork (to read $HUB_REMOTE)"
<ide> puts "git push $HUB_REMOTE #{branch}:#{branch}"
<del> puts "hub pull-request -m $'#{formula.name}: migrate to boneyard\\n\\nCreated with `brew boneyard-formula-pr`.'"
<add> puts "hub pull-request -m $'#{formula.name}: migrate to boneyard\\n\\nCreated with `brew boneyard-formula-pr`#{reason}.'"
<ide> end
<ide> else
<ide> cd formula.tap.path
<ide> def boneyard_formula_pr
<ide> pr_message = <<-EOS.undent
<ide> #{formula.name}: migrate to boneyard
<ide>
<del> Created with `brew boneyard-formula-pr`.
<add> Created with `brew boneyard-formula-pr`#{reason}.
<ide> EOS
<ide> pr_url = Utils.popen_read("hub", "pull-request", "-m", pr_message).chomp
<ide> end
<ide> def boneyard_formula_pr
<ide> puts "hub fork"
<ide> puts "hub fork (to read $HUB_REMOTE)"
<ide> puts "git push $HUB_REMOTE #{branch}:#{branch}"
<del> puts "hub pull-request --browse -m $'#{formula.name}: migrate from #{formula.tap.repo}\\n\\nGoes together with $PR_URL\\n\\nCreated with `brew boneyard-formula-pr`.'"
<add> puts "hub pull-request --browse -m $'#{formula.name}: migrate from #{formula.tap.repo}\\n\\nGoes together with $PR_URL\\n\\nCreated with `brew boneyard-formula-pr`#{reason}.'"
<ide> end
<ide> else
<ide> cd boneyard_tap.formula_dir
<ide> def boneyard_formula_pr
<ide>
<ide> Goes together with #{pr_url}.
<ide>
<del> Created with `brew boneyard-formula-pr`.
<add> Created with `brew boneyard-formula-pr`#{reason}.
<ide> EOS
<ide> end
<ide> end | 1 |
Go | Go | remove unused ctx from v1pusher | a4dbbe7d898ae6db366076cbe13049a8f88fc731 | <ide><path>distribution/push_v1.go
<ide> import (
<ide> )
<ide>
<ide> type v1Pusher struct {
<del> ctx context.Context
<ide> v1IDService *metadata.V1IDService
<ide> endpoint registry.APIEndpoint
<ide> ref reference.Named | 1 |
Ruby | Ruby | improve the way we spec this | 2eea340b2269aab85cead2a5e024d6ff9abd6c91 | <ide><path>spec/arel/algebra/unit/relations/relation_spec.rb
<ide> module Arel
<ide>
<ide> describe Relation::Enumerable do
<ide> it "implements enumerable" do
<del> check @relation.collect.should == @relation.session.read(@relation).collect
<del> @relation.first.should == @relation.session.read(@relation).first
<add> @relation.map { |value| value }.should ==
<add> @relation.session.read(@relation).map { |value| value }
<ide> end
<ide> end
<ide> end | 1 |
Text | Text | remove a reference to ruby 1.9 in guides | 74dc57640d856b2a8d424db02b156ded62513eb9 | <ide><path>guides/source/active_support_core_extensions.md
<ide> Returns the substring of the string starting at position `position`:
<ide> "hello".from(0) # => "hello"
<ide> "hello".from(2) # => "llo"
<ide> "hello".from(-2) # => "lo"
<del>"hello".from(10) # => "" if < 1.9, nil in 1.9
<add>"hello".from(10) # => nil
<ide> ```
<ide>
<ide> NOTE: Defined in `active_support/core_ext/string/access.rb`. | 1 |
Javascript | Javascript | clarify difference between value and ngvalue | dc0467879de9f5891470cfea425370874aef2541 | <ide><path>src/ng/directive/input.js
<ide> var inputType = {
<ide> * HTML radio button.
<ide> *
<ide> * @param {string} ngModel Assignable angular expression to data-bind to.
<del> * @param {string} value The value to which the expression should be set when selected.
<add> * @param {string} value The value to which the `ngModel` expression should be set when selected.
<add> * Note that `value` only supports `string` values, i.e. the scope model needs to be a string,
<add> * too. Use `ngValue` if you need complex models (`number`, `object`, ...).
<ide> * @param {string=} name Property name of the form under which the control is published.
<ide> * @param {string=} ngChange Angular expression to be executed when input changes due to user
<ide> * interaction with the input element.
<del> * @param {string} ngValue Angular expression which sets the value to which the expression should
<del> * be set when selected.
<add> * @param {string} ngValue Angular expression to which `ngModel` will be be set when the radio
<add> * is selected. Should be used instead of the `value` attribute if you need
<add> * a non-string `ngModel` (`boolean`, `array`, ...).
<ide> *
<ide> * @example
<ide> <example name="radio-input-directive" module="radioExample"> | 1 |
Text | Text | fix deprecation warnings in addon examples | 2c75a6b39f21361b9813207a11662d9d41cf6a60 | <ide><path>doc/api/addons.md
<ide> prototype:
<ide>
<ide> namespace demo {
<ide>
<add>using v8::Context;
<ide> using v8::Function;
<ide> using v8::FunctionCallbackInfo;
<ide> using v8::FunctionTemplate;
<ide> void MyObject::New(const FunctionCallbackInfo<Value>& args) {
<ide> // Invoked as plain function `MyObject(...)`, turn into construct call.
<ide> const int argc = 1;
<ide> Local<Value> argv[argc] = { args[0] };
<add> Local<Context> context = isolate->GetCurrentContext();
<ide> Local<Function> cons = Local<Function>::New(isolate, constructor);
<del> args.GetReturnValue().Set(cons->NewInstance(argc, argv));
<add> Local<Object> result =
<add> cons->NewInstance(context, argc, argv).ToLocalChecked();
<add> args.GetReturnValue().Set(result);
<ide> }
<ide> }
<ide>
<ide> The implementation in `myobject.cc` is similar to the previous example:
<ide>
<ide> namespace demo {
<ide>
<add>using v8::Context;
<ide> using v8::Function;
<ide> using v8::FunctionCallbackInfo;
<ide> using v8::FunctionTemplate;
<ide> void MyObject::New(const FunctionCallbackInfo<Value>& args) {
<ide> const int argc = 1;
<ide> Local<Value> argv[argc] = { args[0] };
<ide> Local<Function> cons = Local<Function>::New(isolate, constructor);
<del> args.GetReturnValue().Set(cons->NewInstance(argc, argv));
<add> Local<Context> context = isolate->GetCurrentContext();
<add> Local<Object> instance =
<add> cons->NewInstance(context, argc, argv).ToLocalChecked();
<add> args.GetReturnValue().Set(instance);
<ide> }
<ide> }
<ide>
<ide> void MyObject::NewInstance(const FunctionCallbackInfo<Value>& args) {
<ide> const unsigned argc = 1;
<ide> Local<Value> argv[argc] = { args[0] };
<ide> Local<Function> cons = Local<Function>::New(isolate, constructor);
<del> Local<Object> instance = cons->NewInstance(argc, argv);
<add> Local<Context> context = isolate->GetCurrentContext();
<add> Local<Object> instance =
<add> cons->NewInstance(context, argc, argv).ToLocalChecked();
<ide>
<ide> args.GetReturnValue().Set(instance);
<ide> }
<ide> The implementation of `myobject.cc` is similar to before:
<ide>
<ide> namespace demo {
<ide>
<add>using v8::Context;
<ide> using v8::Function;
<ide> using v8::FunctionCallbackInfo;
<ide> using v8::FunctionTemplate;
<ide> void MyObject::New(const FunctionCallbackInfo<Value>& args) {
<ide> // Invoked as plain function `MyObject(...)`, turn into construct call.
<ide> const int argc = 1;
<ide> Local<Value> argv[argc] = { args[0] };
<add> Local<Context> context = isolate->GetCurrentContext();
<ide> Local<Function> cons = Local<Function>::New(isolate, constructor);
<del> args.GetReturnValue().Set(cons->NewInstance(argc, argv));
<add> Local<Object> instance =
<add> cons->NewInstance(context, argc, argv).ToLocalChecked();
<add> args.GetReturnValue().Set(instance);
<ide> }
<ide> }
<ide>
<ide> void MyObject::NewInstance(const FunctionCallbackInfo<Value>& args) {
<ide> const unsigned argc = 1;
<ide> Local<Value> argv[argc] = { args[0] };
<ide> Local<Function> cons = Local<Function>::New(isolate, constructor);
<del> Local<Object> instance = cons->NewInstance(argc, argv);
<add> Local<Context> context = isolate->GetCurrentContext();
<add> Local<Object> instance =
<add> cons->NewInstance(context, argc, argv).ToLocalChecked();
<ide>
<ide> args.GetReturnValue().Set(instance);
<ide> } | 1 |
Ruby | Ruby | add tests for oldname optlink | dc4feaf56b5ba36c579277ea8fbc2f983f2bcdf0 | <ide><path>Library/Homebrew/test/test_keg.rb
<ide> def test_unlinking_keg
<ide> refute_predicate @dst, :symlink?
<ide> end
<ide>
<add> def test_oldname_opt_record
<add> assert_nil @keg.oldname_opt_record
<add> oldname_opt_record = HOMEBREW_PREFIX/"opt/oldfoo"
<add> oldname_opt_record.make_relative_symlink(HOMEBREW_CELLAR/"foo/1.0")
<add> assert_equal oldname_opt_record, @keg.oldname_opt_record
<add> end
<add>
<add> def test_optlink_relink
<add> oldname_opt_record = HOMEBREW_PREFIX/"opt/oldfoo"
<add> oldname_opt_record.make_relative_symlink(HOMEBREW_CELLAR/"foo/1.0")
<add> keg_record = HOMEBREW_CELLAR.join("foo", "2.0")
<add> keg_record.join("bin").mkpath
<add> keg = Keg.new(keg_record)
<add> keg.optlink
<add> assert_equal keg_record, oldname_opt_record.resolved_path
<add> keg.uninstall
<add> refute_predicate oldname_opt_record, :symlink?
<add> end
<add>
<add> def test_remove_oldname_opt_record
<add> oldname_opt_record = HOMEBREW_PREFIX/"opt/oldfoo"
<add> oldname_opt_record.make_relative_symlink(HOMEBREW_CELLAR/"foo/2.0")
<add> @keg.remove_oldname_opt_record
<add> assert_predicate oldname_opt_record, :symlink?
<add> oldname_opt_record.unlink
<add> oldname_opt_record.make_relative_symlink(HOMEBREW_CELLAR/"foo/1.0")
<add> @keg.remove_oldname_opt_record
<add> refute_predicate oldname_opt_record, :symlink?
<add> end
<add>
<ide> def test_link_dry_run
<ide> @mode.dry_run = true
<ide> | 1 |
Javascript | Javascript | fix minor typo | da3dc5c5fe4d7ea3c174f0df98f2aaa282eefef2 | <ide><path>src/ng/directive/ngSwitch.js
<ide> * attribute is displayed.
<ide> *
<ide> * @animations
<del> * enter - happens after the ngSwtich contents change and the matched child element is placed inside the container
<add> * enter - happens after the ngSwitch contents change and the matched child element is placed inside the container
<ide> * leave - happens just after the ngSwitch contents change and just before the former contents are removed from the DOM
<ide> *
<ide> * @usage | 1 |
Ruby | Ruby | remove run_initializers from class methods | e061a12a156791c35bba092263ad216b1b938502 | <ide><path>railties/lib/rails/initializable.rb
<ide> def initializer(name, opts = {}, &blk)
<ide> opts[:after] ||= initializers.last.name unless initializers.empty? || initializers.find { |i| i.name == opts[:before] }
<ide> initializers << Initializer.new(name, nil, opts, &blk)
<ide> end
<del>
<del> def run_initializers(*args)
<del> return if @ran
<del> initializers_chain.tsort.each do |initializer|
<del> instance_exec(*args, &initializer.block)
<del> end
<del> @ran = true
<del> end
<ide> end
<ide> end
<ide> end
<ide><path>railties/test/initializable_test.rb
<ide> module InitializableTests
<ide>
<ide> class Foo
<ide> include Rails::Initializable
<del>
<del> class << self
<del> attr_accessor :foo, :bar
<del> end
<add> attr_accessor :foo, :bar
<ide>
<ide> initializer :start do
<ide> @foo ||= 0
<ide> class Basic < ActiveSupport::TestCase
<ide> include ActiveSupport::Testing::Isolation
<ide>
<ide> test "initializers run" do
<del> Foo.run_initializers
<del> assert_equal 1, Foo.foo
<add> foo = Foo.new
<add> foo.run_initializers
<add> assert_equal 1, foo.foo
<ide> end
<ide>
<ide> test "initializers are inherited" do
<del> Bar.run_initializers
<del> assert_equal [1, 1], [Bar.foo, Bar.bar]
<add> bar = Bar.new
<add> bar.run_initializers
<add> assert_equal [1, 1], [bar.foo, bar.bar]
<ide> end
<ide>
<ide> test "initializers only get run once" do
<del> Foo.run_initializers
<del> Foo.run_initializers
<del> assert_equal 1, Foo.foo
<del> end
<del>
<del> test "running initializers on children does not effect the parent" do
<del> Bar.run_initializers
<del> assert_nil Foo.foo
<del> assert_nil Foo.bar
<del> end
<del>
<del> test "initializing with modules" do
<del> Word.run_initializers
<del> assert_equal "bird", $word
<add> foo = Foo.new
<add> foo.run_initializers
<add> foo.run_initializers
<add> assert_equal 1, foo.foo
<ide> end
<ide>
<ide> test "creating initializer without a block raises an error" do
<ide> class Basic < ActiveSupport::TestCase
<ide> class BeforeAfter < ActiveSupport::TestCase
<ide> test "running on parent" do
<ide> $arr = []
<del> Parent.run_initializers
<add> Parent.new.run_initializers
<ide> assert_equal [5, 1, 2], $arr
<ide> end
<ide>
<ide> test "running on child" do
<ide> $arr = []
<del> Child.run_initializers
<add> Child.new.run_initializers
<ide> assert_equal [5, 3, 1, 4, 2], $arr
<ide> end
<ide>
<ide> test "handles dependencies introduced before all initializers are loaded" do
<ide> $arr = []
<del> Interdependent::Application.run_initializers
<add> Interdependent::Application.new.run_initializers
<ide> assert_equal [1, 2, 3, 4], $arr
<ide> end
<ide> end | 2 |
Python | Python | update comments to reflect new functionality | 6fa1889afb62fb01e8413584aa7abc293245fcfc | <ide><path>scripts/flaskext_migrate.py
<ide> # Script which modifies source code away from the deprecated "flask.ext"
<del># format. Does not yet fully support imports in the style:
<del>#
<del># "import flask.ext.foo"
<del>#
<add># format.
<ide> #
<ide> # Run in the terminal by typing: `python flaskext_migrate.py <source_file.py>`
<ide> # | 1 |
Text | Text | remove outdated link | 1656cd2edbf566acd1c1efc299bd712df59e2847 | <ide><path>doc/api/v8.md
<ide> after the VM has started may result in unpredictable behavior, including
<ide> crashes and data loss; or it may simply do nothing.
<ide>
<ide> The V8 options available for a version of Node.js may be determined by running
<del>`node --v8-options`. An unofficial, community-maintained list of options
<del>and their effects is available [here][].
<add>`node --v8-options`.
<ide>
<ide> Usage:
<ide>
<ide> A subclass of [`Deserializer`][] corresponding to the format written by
<ide> [HTML structured clone algorithm]: https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
<ide> [V8]: https://developers.google.com/v8/
<ide> [Worker Threads]: worker_threads.html
<del>[here]: https://github.com/thlorenz/v8-flags/blob/master/flags-0.11.md | 1 |
Text | Text | improve formatting and clarity in "subset" | 2b3bd5c52d33ecd83b2f716a99ecacfc39914888 | <ide><path>curriculum/challenges/english/08-coding-interview-prep/data-structures/perform-a-subset-check-on-two-sets-of-data.english.md
<ide> forumTopicId: 301707
<ide>
<ide> ## Description
<ide> <section id='description'>
<del>In this exercise we are going to perform a subset test on 2 sets of data. We will create a method on our <code>Set</code> data structure called <code>subset</code>. This will compare the first set, against the second and if the first set is fully contained within the Second then it will return true.
<del>For example, if <code>setA = ['a','b']</code> and <code>setB = ['a','b','c','d']</code>, then the subset of setA and setB is: <code>setA.subset(setB)</code> should be <code>true</code>.
<add>In this exercise, we are going to perform a subset test on 2 sets of data. We will create a method on our <code>Set</code> data structure called <code>isSubsetOf</code>. This will compare the first set against the second, and if the first set is fully contained within the second, it will return <code>true</code>.
<add>For example, if <code>setA = ['a','b']</code> and <code>setB = ['a','b','c','d']</code>, then <code>setA</code> is a subset of <code>setB</code>, so <code>setA.isSubsetOf(setB)</code> should return <code>true</code>.
<ide> </section>
<ide>
<ide> ## Instructions
<ide> For example, if <code>setA = ['a','b']</code> and <code>setB = ['a','b','c','d']
<ide>
<ide> ```yml
<ide> tests:
<del> - text: Your <code>Set</code> class should have a <code>subset</code> method.
<del> testString: assert((function(){var test = new Set(); return (typeof test.subset === 'function')})());
<add> - text: Your <code>Set</code> class should have a <code>isSubsetOf</code> method.
<add> testString: assert((function(){var test = new Set(); return (typeof test.isSubsetOf === 'function')})());
<ide> - text: The first Set() should be contained in the second Set
<del> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setB.add('b'); setB.add('c'); setB.add('a'); setB.add('d'); var subsetSetAB = setA.subset(setB);return (subsetSetAB === true)})());
<del> - text: <code>['a', 'b'].subset(['a', 'b', 'c', 'd'])</code> should return <code>true</code>
<del> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setB.add('a'); setB.add('b'); setB.add('c'); setB.add('d'); var subsetSetAB = setA.subset(setB); return (subsetSetAB === true)})());
<del> - text: <code>['a', 'b', 'c'].subset(['a', 'b'])</code> should return <code>false</code>
<del> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setA.add('c'); setB.add('a'); setB.add('b'); var subsetSetAB = setA.subset(setB); return (subsetSetAB === false)})());
<del> - text: <code>[].subset([])</code> should return <code>true</code>
<del> testString: assert((function(){var setA = new Set(); var setB = new Set(); var subsetSetAB = setA.subset(setB); return (subsetSetAB === true)})());
<del> - text: <code>['a', 'b'].subset(['c', 'd'])</code> should return <code>false</code>
<del> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setB.add('c'); setB.add('d'); var subsetSetAB = setA.subset(setB); return (subsetSetAB === false)})());
<add> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setB.add('b'); setB.add('c'); setB.add('a'); setB.add('d'); var aIsSubsetOfB = setA.isSubsetOf(setB);return (aIsSubsetOfB === true)})());
<add> - text: <code>['a', 'b'].isSubsetOf(['a', 'b', 'c', 'd'])</code> should return <code>true</code>
<add> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setB.add('a'); setB.add('b'); setB.add('c'); setB.add('d'); var aIsSubsetOfB = setA.isSubsetOf(setB); return (aIsSubsetOfB === true)})());
<add> - text: <code>['a', 'b', 'c'].isSubsetOf(['a', 'b'])</code> should return <code>false</code>
<add> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setA.add('c'); setB.add('a'); setB.add('b'); var aIsSubsetOfB = setA.isSubsetOf(setB); return (aIsSubsetOfB === false)})());
<add> - text: <code>[].isSubsetOf([])</code> should return <code>true</code>
<add> testString: assert((function(){var setA = new Set(); var setB = new Set(); var aIsSubsetOfB = setA.isSubsetOf(setB); return (aIsSubsetOfB === true)})());
<add> - text: <code>['a', 'b'].isSubsetOf(['c', 'd'])</code> should return <code>false</code>
<add> testString: assert((function(){var setA = new Set(); var setB = new Set(); setA.add('a'); setA.add('b'); setB.add('c'); setB.add('d'); var aIsSubsetOfB = setA.isSubsetOf(setB); return (aIsSubsetOfB === false)})());
<ide>
<ide> ```
<ide>
<ide> class Set {
<ide> return newSet;
<ide> }
<ide>
<del> subset(set) {
<add> isSubsetOf(set) {
<ide> for(const value of this.values()){
<ide> if(!set.dictionary[value]) return false;
<ide> } | 1 |
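The renamed `isSubsetOf` method above simply verifies that every value of the first set also occurs in the second. For comparison, the same check sketched in Python — a hand-rolled loop equivalent to the JS version, next to the built-in `set.issubset`:

```python
def is_subset_of(first, second):
    # Mirrors the JS method: fail as soon as a value is missing from `second`.
    for value in first:
        if value not in second:
            return False
    return True


set_a = {"a", "b"}
set_b = {"a", "b", "c", "d"}

print(is_subset_of(set_a, set_b))  # True
print(is_subset_of(set_b, set_a))  # False
print(set_a.issubset(set_b))       # True, built-in equivalent
print(set_a <= set_b)              # True, operator form of the same test
```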
Javascript | Javascript | add test case | 21ca681cd91d985de070540059296472900f4fe7 | <ide><path>test/cases/parsing/local-modules/index.js
<del>it("should define and require a local module", function() {
<add>it("should define and require a local module", function () {
<ide> module.exports = "not set";
<del> define("my-module", function() {
<add> define("my-module", function () {
<ide> return 1234;
<ide> });
<ide> expect(module.exports).toBe("not set");
<del> define(["my-module"], function(myModule) {
<add> define(["my-module"], function (myModule) {
<ide> expect(myModule).toBe(1234);
<ide> return 2345;
<ide> });
<ide> it("should define and require a local module", function() {
<ide> require(["my-module"]);
<ide> });
<ide>
<del>it("should not create a chunk for a AMD require to a local module", function(done) {
<del> define("my-module2", function() {
<add>it("should not create a chunk for a AMD require to a local module", function (done) {
<add> define("my-module2", function () {
<ide> return 1235;
<ide> });
<ide> var sync = false;
<del> require(["my-module2"], function(myModule2) {
<add> require(["my-module2"], function (myModule2) {
<ide> expect(myModule2).toBe(1235);
<ide> sync = true;
<ide> });
<del> setImmediate(function() {
<add> setImmediate(function () {
<ide> expect(sync).toBe(true);
<ide> done();
<ide> });
<ide> });
<ide>
<del>it("should define and require a local module with deps", function() {
<add>it("should define and require a local module with deps", function () {
<ide> module.exports = "not set";
<del> define("my-module3", ["./dep"], function(dep) {
<add> define("my-module3", ["./dep"], function (dep) {
<ide> expect(dep).toBe("dep");
<ide> return 1234;
<ide> });
<ide> expect(module.exports).toBe("not set");
<del> define("my-module4", ["my-module3", "./dep"], function(myModule, dep) {
<add> define("my-module4", ["my-module3", "./dep"], function (myModule, dep) {
<ide> expect(dep).toBe("dep");
<ide> expect(myModule).toBe(1234);
<ide> return 2345;
<ide> it("should define and require a local module with deps", function() {
<ide> });
<ide>
<ide> it("should define and require a local module that is relative", function () {
<del> define("my-dir/my-module3", function() {
<add> define("my-dir/my-module3", function () {
<ide> return 1234;
<ide> });
<del> define("my-dir/my-other-dir/my-module4", function() {
<add> define("my-dir/my-other-dir/my-module4", function () {
<ide> return 2345;
<ide> });
<del> define("my-dir/my-other-dir/my-module5", ["./my-module4", "../my-module3"], function(myModule4, myModule3) {
<add> define("my-dir/my-other-dir/my-module5", [
<add> "./my-module4",
<add> "../my-module3"
<add> ], function (myModule4, myModule3) {
<ide> expect(myModule3).toBe(1234);
<ide> expect(myModule4).toBe(2345);
<ide> return 3456;
<ide> });
<ide> expect(require("my-dir/my-other-dir/my-module5")).toBe(3456);
<del>})
<add>});
<add>
<add>it("issue 12310", () => {
<add> const obj = { ok: true };
<add> define("local-module1", obj);
<add> const fn2 = () => ({ ok: true });
<add> define("local-module2", fn2);
<add> const fn3 = m1 => {
<add> return { m1 };
<add> };
<add> define("local-module3", ["local-module1"], fn3);
<add> expect(require("local-module1")).toBe(obj);
<add> expect(require("local-module2")).toEqual(obj);
<add> expect(require("local-module3")).toEqual({ m1: obj });
<add>}); | 1 |
Javascript | Javascript | use geturl() in historylocation | e0f7c70256a696538fd1f91b2d70025134a6551b | <ide><path>packages/ember-routing/lib/location/history_location.js
<ide> Ember.HistoryLocation = Ember.Object.extend({
<ide> @method initState
<ide> */
<ide> initState: function() {
<del> this.replaceState(get(this, 'location').pathname);
<add> this.replaceState(this.formatURL(this.getURL()));
<ide> set(this, 'history', window.history);
<ide> },
<ide> | 1 |
Javascript | Javascript | add util-deprecate to snapshot blacklist | 32e92c13f0ff608ca78299544841ab1b0c649add | <ide><path>script/lib/generate-startup-snapshot.js
<ide> module.exports = function(packagedAppPath) {
<ide> path.join('..', 'node_modules', 'tree-sitter', 'index.js') ||
<ide> requiredModuleRelativePath ===
<ide> path.join('..', 'node_modules', 'yauzl', 'index.js') ||
<add> requiredModuleRelativePath ===
<add> path.join('..', 'node_modules', 'util-deprecate', 'node.js') ||
<ide> requiredModuleRelativePath ===
<ide> path.join('..', 'node_modules', 'winreg', 'lib', 'registry.js') ||
<ide> requiredModuleRelativePath === | 1 |
Go | Go | add restart policy to restart policy tests | de1dad7074c84f56dd8f29bca0de29ba1e4ae6ce | <ide><path>integration-cli/docker_cli_kill_test.go
<ide> func (s *DockerSuite) TestKillWithSignal(c *check.C) {
<ide> func (s *DockerSuite) TestKillWithStopSignalWithSameSignalShouldDisableRestartPolicy(c *check.C) {
<ide> // Cannot port to Windows - does not support signals int the same way as Linux does
<ide> testRequires(c, DaemonIsLinux)
<del> out, _ := dockerCmd(c, "run", "-d", "--stop-signal=TERM", "busybox", "top")
<add> out, _ := dockerCmd(c, "run", "-d", "--stop-signal=TERM", "--restart=always", "busybox", "top")
<ide> cid := strings.TrimSpace(out)
<ide> c.Assert(waitRun(cid), check.IsNil)
<ide>
<ide> func (s *DockerSuite) TestKillWithStopSignalWithSameSignalShouldDisableRestartPo
<ide> func (s *DockerSuite) TestKillWithStopSignalWithDifferentSignalShouldKeepRestartPolicy(c *check.C) {
<ide> // Cannot port to Windows - does not support signals int the same way as Linux does
<ide> testRequires(c, DaemonIsLinux)
<del> out, _ := dockerCmd(c, "run", "-d", "--stop-signal=CONT", "busybox", "top")
<add> out, _ := dockerCmd(c, "run", "-d", "--stop-signal=CONT", "--restart=always", "busybox", "top")
<ide> cid := strings.TrimSpace(out)
<ide> c.Assert(waitRun(cid), check.IsNil)
<ide> | 1 |
Python | Python | delay import of shlex to reduce startup time | b78d3be51653f6241214f6354303bd261af1b426 | <ide><path>numpy/testing/numpytest.py
<ide> import sys
<ide> import imp
<ide> import types
<del>import shlex
<ide> import unittest
<ide> import traceback
<ide> import warnings
<ide> def run(self):
<ide> """ Run Numpy module test suite with level and verbosity
<ide> taken from sys.argv. Requires optparse module.
<ide> """
<add>
<add> # delayed import of shlex to reduce startup time
<add> import shlex
<add>
<ide> try:
<ide> from optparse import OptionParser
<ide> except ImportError: | 1 |
PHP | PHP | remove deprecation notice | d14c2064fa5ba4e66ae6b691e0f92cc01c7fbea3 | <ide><path>src/Validation/Validation.php
<ide> public static function comparison($check1, $operator, $check2)
<ide> * @param string $field The field to check $check against. This field must be present in $context.
<ide> * @param array $context The validation context.
<ide> * @return bool
<del> * @deprecated 3.6.0 Use compareFields() instead.
<ide> */
<ide> public static function compareWith($check, $field, $context)
<ide> { | 1 |
Javascript | Javascript | fix module tests which got disabled by ngmobile | 5da6b125a7447b4bbabb88b2d82b5634b55c3aea | <ide><path>angularFiles.js
<ide> angularFiles = {
<ide>
<ide> 'angularScenario': [
<ide> 'src/ngScenario/Scenario.js',
<add> 'src/ngScenario/browserTrigger.js',
<ide> 'src/ngScenario/Application.js',
<ide> 'src/ngScenario/Describe.js',
<ide> 'src/ngScenario/Future.js',
<ide> angularFiles = {
<ide> 'lib/jasmine/jasmine.js',
<ide> 'lib/jasmine-jstd-adapter/JasmineAdapter.js',
<ide> 'build/angular.js',
<del> 'build/angular-scenario.js',
<ide> 'src/ngMock/angular-mocks.js',
<ide> 'src/ngCookies/cookies.js',
<ide> 'src/ngResource/resource.js',
<ide> angularFiles = {
<ide> 'src/ngSanitize/sanitize.js',
<ide> 'src/ngSanitize/directive/ngBindHtml.js',
<ide> 'src/ngSanitize/filter/linky.js',
<add> 'src/ngScenario/browserTrigger.js',
<ide> 'test/matchers.js',
<add> 'test/testabilityPatch.js',
<ide> 'test/ngMock/*.js',
<ide> 'test/ngCookies/*.js',
<ide> 'test/ngResource/*.js',
<ide><path>src/ngScenario/Scenario.js
<ide> function callerFile(offset) {
<ide> };
<ide> }
<ide>
<del>/**
<del> * Triggers a browser event. Attempts to choose the right event if one is
<del> * not specified.
<del> *
<del> * @param {Object} element Either a wrapped jQuery/jqLite node or a DOMElement
<del> * @param {string} type Optional event type.
<del> * @param {Array.<string>=} keys Optional list of pressed keys
<del> * (valid values: 'alt', 'meta', 'shift', 'ctrl')
<del> * @param {number} x Optional x-coordinate for mouse/touch events.
<del> * @param {number} y Optional y-coordinate for mouse/touch events.
<del> */
<del>function browserTrigger(element, type, keys, x, y) {
<del> if (element && !element.nodeName) element = element[0];
<del> if (!element) return;
<del> if (!type) {
<del> type = {
<del> 'text': 'change',
<del> 'textarea': 'change',
<del> 'hidden': 'change',
<del> 'password': 'change',
<del> 'button': 'click',
<del> 'submit': 'click',
<del> 'reset': 'click',
<del> 'image': 'click',
<del> 'checkbox': 'click',
<del> 'radio': 'click',
<del> 'select-one': 'change',
<del> 'select-multiple': 'change'
<del> }[lowercase(element.type)] || 'click';
<del> }
<del> if (lowercase(nodeName_(element)) == 'option') {
<del> element.parentNode.value = element.value;
<del> element = element.parentNode;
<del> type = 'change';
<del> }
<del>
<del> keys = keys || [];
<del> function pressed(key) {
<del> return indexOf(keys, key) !== -1;
<del> }
<del>
<del> if (msie < 9) {
<del> switch(element.type) {
<del> case 'radio':
<del> case 'checkbox':
<del> element.checked = !element.checked;
<del> break;
<del> }
<del> // WTF!!! Error: Unspecified error.
<del> // Don't know why, but some elements when detached seem to be in inconsistent state and
<del> // calling .fireEvent() on them will result in very unhelpful error (Error: Unspecified error)
<del> // forcing the browser to compute the element position (by reading its CSS)
<del> // puts the element in consistent state.
<del> element.style.posLeft;
<del>
<del> // TODO(vojta): create event objects with pressed keys to get it working on IE<9
<del> var ret = element.fireEvent('on' + type);
<del> if (lowercase(element.type) == 'submit') {
<del> while(element) {
<del> if (lowercase(element.nodeName) == 'form') {
<del> element.fireEvent('onsubmit');
<del> break;
<del> }
<del> element = element.parentNode;
<del> }
<del> }
<del> return ret;
<del> } else {
<del> var evnt = document.createEvent('MouseEvents'),
<del> originalPreventDefault = evnt.preventDefault,
<del> iframe = _jQuery('#application iframe')[0],
<del> appWindow = iframe ? iframe.contentWindow : window,
<del> fakeProcessDefault = true,
<del> finalProcessDefault,
<del> angular = appWindow.angular || {};
<del>
<del> // igor: temporary fix for https://bugzilla.mozilla.org/show_bug.cgi?id=684208
<del> angular['ff-684208-preventDefault'] = false;
<del> evnt.preventDefault = function() {
<del> fakeProcessDefault = false;
<del> return originalPreventDefault.apply(evnt, arguments);
<del> };
<del>
<del> x = x || 0;
<del> y = y || 0;
<del> evnt.initMouseEvent(type, true, true, window, 0, x, y, x, y, pressed('ctrl'), pressed('alt'),
<del> pressed('shift'), pressed('meta'), 0, element);
<del>
<del> element.dispatchEvent(evnt);
<del> finalProcessDefault = !(angular['ff-684208-preventDefault'] || !fakeProcessDefault);
<del>
<del> delete angular['ff-684208-preventDefault'];
<del>
<del> return finalProcessDefault;
<del> }
<del>}
<ide>
<ide> /**
<ide> * Don't use the jQuery trigger method since it works incorrectly.
<ide><path>src/ngScenario/browserTrigger.js
<add>'use strict';
<add>
<add>(function() {
<add> var msie = parseInt((/msie (\d+)/.exec(navigator.userAgent.toLowerCase()) || [])[1], 10);
<add>
<add> function indexOf(array, obj) {
<add> if (array.indexOf) return array.indexOf(obj);
<add>
<add> for ( var i = 0; i < array.length; i++) {
<add> if (obj === array[i]) return i;
<add> }
<add> return -1;
<add> }
<add>
<add>
<add>
<add> /**
<add> * Triggers a browser event. Attempts to choose the right event if one is
<add> * not specified.
<add> *
<add> * @param {Object} element Either a wrapped jQuery/jqLite node or a DOMElement
<add> * @param {string} eventType Optional event type.
<add> * @param {Array.<string>=} keys Optional list of pressed keys
<add> * (valid values: 'alt', 'meta', 'shift', 'ctrl')
<add> * @param {number} x Optional x-coordinate for mouse/touch events.
<add> * @param {number} y Optional y-coordinate for mouse/touch events.
<add> */
<add> window.browserTrigger = function browserTrigger(element, eventType, keys, x, y) {
<add> if (element && !element.nodeName) element = element[0];
<add> if (!element) return;
<add>
<add> var inputType = (element.type) ? element.type.toLowerCase() : null,
<add> nodeName = element.nodeName.toLowerCase();
<add>
<add> if (!eventType) {
<add> eventType = {
<add> 'text': 'change',
<add> 'textarea': 'change',
<add> 'hidden': 'change',
<add> 'password': 'change',
<add> 'button': 'click',
<add> 'submit': 'click',
<add> 'reset': 'click',
<add> 'image': 'click',
<add> 'checkbox': 'click',
<add> 'radio': 'click',
<add> 'select-one': 'change',
<add> 'select-multiple': 'change',
<add> '_default_': 'click'
<add> }[inputType || '_default_'];
<add> }
<add>
<add> if (nodeName == 'option') {
<add> element.parentNode.value = element.value;
<add> element = element.parentNode;
<add> eventType = 'change';
<add> }
<add>
<add> keys = keys || [];
<add> function pressed(key) {
<add> return indexOf(keys, key) !== -1;
<add> }
<add>
<add> if (msie < 9) {
<add> if (inputType == 'radio' || inputType == 'checkbox') {
<add> element.checked = !element.checked;
<add> }
<add>
<add> // WTF!!! Error: Unspecified error.
<add> // Don't know why, but some elements when detached seem to be in inconsistent state and
<add> // calling .fireEvent() on them will result in very unhelpful error (Error: Unspecified error)
<add> // forcing the browser to compute the element position (by reading its CSS)
<add> // puts the element in consistent state.
<add> element.style.posLeft;
<add>
<add> // TODO(vojta): create event objects with pressed keys to get it working on IE<9
<add> var ret = element.fireEvent('on' + eventType);
<add> if (inputType == 'submit') {
<add> while(element) {
<add> if (element.nodeName.toLowerCase() == 'form') {
<add> element.fireEvent('onsubmit');
<add> break;
<add> }
<add> element = element.parentNode;
<add> }
<add> }
<add> return ret;
<add> } else {
<add> var evnt = document.createEvent('MouseEvents'),
<add> originalPreventDefault = evnt.preventDefault,
<add> appWindow = element.ownerDocument.defaultView,
<add> fakeProcessDefault = true,
<add> finalProcessDefault,
<add> angular = appWindow.angular || {};
<add>
<add> // igor: temporary fix for https://bugzilla.mozilla.org/show_bug.cgi?id=684208
<add> angular['ff-684208-preventDefault'] = false;
<add> evnt.preventDefault = function() {
<add> fakeProcessDefault = false;
<add> return originalPreventDefault.apply(evnt, arguments);
<add> };
<add>
<add> x = x || 0;
<add> y = y || 0;
<add> evnt.initMouseEvent(eventType, true, true, window, 0, x, y, x, y, pressed('ctrl'), pressed('alt'),
<add> pressed('shift'), pressed('meta'), 0, element);
<add>
<add> element.dispatchEvent(evnt);
<add> finalProcessDefault = !(angular['ff-684208-preventDefault'] || !fakeProcessDefault);
<add>
<add> delete angular['ff-684208-preventDefault'];
<add>
<add> return finalProcessDefault;
<add> }
<add> }
<add>}());
<ide><path>test/testabilityPatch.js
<ide> * special event and changes it form 'change' to 'click/keydown' and
<ide> * few others. This horrible hack removes the special treatment
<ide> */
<del>_jQuery.event.special.change = undefined;
<add>if (window._jQuery) _jQuery.event.special.change = undefined;
<add>
<add>if (window.bindJQuery) bindJQuery();
<ide>
<del>bindJQuery();
<ide> beforeEach(function() {
<del> publishExternalAPI(angular);
<add> // all this stuff is not needed for module tests, where jqlite and publishExternalAPI and jqLite are not global vars
<add> if (window.publishExternalAPI) {
<add> publishExternalAPI(angular);
<add>
<add> // workaround for IE bug https://plus.google.com/104744871076396904202/posts/Kqjuj6RSbbT
<add> // IE overwrite window.jQuery with undefined because of empty jQuery var statement, so we have to
<add> // correct this, but only if we are not running in jqLite mode
<add> if (!_jqLiteMode && _jQuery !== jQuery) {
<add> jQuery = _jQuery;
<add> }
<ide>
<del> // workaround for IE bug https://plus.google.com/104744871076396904202/posts/Kqjuj6RSbbT
<del> // IE overwrite window.jQuery with undefined because of empty jQuery var statement, so we have to
<del> // correct this, but only if we are not running in jqLite mode
<del> if (!_jqLiteMode && _jQuery !== jQuery) {
<del> jQuery = _jQuery;
<add> // This resets global id counter;
<add> uid = ['0', '0', '0'];
<add>
<add> // reset to jQuery or default to us.
<add> bindJQuery();
<ide> }
<ide>
<del> // This resets global id counter;
<del> uid = ['0', '0', '0'];
<ide>
<del> // reset to jQuery or default to us.
<del> bindJQuery();
<del> jqLite(document.body).html('').removeData();
<add> angular.element(document.body).html('').removeData();
<ide> });
<ide>
<ide> afterEach(function() {
<ide> afterEach(function() {
<ide>
<ide> // This line should be enabled as soon as this bug is fixed: http://bugs.jquery.com/ticket/11775
<ide> //var cache = jqLite.cache;
<del> var cache = JQLite.cache;
<add> var cache = angular.element.cache;
<ide>
<ide> forEachSorted(cache, function(expando, key){
<del> forEach(expando.data, function(value, key){
<add> angular.forEach(expando.data, function(value, key){
<ide> count ++;
<ide> if (value.$element) {
<ide> dump('LEAK', key, value.$id, sortedHtml(value.$element));
<ide> } else {
<del> dump('LEAK', key, toJson(value));
<add> dump('LEAK', key, angular.toJson(value));
<ide> }
<ide> });
<ide> });
<ide> if (count) {
<ide> throw new Error('Found jqCache references that were not deallocated! count: ' + count);
<ide> }
<add>
<add>
<add> // copied from Angular.js
<add> // we need these two methods here so that we can run module tests with wrapped angular.js
<add> function sortedKeys(obj) {
<add> var keys = [];
<add> for (var key in obj) {
<add> if (obj.hasOwnProperty(key)) {
<add> keys.push(key);
<add> }
<add> }
<add> return keys.sort();
<add> }
<add>
<add> function forEachSorted(obj, iterator, context) {
<add> var keys = sortedKeys(obj);
<add> for ( var i = 0; i < keys.length; i++) {
<add> iterator.call(context, obj[keys[i]], keys[i]);
<add> }
<add> return keys;
<add> }
<ide> });
<ide>
<ide>
<ide> function dealoc(obj) {
<del> var jqCache = jqLite.cache;
<add> var jqCache = angular.element.cache;
<ide> if (obj) {
<del> if (isElement(obj)) {
<del> cleanup(jqLite(obj));
<add> if (angular.isElement(obj)) {
<add> cleanup(angular.element(obj));
<ide> } else {
<ide> for(var key in jqCache) {
<ide> var value = jqCache[key];
<ide> function dealoc(obj) {
<ide> function cleanup(element) {
<ide> element.unbind().removeData();
<ide> for ( var i = 0, children = element.contents() || []; i < children.length; i++) {
<del> cleanup(jqLite(children[i]));
<add> cleanup(angular.element(children[i]));
<ide> }
<ide> }
<ide> } | 4 |
Javascript | Javascript | add canuseeventlisteners to executionenvironment | a21979404c93d5e548ebe6e372e4c80b0eacf625 | <ide><path>src/vendor/core/ExecutionEnvironment.js
<ide> var ExecutionEnvironment = {
<ide>
<ide> canUseWorkers: typeof Worker !== 'undefined',
<ide>
<add> canUseEventListeners:
<add> canUseDOM && (window.addEventListener || window.attachEvent),
<add>
<ide> isInWorker: !canUseDOM // For now, this is true - might change in the future.
<ide>
<ide> }; | 1 |
Python | Python | fix improper getresponse() uses | 23f400ed6d7902284ba316730c7550c0c76937e1 | <ide><path>libcloud/common/brightbox.py
<ide> def _fetch_oauth_token(self):
<ide> response = self.connection.request(method='POST', url='/token',
<ide> body=body, headers=headers)
<ide>
<del> response = self.connection.getresponse()
<del>
<ide> if response.status == httplib.OK:
<ide> return json.loads(response.read())['access_token']
<ide> else:
<del> responseCls = BrightboxResponse(response=response, connection=self)
<add> responseCls = BrightboxResponse(
<add> response=response.getresponse(), connection=self)
<ide> message = responseCls.parse_error()
<ide> raise InvalidCredsError(message)
<ide>
<ide><path>libcloud/common/ovh.py
<ide> def request_consumer_key(self, user_id):
<ide> }
<ide> httpcon = LibcloudConnection(host=self.host, port=443)
<ide> httpcon.request(method='POST', url=action, body=data, headers=headers)
<del> response = httpcon.getresponse()
<add> response = JsonResponse(httpcon.getresponse(), httpcon)
<ide>
<ide> if response.status == httplib.UNAUTHORIZED:
<ide> raise InvalidCredsError()
<ide>
<del> body = response.read()
<del> json_response = json.loads(body)
<add> json_response = response.parse_body()
<ide> httpcon.close()
<ide> return json_response
<ide> | 2 |
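Both libcloud hunks above fix the same class of mistake: the raw HTTP response was fetched again (or re-read) instead of reusing the object already obtained from `getresponse()`. A minimal standard-library sketch of the intended pattern — one `getresponse()` per request, with the single returned object reused for both status and body (example.com is just a placeholder host; a network connection is needed to run it):

```python
import http.client

conn = http.client.HTTPSConnection("example.com")
conn.request("GET", "/")

# Fetch the response exactly once and keep reusing that object;
# calling conn.getresponse() again for the same request is an error.
response = conn.getresponse()

print(response.status)      # e.g. 200
body = response.read()      # read the body from the same response object
print(len(body), "bytes")

conn.close()
```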